#pragma once

#include <algorithm>
#include <atomic>
#include <cstdint>
#include <map>
#include <shared_mutex>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>

#include <art.h>
#include <field.h>
#include <index.h>
#include <json.hpp>
#include <option.h>
#include <sparsepp.h>
#include <store.h>
#include <string_utils.h>
#include <topster.h>
#include <tsl/htrie_map.h>
#include <tsl/htrie_set.h>

#include "tokenizer.h"
#include "synonym_index.h"

struct doc_seq_id_t {
    uint32_t seq_id;
    bool is_new;
};

struct highlight_field_t {
    std::string name;
    bool fully_highlighted;
    bool infix;
    tsl::htrie_map<char, token_leaf> qtoken_leaves;

    highlight_field_t(const std::string& name, bool fully_highlighted, bool infix):
            name(name), fully_highlighted(fully_highlighted), infix(infix) {

    }
};

class Collection {
private:
    mutable std::shared_mutex mutex;

    const uint8_t CURATED_RECORD_IDENTIFIER = 100;

    const size_t DEFAULT_TOPSTER_SIZE = 250;

    struct highlight_t {
        size_t field_index;
        std::string field;
        std::vector<std::string> snippets;
        std::vector<std::string> values;
        std::vector<size_t> indices;
        uint64_t match_score;
        std::vector<std::vector<std::string>> matched_tokens;

        highlight_t(): field_index(0), match_score(0) {

        }

        bool operator<(const highlight_t& a) const {
            return std::tie(match_score, field_index) > std::tie(a.match_score, field_index);
        }
    };

    struct match_index_t {
        Match match;
        uint64_t match_score = 0;
        size_t index;

        match_index_t(Match match, uint64_t match_score, size_t index): match(match),
                match_score(match_score), index(index) {

        }

        bool operator<(const match_index_t& a) const {
            if(match_score != a.match_score) {
                return match_score > a.match_score;
            }
            return index < a.index;
        }
    };

    const std::string name;

    const std::atomic<uint32_t> collection_id;

    const std::atomic<uint64_t> created_at;

    std::atomic<size_t> num_documents;

    // Auto incrementing record ID used internally for indexing - not exposed to the client
    std::atomic<uint32_t> next_seq_id;

    Store* store;

    std::vector<field> fields;

    tsl::htrie_map<char, field> search_schema;

    std::map<std::string, override_t> overrides;

    std::string default_sorting_field;

    const float max_memory_ratio;

    std::string fallback_field_type;

    std::unordered_map<std::string, field> dynamic_fields;

    tsl::htrie_map<char, field> nested_fields;

    bool enable_nested_fields;

    std::vector<char> symbols_to_index;

    std::vector<char> token_separators;

    Index* index;

    SynonymIndex* synonym_index;

    // methods

    std::string get_doc_id_key(const std::string & doc_id) const;

    std::string get_seq_id_key(uint32_t seq_id) const;

    void highlight_result(const std::string& h_obj, const field &search_field,
                          const size_t search_field_index,
                          const tsl::htrie_map<char, token_leaf>& qtoken_leaves,
                          const KV* field_order_kv, const nlohmann::json &document,
                          nlohmann::json& highlight_doc, nlohmann::json& highlight_full_doc,
                          nlohmann::json& highlight_meta,
                          StringUtils & string_utils,
                          const size_t snippet_threshold,
                          const size_t highlight_affix_num_tokens,
                          bool highlight_fully, bool is_infix_search,
                          const std::string& highlight_start_tag,
                          const std::string& highlight_end_tag,
                          const uint8_t* index_symbols,
                          highlight_t &highlight,
                          bool& found_highlight,
                          bool& found_full_highlight) const;

    void remove_document(const nlohmann::json & document, const uint32_t seq_id, bool remove_from_store);

    void curate_results(std::string& actual_query, bool enable_overrides, bool already_segmented,
                        const std::map<size_t, std::vector<std::string>>& pinned_hits,
                        const std::vector<std::string>& hidden_hits,
                        std::vector<std::pair<uint32_t, uint32_t>>& included_ids,
                        std::vector<uint32_t>& excluded_ids,
                        std::vector<const override_t*>& filter_overrides,
                        bool& filter_curated_hits,
                        std::string& curated_sort_by) const;

    static Option<bool> detect_new_fields(nlohmann::json& document,
                                          const DIRTY_VALUES& dirty_values,
                                          const tsl::htrie_map<char, field>& schema,
                                          const std::unordered_map<std::string, field>& dyn_fields,
                                          const tsl::htrie_map<char, field>& nested_fields,
                                          const std::string& fallback_field_type,
                                          std::vector<field>& new_fields,
                                          bool enable_nested_fields);
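    // Facet ordering helpers: both sort by descending count; ties are broken by the facet
    // hash (facet_count_compare) or by preferring the shorter facet value string
    // (facet_count_str_compare).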
    static bool facet_count_compare(const std::pair<uint64_t, facet_count_t>& a,
                                    const std::pair<uint64_t, facet_count_t>& b) {
        return std::tie(a.second.count, a.first) > std::tie(b.second.count, b.first);
    }

    static bool facet_count_str_compare(const facet_value_t& a, const facet_value_t& b) {
        size_t a_count = a.count;
        size_t b_count = b.count;

        size_t a_value_size = UINT64_MAX - a.value.size();
        size_t b_value_size = UINT64_MAX - b.value.size();

        return std::tie(a_count, a_value_size) > std::tie(b_count, b_value_size);
    }

    static Option<bool> parse_pinned_hits(const std::string& pinned_hits_str,
                                          std::map<size_t, std::vector<std::string>>& pinned_hits);

    Index* init_index();

    static std::vector<char> to_char_array(const std::vector<std::string>& strs);

    Option<bool> validate_and_standardize_sort_fields(const std::vector<sort_by> & sort_fields,
                                                      std::vector<sort_by>& sort_fields_std) const;

    Option<bool> persist_collection_meta();

    Option<bool> batch_alter_data(const tsl::htrie_map<char, field>& schema_additions,
                                  const std::unordered_map<std::string, field>& new_dynamic_fields,
                                  const std::vector<field>& del_fields,
                                  const std::string& this_fallback_field_type,
                                  const bool do_validation);

    Option<bool> validate_alter_payload(nlohmann::json& schema_changes,
                                        tsl::htrie_map<char, field>& schema_additions,
                                        tsl::htrie_map<char, field>& schema_reindex,
                                        std::unordered_map<std::string, field>& addition_dynamic_fields,
                                        std::unordered_map<std::string, field>& reindex_dynamic_fields,
                                        std::vector<field>& del_fields,
                                        std::string& fallback_field_type);

    void process_filter_overrides(std::vector<const override_t*>& filter_overrides,
                                  std::vector<std::string>& q_include_tokens,
                                  token_ordering token_order,
                                  std::vector<filter>& filters,
                                  std::vector<std::pair<uint32_t, uint32_t>>& included_ids,
                                  std::vector<uint32_t>& excluded_ids) const;

    void populate_text_match_info(nlohmann::json& info, uint64_t match_score) const;

    static void remove_flat_fields(nlohmann::json& document);

    bool handle_highlight_text(std::string& text, bool normalise, const field &search_field,
                               const std::vector<char>& symbols_to_index,
                               const std::vector<char>& token_separators,
                               highlight_t& highlight, StringUtils & string_utils,
                               bool use_word_tokenizer,
                               const size_t highlight_affix_num_tokens,
                               const tsl::htrie_map<char, token_leaf>& qtoken_leaves,
                               int last_valid_offset_index,
                               const Match& match,
                               const size_t prefix_token_num_chars,
                               bool highlight_fully,
                               const size_t snippet_threshold,
                               bool is_infix_search,
                               std::vector<std::string>& raw_query_tokens,
                               size_t last_valid_offset,
                               const std::string& highlight_start_tag,
                               const std::string& highlight_end_tag,
                               const uint8_t* index_symbols,
                               const match_index_t& match_index) const;

    static Option<bool> extract_field_name(const std::string& field_name,
                                           const tsl::htrie_map<char, field>& search_schema,
                                           std::vector<std::string>& processed_search_fields,
                                           bool extract_only_string_fields,
                                           bool enable_nested_fields);

    static Option<bool> flatten_and_identify_new_fields(nlohmann::json& doc,
                                                        const std::vector<field>& nested_fields_found,
                                                        const tsl::htrie_map<char, field>& schema,
                                                        std::vector<field>& new_fields);

    bool is_nested_array(const nlohmann::json& obj, std::vector<std::string> path_parts, size_t part_i) const;

    template<typename T>
    static bool highlight_nested_field(const nlohmann::json& hdoc, nlohmann::json& hobj,
                                       const nlohmann::json& fdoc, nlohmann::json& fobj,
                                       std::vector<std::string>& path_parts, size_t path_index, T func);

    static Option<bool> resolve_field_type(field& new_field,
                                           nlohmann::detail::iter_impl<nlohmann::basic_json<>>& kv,
                                           nlohmann::json& document,
                                           const DIRTY_VALUES& dirty_values,
                                           const bool found_dynamic_field,
                                           const std::string& fallback_field_type,
                                           std::vector<field>& new_fields,
                                           std::vector<field>& nested_fields_found);

public:

    enum {MAX_ARRAY_MATCHES = 5};

    const size_t PER_PAGE_MAX = 250;

    const size_t GROUP_LIMIT_MAX = 99;

    // Using a $ prefix so that these meta keys stay above record entries in a lexicographically ordered KV store
    static constexpr const char* COLLECTION_META_PREFIX = "$CM";
    static constexpr const char* COLLECTION_NEXT_SEQ_PREFIX = "$CS";
    static constexpr const char* COLLECTION_OVERRIDE_PREFIX = "$CO";
    static constexpr const char* SEQ_ID_PREFIX = "$SI";
    static constexpr const char* DOC_ID_PREFIX = "$DI";
    static constexpr const char* COLLECTION_NAME_KEY = "name";
    static constexpr const char* COLLECTION_ID_KEY = "id";
    static constexpr const char* COLLECTION_SEARCH_FIELDS_KEY = "fields";
    static constexpr const char* COLLECTION_DEFAULT_SORTING_FIELD_KEY = "default_sorting_field";
    static constexpr const char* COLLECTION_CREATED = "created_at";
    static constexpr const char* COLLECTION_NUM_MEMORY_SHARDS = "num_memory_shards";
    static constexpr const char* COLLECTION_FALLBACK_FIELD_TYPE = "fallback_field_type";
    static constexpr const char* COLLECTION_ENABLE_NESTED_FIELDS = "enable_nested_fields";
    static constexpr const char* COLLECTION_SYMBOLS_TO_INDEX = "symbols_to_index";
    static constexpr const char* COLLECTION_SEPARATORS = "token_separators";

    // methods

    Collection() = delete;

    Collection(const std::string& name, const uint32_t collection_id, const uint64_t created_at,
               const uint32_t next_seq_id, Store *store, const std::vector<field>& fields,
               const std::string& default_sorting_field,
               const float max_memory_ratio, const std::string& fallback_field_type,
               const std::vector<std::string>& symbols_to_index,
               const std::vector<std::string>& token_separators,
               const bool enable_nested_fields);

    ~Collection();

    static std::string get_next_seq_id_key(const std::string & collection_name);

    static std::string get_meta_key(const std::string & collection_name);

    static std::string get_override_key(const std::string & collection_name, const std::string & override_id);

    std::string get_seq_id_collection_prefix() const;

    std::string get_name() const;

    uint64_t get_created_at() const;

    uint32_t get_collection_id() const;

    uint32_t get_next_seq_id();

    Option<uint32_t> doc_id_to_seq_id(const std::string & doc_id) const;

    std::vector<std::string> get_facet_fields();

    std::vector<field> get_sort_fields();

    std::vector<field> get_fields();

    std::unordered_map<std::string, field> get_dynamic_fields();

    tsl::htrie_map<char, field> get_schema();

    std::string get_default_sorting_field();

    Option<doc_seq_id_t> to_doc(const std::string& json_str, nlohmann::json& document,
                                const index_operation_t& operation,
                                const DIRTY_VALUES dirty_values,
                                const std::string& id="");

    static uint32_t get_seq_id_from_key(const std::string & key);

    Option<bool> get_document_from_store(const std::string & seq_id_key, nlohmann::json & document) const;

    Option<bool> get_document_from_store(const uint32_t& seq_id, nlohmann::json & document) const;

    Option<uint32_t> index_in_memory(nlohmann::json & document, uint32_t seq_id,
                                     const index_operation_t op, const DIRTY_VALUES& dirty_values);

    static void prune_doc(nlohmann::json& doc, const tsl::htrie_set<char>& include_names,
                          const tsl::htrie_set<char>& exclude_names,
                          std::string parent_name = "", size_t depth = 0);

    const Index* _get_index() const;

    bool facet_value_to_string(const facet &a_facet, const facet_count_t &facet_count,
                               const nlohmann::json &document, std::string &value) const;

    static void populate_result_kvs(Topster *topster, std::vector<std::vector<KV*>> &result_kvs);

    void batch_index(std::vector<index_record>& index_records, std::vector<std::string>& json_out,
                     size_t &num_indexed, const bool& write_docs, const bool& write_id);

    bool is_exceeding_memory_threshold() const;

    void parse_search_query(const std::string &query, std::vector<std::string>& q_include_tokens,
                            std::vector<std::vector<std::string>>& q_exclude_tokens,
                            std::vector<std::vector<std::string>>& q_phrases,
                            const std::string& locale, const bool already_segmented) const;

    // PUBLIC OPERATIONS

    nlohmann::json get_summary_json() const;

    size_t batch_index_in_memory(std::vector<index_record>& index_records);
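    // Document write paths: add() indexes a single JSON document, while add_many() ingests a
    // batch of JSONL lines in one call.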
    Option<nlohmann::json> add(const std::string & json_str,
                               const index_operation_t& operation=CREATE,
                               const std::string& id="",
                               const DIRTY_VALUES& dirty_values=DIRTY_VALUES::COERCE_OR_REJECT);

    nlohmann::json add_many(std::vector<std::string>& json_lines, nlohmann::json& document,
                            const index_operation_t& operation=CREATE,
                            const std::string& id="",
                            const DIRTY_VALUES& dirty_values=DIRTY_VALUES::COERCE_OR_REJECT,
                            const bool& write_docs=false,
                            const bool& write_id=false);

    Option<nlohmann::json> search(const std::string & query, const std::vector<std::string> & search_fields,
                                  const std::string & simple_filter_query,
                                  const std::vector<std::string> & facet_fields,
                                  const std::vector<sort_by> & sort_fields,
                                  const std::vector<uint32_t>& num_typos,
                                  size_t per_page = 10, size_t page = 1,
                                  token_ordering token_order = FREQUENCY,
                                  const std::vector<bool>& prefixes = {true},
                                  size_t drop_tokens_threshold = Index::DROP_TOKENS_THRESHOLD,
                                  const spp::sparse_hash_set<std::string> & include_fields = spp::sparse_hash_set<std::string>(),
                                  const spp::sparse_hash_set<std::string> & exclude_fields = spp::sparse_hash_set<std::string>(),
                                  size_t max_facet_values=10,
                                  const std::string & simple_facet_query = "",
                                  const size_t snippet_threshold = 30,
                                  const size_t highlight_affix_num_tokens = 4,
                                  const std::string & highlight_full_fields = "",
                                  size_t typo_tokens_threshold = Index::TYPO_TOKENS_THRESHOLD,
                                  const std::string& pinned_hits_str="",
                                  const std::string& hidden_hits="",
                                  const std::vector<std::string>& group_by_fields={},
                                  size_t group_limit = 3,
                                  const std::string& highlight_start_tag="<mark>",
                                  const std::string& highlight_end_tag="</mark>",
                                  std::vector<uint32_t> query_by_weights={},
                                  size_t limit_hits=UINT32_MAX,
                                  bool prioritize_exact_match=true,
                                  bool pre_segmented_query=false,
                                  bool enable_overrides=true,
                                  const std::string& highlight_fields="",
                                  const bool exhaustive_search = false,
                                  size_t search_stop_millis = 6000*1000,
                                  size_t min_len_1typo = 4,
                                  size_t min_len_2typo = 7,
                                  enable_t split_join_tokens = fallback,
                                  size_t max_candidates = 4,
                                  const std::vector<enable_t>& infixes = {off},
                                  const size_t max_extra_prefix = INT16_MAX,
                                  const size_t max_extra_suffix = INT16_MAX,
                                  const size_t facet_query_num_typos = 2,
                                  const size_t filter_curated_hits_option = 2,
                                  const bool prioritize_token_position = false,
                                  const std::string& vector_query_str = "") const;

    Option<bool> get_filter_ids(const std::string & simple_filter_query,
                                std::vector<std::pair<size_t, uint32_t*>>& index_ids);

    Option<nlohmann::json> get(const std::string & id) const;

    Option<std::string> remove(const std::string & id, bool remove_from_store = true);

    Option<bool> remove_if_found(uint32_t seq_id, bool remove_from_store = true);

    size_t get_num_documents() const;

    DIRTY_VALUES parse_dirty_values_option(std::string& dirty_values) const;

    std::vector<char> get_symbols_to_index();

    std::vector<char> get_token_separators();

    std::string get_fallback_field_type();

    bool get_enable_nested_fields();

    // Override operations

    Option<uint32_t> add_override(const override_t & override);

    Option<uint32_t> remove_override(const std::string & id);

    std::map<std::string, override_t> get_overrides() {
        std::shared_lock lock(mutex);
        return overrides;
    };

    // synonym operations

    spp::sparse_hash_map<std::string, synonym_t> get_synonyms();

    bool get_synonym(const std::string& id, synonym_t& synonym);

    Option<bool> add_synonym(const nlohmann::json& syn_json);

    Option<bool> remove_synonym(const std::string & id);

    void synonym_reduction(const std::vector<std::string>& tokens,
                           std::vector<std::vector<std::string>>& results) const;

    SynonymIndex* get_synonym_index();

    // highlight ops

    static void highlight_text(const std::string& highlight_start_tag, const std::string& highlight_end_tag,
                               const std::string& text,
                               const std::map<size_t, size_t>& token_offsets,
                               size_t snippet_end_offset,
                               std::vector<std::string>& matched_tokens,
                               std::map<size_t, size_t>::iterator& offset_it,
                               std::stringstream& highlighted_text,
                               const uint8_t* index_symbols,
                               size_t snippet_start_offset);
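    // Determines which fields should be highlighted for a search response, honoring the
    // include/exclude field lists and per-field infix settings, and collects the resolved
    // entries into highlight_items.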
    void process_highlight_fields(const std::vector<std::string>& search_fields,
                                  const tsl::htrie_set<char>& exclude_fields,
                                  const tsl::htrie_set<char>& include_fields,
                                  const std::vector<std::string>& highlight_field_names,
                                  const std::vector<std::string>& highlight_full_field_names,
                                  const std::vector<enable_t>& infixes,
                                  std::vector<std::string>& q_tokens,
                                  const tsl::htrie_map<char, token_leaf>& qtoken_set,
                                  std::vector<highlight_field_t>& highlight_items) const;

    static void copy_highlight_doc(std::vector<highlight_field_t>& highlight_items,
                                   const nlohmann::json& src,
                                   nlohmann::json& dst);

    Option<bool> alter(nlohmann::json& alter_payload);

    void process_search_field_weights(const std::vector<std::string>& raw_search_fields,
                                      std::vector<uint32_t>& query_by_weights,
                                      std::vector<search_field_t>& weighted_search_fields,
                                      std::vector<std::string>& reordered_search_fields) const;
};

template<typename T>
bool Collection::highlight_nested_field(const nlohmann::json& hdoc, nlohmann::json& hobj,
                                        const nlohmann::json& fdoc, nlohmann::json& fobj,
                                        std::vector<std::string>& path_parts, size_t path_index,
                                        T func) {
    if(path_index == path_parts.size()) {
        // end of path: guaranteed to be a string
        if(!hobj.is_string()) {
            return false;
        }

        func(hobj, fobj);
        return true;
    }

    const std::string& fragment = path_parts[path_index];
    const auto& it = hobj.find(fragment);

    if(it != hobj.end()) {
        if(it.value().is_array()) {
            bool resolved = false;
            for(size_t i = 0; i < it.value().size(); i++) {
                auto& h_ele = it.value().at(i);
                auto& f_ele = fobj.empty() ? fobj : fobj[fragment][i];
                resolved |= highlight_nested_field(hdoc, h_ele, fdoc, f_ele, path_parts, path_index + 1, func);
            }
            return resolved;
        } else {
            auto& f_ele = fobj.empty() ? fobj : fobj[fragment];
            return highlight_nested_field(hdoc, it.value(), fdoc, f_ele, path_parts, path_index + 1, func);
        }
    }

    return false;
}
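// Illustrative sketch (not part of the public API): highlight_nested_field() walks a dotted
// field path such as "variants.title" through both the highlight doc and the full doc, and
// invokes the supplied functor on every leaf string it reaches. The variable names below are
// hypothetical and only show the calling convention:
//
//   std::vector<std::string> path_parts = {"variants", "title"};
//   Collection::highlight_nested_field(hdoc, hdoc, fdoc, fdoc, path_parts, 0,
//       [](nlohmann::json& h_leaf, nlohmann::json& f_leaf) {
//           // e.g. wrap matched tokens in h_leaf with highlight tags
//       });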