Support overriding wildcard query.
commit 6bc60adbae
parent d0018a1746
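With this change, the search path also runs process_filter_overrides for the parsed query tokens (first hunk below), so an override whose rule query is "*" now takes effect on wildcard (q=*) searches; the new WildcardSearchOverride test exercises both a filter_by override and an includes (pinned document) override against a * query. A minimal usage sketch, assuming a test-style setup with an existing Collection* coll1 (the rule id "wildcard-filter" is illustrative; the JSON shape and calls mirror the test added in this commit):

    nlohmann::json override_json = R"({
        "id": "wildcard-filter",
        "rule": {
            "query": "*",
            "match": "exact"
        },
        "filter_by": "category: kids"
    })"_json;

    override_t ov;
    auto op = override_t::parse(override_json, "wildcard-filter", ov);
    ASSERT_TRUE(op.ok());
    coll1->add_override(ov);
    // A subsequent q=* search on coll1 should now return only documents
    // matching the override's filter_by clause.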
@@ -2155,6 +2155,10 @@ Option<nlohmann::json> Collection::search(std::string raw_query,
         parse_search_query(query, q_include_tokens,
                            field_query_tokens[0].q_exclude_tokens, field_query_tokens[0].q_phrases, "",
                            false, stopwords_set);
+
+        process_filter_overrides(filter_overrides, q_include_tokens, token_order, filter_tree_root,
+                                 included_ids, excluded_ids, override_metadata);
+
         for(size_t i = 0; i < q_include_tokens.size(); i++) {
             auto& q_include_token = q_include_tokens[i];
             field_query_tokens[0].q_include_tokens.emplace_back(i, q_include_token, (i == q_include_tokens.size() - 1),
@@ -1591,7 +1591,7 @@ TEST_F(CollectionAllFieldsTest, FieldNameMatchingRegexpShouldNotBeIndexedInNonAu
 }
 
 TEST_F(CollectionAllFieldsTest, EmbedFromFieldJSONInvalidField) {
-    EmbedderManager::set_model_dir("/tmp/typensense_test/models");
+    EmbedderManager::set_model_dir("/tmp/typesense_test/models");
     nlohmann::json field_json;
     field_json["name"] = "embedding";
     field_json["type"] = "float[]";
@@ -3728,7 +3728,7 @@ TEST_F(CollectionOverrideTest, WildcardTagRuleThatMatchesAllQueries) {
 
     // includes instead of filter_by
     coll1->remove_override("ov-1");
-    override_json1 = R"({
+    auto override_json2 = R"({
         "id": "ov-1",
         "rule": {
             "tags": ["*"]
@@ -3738,9 +3738,10 @@ TEST_F(CollectionOverrideTest, WildcardTagRuleThatMatchesAllQueries) {
         ]
     })"_json;
 
-    op = override_t::parse(override_json1, "ov-1", override1);
+    override_t override2;
+    op = override_t::parse(override_json2, "ov-2", override2);
     ASSERT_TRUE(op.ok());
-    coll1->add_override(override1);
+    coll1->add_override(override2);
 
     results = coll1->search("foobar", {"name"}, "",
                             {}, sort_fields, {2}, 10, 1, FREQUENCY,
@@ -3907,3 +3908,98 @@ TEST_F(CollectionOverrideTest, MetadataValidation) {
 
     collectionManager.drop_collection("coll1");
 }
+
+TEST_F(CollectionOverrideTest, WildcardSearchOverride) {
+    Collection* coll1;
+
+    std::vector<field> fields = {field("name", field_types::STRING, false),
+                                 field("category", field_types::STRING, true),};
+
+    coll1 = collectionManager.get_collection("coll1").get();
+    if (coll1 == nullptr) {
+        coll1 = collectionManager.create_collection("coll1", 1, fields, "").get();
+    }
+
+    nlohmann::json doc1;
+    doc1["id"] = "0";
+    doc1["name"] = "queryA";
+    doc1["category"] = "kids";
+
+    nlohmann::json doc2;
+    doc2["id"] = "1";
+    doc2["name"] = "queryA";
+    doc2["category"] = "kitchen";
+
+    nlohmann::json doc3;
+    doc3["id"] = "2";
+    doc3["name"] = "Clay Toy";
+    doc3["category"] = "home";
+
+    ASSERT_TRUE(coll1->add(doc1.dump()).ok());
+    ASSERT_TRUE(coll1->add(doc2.dump()).ok());
+    ASSERT_TRUE(coll1->add(doc3.dump()).ok());
+
+    std::vector<sort_by> sort_fields = {sort_by("_text_match", "DESC")};
+
+    nlohmann::json override_json1 = R"({
+        "id": "ov-1",
+        "rule": {
+            "query": "*",
+            "match": "exact"
+        },
+        "filter_by": "category: kids"
+    })"_json;
+
+    override_t override1;
+    auto op = override_t::parse(override_json1, "ov-1", override1);
+    ASSERT_TRUE(op.ok());
+    coll1->add_override(override1);
+
+    std::string override_tags = "";
+    auto results = coll1->search("*", {}, "",
+                                 {}, sort_fields, {2}, 10, 1, FREQUENCY,
+                                 {false}, Index::DROP_TOKENS_THRESHOLD,
+                                 spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4, "title", 20, {}, {}, {}, 0,
+                                 "<mark>", "</mark>", {}, 1000, true, false, true, "", false, 10000,
+                                 4, 7, fallback, 4, {off}, 100, 100, 2, 2, false, "", true, 0, max_score, 100, 0,
+                                 0, HASH, 30000, 2, "", {}, {}, "right_to_left",
+                                 true, true, false, -1, "", override_tags).get();
+
+    ASSERT_EQ(1, results["hits"].size());
+    ASSERT_EQ("0", results["hits"][0]["document"]["id"].get<std::string>());
+
+    // includes instead of filter_by
+    coll1->remove_override("ov-1");
+
+    override_t override2;
+    auto override_json2 = R"({
+        "id": "ov-2",
+        "rule": {
+            "query": "*",
+            "match": "exact"
+        },
+        "includes": [
+            {"id": "1", "position": 1}
+        ]
+    })"_json;
+
+    op = override_t::parse(override_json2, "ov-2", override2);
+    ASSERT_TRUE(op.ok());
+    coll1->add_override(override2);
+
+    results = coll1->search("*", {}, "",
+                            {}, sort_fields, {2}, 10, 1, FREQUENCY,
+                            {false}, Index::DROP_TOKENS_THRESHOLD,
+                            spp::sparse_hash_set<std::string>(),
+                            spp::sparse_hash_set<std::string>(), 10, "", 30, 4, "title", 20, {}, {}, {}, 0,
+                            "<mark>", "</mark>", {}, 1000, true, false, true, "", false, 10000,
+                            4, 7, fallback, 4, {off}, 100, 100, 2, 2, false, "", true, 0, max_score, 100, 0,
+                            0, HASH, 30000, 2, "", {}, {}, "right_to_left",
+                            true, true, false, -1, "", override_tags).get();
+
+    ASSERT_EQ(3, results["hits"].size());
+    ASSERT_EQ("1", results["hits"][0]["document"]["id"].get<std::string>());
+
+    collectionManager.drop_collection("coll1");
+}