Mirror of https://github.com/typesense/typesense.git, synced 2025-05-20 13:42:26 +08:00
Merge branch 'v0.24.1' into v0.25
# Conflicts:
#	src/collection.cpp
#	test/collection_locale_test.cpp

Commit 7655f6de1d
@@ -524,8 +524,8 @@ struct filter_node_t {
     filter filter_exp;
     FILTER_OPERATOR filter_operator;
     bool isOperator;
-    filter_node_t* left;
-    filter_node_t* right;
+    filter_node_t* left = nullptr;
+    filter_node_t* right = nullptr;
 
     filter_node_t(filter filter_exp)
             : filter_exp(std::move(filter_exp)),
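
Note: with the child pointers default-initialized to nullptr, a node whose children were never assigned can still be destroyed safely, since deleting a null pointer is a no-op while deleting an uninitialized pointer is undefined behavior. A minimal illustrative sketch of that hazard; the owning destructor below is assumed for illustration and is not part of this hunk:

struct node_t {
    node_t* left = nullptr;   // without the initializers, an unwired node
    node_t* right = nullptr;  // would carry garbage pointers

    ~node_t() {
        delete left;    // delete on nullptr is a no-op; on a garbage pointer it is UB
        delete right;
    }
};
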
@@ -1144,6 +1144,8 @@ Option<nlohmann::json> Collection::search(const std::string & raw_query,
     filter_node_t* filter_tree_root = nullptr;
     Option<bool> parse_filter_op = filter::parse_filter_query(filter_query, search_schema,
                                                               store, doc_id_prefix, filter_tree_root);
+    std::unique_ptr<filter_node_t> filter_tree_root_guard(filter_tree_root);
+
     if(!parse_filter_op.ok()) {
         return Option<nlohmann::json>(parse_filter_op.code(), parse_filter_op.error());
     }
@@ -1389,6 +1391,8 @@ Option<nlohmann::json> Collection::search(const std::string & raw_query,
                                                  filter_curated_hits, split_join_tokens, vector_query,
                                                  facet_sample_percent, facet_sample_threshold);
 
+    std::unique_ptr<search_args> search_params_guard(search_params);
+
     index->run_search(search_params);
 
     // for grouping we have to re-aggregate
@@ -1901,11 +1905,6 @@ Option<nlohmann::json> Collection::search(const std::string & raw_query,
             result["facet_counts"].push_back(facet_result);
         }
 
-    // free search params
-    delete search_params;
-
-    delete filter_tree_root;
-
     result["search_cutoff"] = search_cutoff;
 
     result["request_params"] = nlohmann::json::object();
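
The two unique_ptr guards introduced above replace the manual delete calls removed in this hunk, so filter_tree_root and search_params are now freed on every exit path (including the early error returns) rather than only when control reaches the end of Collection::search. A minimal sketch of the pattern, with hypothetical names rather than the actual Collection API:

#include <memory>

struct tree_node { /* stand-in for filter_node_t */ };

// may allocate into `root` and may fail; mirrors the shape of parse_filter_query
bool parse_sketch(tree_node*& root) {
    root = new tree_node();
    return true;
}

int search_sketch() {
    tree_node* root = nullptr;
    bool ok = parse_sketch(root);
    std::unique_ptr<tree_node> guard(root);   // takes ownership of the allocation

    if(!ok) {
        return 1;   // early return: guard still frees root
    }

    // ... use root ...
    return 0;       // no manual delete needed on the success path either
}
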
@@ -389,7 +389,7 @@ Option<bool> toParseTree(std::queue<std::string>& postfix, filter_node_t*& root,
         const std::string expression = postfix.front();
         postfix.pop();
 
-        filter_node_t* filter_node;
+        filter_node_t* filter_node = nullptr;
         if (isOperator(expression)) {
             auto message = "Could not parse the filter query: unbalanced `" + expression + "` operands.";
 
@@ -400,6 +400,7 @@ Option<bool> toParseTree(std::queue<std::string>& postfix, filter_node_t*& root,
             nodeStack.pop();
 
             if (nodeStack.empty()) {
+                delete operandB;
                 return Option<bool>(400, message);
             }
             auto operandA = nodeStack.top();
@@ -410,6 +411,11 @@ Option<bool> toParseTree(std::queue<std::string>& postfix, filter_node_t*& root,
         filter filter_exp;
         Option<bool> toFilter_op = toFilter(expression, filter_exp, search_schema, store, doc_id_prefix);
         if (!toFilter_op.ok()) {
+            while(!nodeStack.empty()) {
+                auto filterNode = nodeStack.top();
+                delete filterNode;
+                nodeStack.pop();
+            }
             return toFilter_op;
         }
 
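
The loop added above drains the operand stack before the parse error is propagated, so nodes already allocated for earlier postfix tokens are not leaked when toFilter fails. A simplified sketch of the same cleanup pattern, not the actual toParseTree signature:

#include <stack>

struct node_t {
    node_t* left = nullptr;
    node_t* right = nullptr;
    ~node_t() { delete left; delete right; }  // assumed owning destructor
};

// On failure, free everything already pushed before returning; otherwise the
// heap-allocated nodes become unreachable and leak.
bool push_token_sketch(std::stack<node_t*>& nodeStack, bool token_is_bad) {
    if(token_is_bad) {
        while(!nodeStack.empty()) {
            delete nodeStack.top();   // frees the node and its subtree
            nodeStack.pop();
        }
        return false;
    }
    nodeStack.push(new node_t());
    return true;
}
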
@@ -2114,7 +2114,7 @@ bool Index::static_filter_query_eval(const override_t* override,
     if ((override->rule.match == override_t::MATCH_EXACT && override->rule.query == query) ||
         (override->rule.match == override_t::MATCH_CONTAINS &&
          StringUtils::contains_word(query, override->rule.query))) {
-        filter_node_t* new_filter_tree_root;
+        filter_node_t* new_filter_tree_root = nullptr;
         Option<bool> filter_op = filter::parse_filter_query(override->filter_by, search_schema,
                                                             store, "", new_filter_tree_root);
         if (filter_op.ok()) {
@@ -2269,7 +2269,7 @@ void Index::process_filter_overrides(const std::vector<const override_t*>& filte
                                          token_order, absorbed_tokens, filter_by_clause);
 
         if (resolved_override) {
-            filter_node_t* new_filter_tree_root;
+            filter_node_t* new_filter_tree_root = nullptr;
             Option<bool> filter_op = filter::parse_filter_query(filter_by_clause, search_schema,
                                                                 store, "", new_filter_tree_root);
             if (filter_op.ok()) {
@@ -353,6 +353,16 @@ TEST_F(CollectionFilteringTest, HandleBadlyFormedFilterQuery) {
     nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "tagzz: gold", facets, sort_fields, {0}, 10, 1, FREQUENCY, {false}).get();
     ASSERT_EQ(0, results["hits"].size());
 
+    // compound filter expression containing an unknown field
+    results = coll_array_fields->search("Jeremy", query_fields,
+                                        "(age:>0 || timestamps:> 0) || tagzz: gold", facets, sort_fields, {0}, 10, 1, FREQUENCY, {false}).get();
+    ASSERT_EQ(0, results["hits"].size());
+
+    // unbalanced parenthesis
+    results = coll_array_fields->search("Jeremy", query_fields,
+                                        "(age:>0 || timestamps:> 0) || ", facets, sort_fields, {0}, 10, 1, FREQUENCY, {false}).get();
+    ASSERT_EQ(0, results["hits"].size());
+
     // searching using a string for a numeric field
     results = coll_array_fields->search("Jeremy", query_fields, "age: abcdef", facets, sort_fields, {0}, 10, 1, FREQUENCY, {false}).get();
     ASSERT_EQ(0, results["hits"].size());
@@ -779,6 +779,35 @@ TEST_F(CollectionOverrideTest, IncludeOverrideWithFilterBy) {
     ASSERT_EQ(2, results["hits"].size());
     ASSERT_EQ("2", results["hits"][0]["document"]["id"].get<std::string>());
     ASSERT_EQ("0", results["hits"][1]["document"]["id"].get<std::string>());
+
+    // when bad filter by clause is used in override
+    override_json_include = {
+        {"id", "include-rule-2"},
+        {
+            "rule", {
+                {"query", "test"},
+                {"match", override_t::MATCH_EXACT}
+            }
+        },
+        {"filter_curated_hits", false},
+        {"stop_processing", false},
+        {"remove_matched_tokens", false},
+        {"filter_by", "price >55"}
+    };
+
+    override_json_include["includes"] = nlohmann::json::array();
+    override_json_include["includes"][0] = nlohmann::json::object();
+    override_json_include["includes"][0]["id"] = "2";
+    override_json_include["includes"][0]["position"] = 1;
+
+    override_t override_include2;
+    op = override_t::parse(override_json_include, "include-rule-2", override_include2);
+    ASSERT_TRUE(op.ok());
+    coll1->add_override(override_include2);
+
+    results = coll1->search("random-name", {"name"}, "",
+                            {}, sort_fields, {2}, 10, 1, FREQUENCY, {true}, 0).get();
+    ASSERT_EQ(0, results["hits"].size());
 }
 
 TEST_F(CollectionOverrideTest, ReplaceQuery) {
@@ -1673,6 +1702,52 @@ TEST_F(CollectionOverrideTest, DynamicFilteringMissingField) {
     collectionManager.drop_collection("coll1");
 }
 
+TEST_F(CollectionOverrideTest, DynamicFilteringBadFilterBy) {
+    Collection *coll1;
+
+    std::vector<field> fields = {field("name", field_types::STRING, false),
+                                 field("category", field_types::STRING, true),
+                                 field("points", field_types::INT32, false)};
+
+    coll1 = collectionManager.get_collection("coll1").get();
+    if(coll1 == nullptr) {
+        coll1 = collectionManager.create_collection("coll1", 1, fields, "points").get();
+    }
+
+    nlohmann::json doc1;
+    doc1["id"] = "0";
+    doc1["name"] = "Amazing Shoes";
+    doc1["category"] = "shoes";
+    doc1["points"] = 3;
+
+    ASSERT_TRUE(coll1->add(doc1.dump()).ok());
+
+    std::vector<sort_by> sort_fields = { sort_by("_text_match", "DESC"), sort_by("points", "DESC") };
+
+    nlohmann::json override_json = {
+        {"id", "dynamic-cat-filter"},
+        {
+            "rule", {
+                {"query", "{category}"}, // this field does NOT exist
+                {"match", override_t::MATCH_EXACT}
+            }
+        },
+        {"remove_matched_tokens", true},
+        {"filter_by", "category: {category} && foo"}
+    };
+
+    override_t override;
+    auto op = override_t::parse(override_json, "dynamic-cat-filter", override);
+    ASSERT_TRUE(op.ok());
+    coll1->add_override(override);
+
+    auto results = coll1->search("shoes", {"name", "category"}, "",
+                                 {}, sort_fields, {2, 2}, 10).get();
+
+    ASSERT_EQ(1, results["hits"].size());
+    collectionManager.drop_collection("coll1");
+}
+
 TEST_F(CollectionOverrideTest, DynamicFilteringMultiplePlaceholders) {
     Collection* coll1;
 