Fix posting_list_t::has_exact_match. (#1767)

* Add failing tests for `posting_list_t::has_exact_match`.

* Fix failing test.

* Add test case for `string[]` field.
Harpreet Sangar 2024-06-03 20:54:12 +05:30 committed by GitHub
parent 3291fca5ba
commit 2c3b595982
2 changed files with 74 additions and 3 deletions

src/posting_list.cpp

@@ -1331,8 +1331,8 @@ bool posting_list_t::has_exact_match(std::vector<posting_list_t::iterator_t>& po
                 start_offset_index++;
                 if(offset == (i + 1)) {
-                    // we have found a matching index, no need to look further
-                    return true;
+                    // we have found a matching index, no need to look further for this token.
+                    break;
                 }
                 if(offset > (i + 1)) {
@@ -1429,7 +1429,7 @@ bool posting_list_t::has_exact_match(std::vector<posting_list_t::iterator_t>& po
         }
     }
-    return false;
+    return true;
 }
 
 bool posting_list_t::found_token_sequence(const std::vector<token_positions_t>& token_positions,
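
Taken together, the two hunks invert the function's default: a single token sitting at its expected offset no longer short-circuits the whole check with `return true`; instead the per-token scan is broken out of, and `true` is returned only after every token has been verified. A minimal sketch of that control flow, using hypothetical names and a simplified data layout rather than the actual Typesense implementation, and assuming offsets are sorted:

#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch only: an exact match requires token i to occur at offset i + 1.
bool has_exact_match_sketch(const std::vector<std::vector<uint32_t>>& token_offsets) {
    for (std::size_t i = 0; i < token_offsets.size(); i++) {
        bool token_matched = false;
        for (uint32_t offset : token_offsets[i]) {
            if (offset == (i + 1)) {
                // Matching index found; stop scanning this token only
                // (the old code returned true for the whole field here).
                token_matched = true;
                break;
            }
            if (offset > (i + 1)) {
                break; // offsets are sorted, so this token cannot match
            }
        }
        if (!token_matched) {
            return false;
        }
    }
    // Every token sits at its expected position
    // (the old code returned false when it reached this point).
    return true;
}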

test/collection_filtering_test.cpp

@@ -139,6 +139,77 @@ TEST_F(CollectionFilteringTest, FilterOnTextFields) {
     ASSERT_EQ("Error with filter field `tags`: Filter value cannot be empty.", res_op.error());
     collectionManager.drop_collection("coll_array_fields");
 
+    auto schema_json =
+            R"({
+                "name": "title",
+                "fields": [
+                    {"name": "title", "type": "string"},
+                    {"name": "titles", "type": "string[]"}
+                ]
+            })"_json;
+    std::vector<nlohmann::json> documents = {
+            R"({
+                "title": "foo bar baz",
+                "titles": []
+            })"_json,
+            R"({
+                "title": "foo bar baz",
+                "titles": ["foo bar baz"]
+            })"_json,
+            R"({
+                "title": "foo bar baz",
+                "titles": ["bar foo baz", "foo bar baz"]
+            })"_json,
+            R"({
+                "title": "bar foo baz",
+                "titles": ["bar foo baz"]
+            })"_json,
+    };
+
+    auto collection_create_op = collectionManager.create_collection(schema_json);
+    ASSERT_TRUE(collection_create_op.ok());
+    for (auto const &json: documents) {
+        auto add_op = collection_create_op.get()->add(json.dump());
+        ASSERT_TRUE(add_op.ok());
+    }
+
+    std::map<std::string, std::string> req_params = {
+            {"collection", "title"},
+            {"q", "foo"},
+            {"query_by", "title"},
+            {"filter_by", "title:= foo bar baz"}
+    };
+    nlohmann::json embedded_params;
+    std::string json_res;
+    auto now_ts = std::chrono::duration_cast<std::chrono::microseconds>(
+            std::chrono::system_clock::now().time_since_epoch()).count();
+
+    auto search_op = collectionManager.do_search(req_params, embedded_params, json_res, now_ts);
+    ASSERT_TRUE(search_op.ok());
+
+    auto res_obj = nlohmann::json::parse(json_res);
+    ASSERT_EQ(3, res_obj["found"].get<size_t>());
+    ASSERT_EQ(3, res_obj["hits"].size());
+    ASSERT_EQ("2", res_obj["hits"][0]["document"].at("id"));
+    ASSERT_EQ("1", res_obj["hits"][1]["document"].at("id"));
+    ASSERT_EQ("0", res_obj["hits"][2]["document"].at("id"));
+
+    req_params = {
+            {"collection", "title"},
+            {"q", "foo"},
+            {"query_by", "titles"},
+            {"filter_by", "titles:= foo bar baz"}
+    };
+    search_op = collectionManager.do_search(req_params, embedded_params, json_res, now_ts);
+    ASSERT_TRUE(search_op.ok());
+
+    res_obj = nlohmann::json::parse(json_res);
+    ASSERT_EQ(2, res_obj["found"].get<size_t>());
+    ASSERT_EQ(2, res_obj["hits"].size());
+    ASSERT_EQ("2", res_obj["hits"][0]["document"].at("id"));
+    ASSERT_EQ("1", res_obj["hits"][1]["document"].at("id"));
 }
 
 TEST_F(CollectionFilteringTest, FacetFieldStringFiltering) {