Mirror of https://github.com/typesense/typesense.git (synced 2025-05-18 04:32:38 +08:00)
Proper API responses when pagination exceeds result boundaries.
commit ffba0371b0
parent dda3a0a06a
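In short, Collection::search now returns Option<nlohmann::json> instead of a bare nlohmann::json, so validation and pagination failures carry an HTTP-style status code and message rather than an "error" key buried inside the result body. Below is a minimal sketch of the new call-site pattern, assuming the project's Collection and Option headers plus the usual standard headers are included; the collection pointer, field vectors and query values are placeholders, not code from this commit:

    // Sketch only -- mirrors how the call sites in this diff consume the new return type.
    // `collection` is assumed to be a Collection* obtained from the CollectionManager.
    std::vector<std::string> search_fields = {"title"};   // placeholder field names
    std::vector<std::string> facet_fields;
    std::vector<sort_field> sort_fields;

    Option<nlohmann::json> result_op = collection->search("rocket launch", search_fields, "",
                                                          facet_fields, sort_fields, 0 /* num_typos */,
                                                          10 /* per_page */, 1 /* page */);
    if(!result_op.ok()) {
        // e.g. 400 for an unknown field, 422 when pagination exceeds MAX_RESULTS
        std::cerr << result_op.code() << ": " << result_op.error() << std::endl;
    } else {
        nlohmann::json result = result_op.get();           // success: no "error" key any more
        std::cout << result["found"] << " found, " << result["hits"].size() << " in this page" << std::endl;
    }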
TODO.md (2 changed lines)

@@ -42,7 +42,7 @@
 - ~~Pagination parameter~~
 - ~~Drop collection API~~
 - ~~JSONP response~~
-- "error":"Not found." is sent when query has no hits
+- ~~"error":"Not found." is sent when query has no hits~~
 - Number of records in collection
 - Use rocksdb batch put for atomic insertion
 - Fix API response codes
@@ -136,7 +136,7 @@ public:
 
     Option<std::string> add(const std::string & json_str);
 
-    nlohmann::json search(std::string query, const std::vector<std::string> search_fields,
+    Option<nlohmann::json> search(std::string query, const std::vector<std::string> search_fields,
                           const std::string & simple_filter_query, const std::vector<std::string> & facet_fields,
                           const std::vector<sort_field> & sort_fields, const int num_typos,
                           const size_t per_page = 10, const size_t page = 1,
@@ -17,10 +17,17 @@ public:
 
     }
 
-    Option(uint32_t code, const std::string & error_msg): error_code(code), error_msg(error_msg), is_ok(false) {
+    Option(const uint32_t code, const std::string & error_msg): error_code(code), error_msg(error_msg), is_ok(false) {
 
     }
 
+    Option(const Option &obj) {
+        value = obj.value;
+        is_ok = obj.is_ok;
+        error_msg = obj.error_msg;
+        error_code = obj.error_code;
+    }
+
     bool ok() const {
         return is_ok;
     }
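For reference, here is a stripped-down illustration of the Option<T> interface this commit leans on, pieced together from the members visible in the hunk above and the calls made elsewhere in the diff (ok(), get(), error(), code()). It is a simplified sketch, not the project's actual header:

    #include <cstdint>
    #include <string>

    // Simplified illustration of the Option<T> used throughout this commit.
    template <typename T>
    struct Option {
        T value;
        bool is_ok;
        std::string error_msg;
        uint32_t error_code = 0;

        // success: wrap a value
        Option(const T & value): value(value), is_ok(true) {}

        // failure: carry an HTTP-style code and a message
        Option(const uint32_t code, const std::string & error_msg):
                error_code(code), error_msg(error_msg), is_ok(false) {}

        bool ok() const { return is_ok; }
        T get() const { return value; }
        std::string error() const { return error_msg; }
        uint32_t code() const { return error_code; }
    };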
src/api.cpp (15 changed lines)

@@ -207,7 +207,7 @@ void get_search(http_req & req, http_res & res) {
         token_order = MAX_SCORE;
     }
 
-    nlohmann::json result = collection->search(req.params["q"], search_fields, filter_str, facet_fields,
+    Option<nlohmann::json> result_op = collection->search(req.params["q"], search_fields, filter_str, facet_fields,
                                                sort_fields, std::stoi(req.params[NUM_TYPOS]),
                                                std::stoi(req.params[PER_PAGE]), std::stoi(req.params[PAGE]),
                                                token_order, prefix);
@@ -215,12 +215,19 @@ void get_search(http_req & req, http_res & res) {
     uint64_t timeMillis = std::chrono::duration_cast<std::chrono::milliseconds>(
                             std::chrono::high_resolution_clock::now() - begin).count();
 
-    result["took_ms"] = timeMillis;
+    if(!result_op.ok()) {
+        const std::string & json_res_body = (req.params.count(CALLBACK) == 0) ? result_op.error() :
+                                            (req.params[CALLBACK] + "(" + result_op.error() + ");");
+        return res.send(result_op.code(), json_res_body);
+    }
+
+    nlohmann::json result = result_op.get();
+    result["took_ms"] = timeMillis;
     const std::string & results_json_str = result.dump();
 
-    struct rusage r_usage;
-    getrusage(RUSAGE_SELF,&r_usage);
+    //struct rusage r_usage;
+    //getrusage(RUSAGE_SELF,&r_usage);
     //std::cout << "Memory usage: " << r_usage.ru_maxrss << std::endl;
 
     if(req.params.count(CALLBACK) == 0) {
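To make the error path above concrete: the handler uses the Option's code as the HTTP status and its message as the response body, and it still honours the JSONP callback parameter by wrapping the message. A small standalone sketch of just that body selection; the std::map, the "callback" key and the "cb" value are illustrative stand-ins for req.params and the CALLBACK constant, and <MAX_RESULTS> stands for whatever the constant expands to:

    #include <iostream>
    #include <map>
    #include <string>

    // Sketch of the response-body selection used in get_search() above.
    std::string error_body(const std::map<std::string, std::string> & params, const std::string & error_msg) {
        if(params.count("callback") == 0) {
            return error_msg;                                   // plain message body
        }
        return params.at("callback") + "(" + error_msg + ");"; // JSONP-wrapped body
    }

    int main() {
        std::map<std::string, std::string> params = { {"callback", "cb"} };
        // prints: cb(Only the first <MAX_RESULTS> results are available.);
        std::cout << error_body(params, "Only the first <MAX_RESULTS> results are available.") << std::endl;
        return 0;
    }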
@@ -557,7 +557,7 @@ Option<uint32_t> Collection::do_filtering(uint32_t** filter_ids_out, const std::
     return Option<>(filter_ids_length);
 }
 
-nlohmann::json Collection::search(std::string query, const std::vector<std::string> search_fields,
+Option<nlohmann::json> Collection::search(std::string query, const std::vector<std::string> search_fields,
                                   const std::string & simple_filter_query, const std::vector<std::string> & facet_fields,
                                   const std::vector<sort_field> & sort_fields, const int num_typos,
                                   const size_t per_page, const size_t page,
@@ -568,22 +568,22 @@ nlohmann::json Collection::search(std::string query, const std::vector<std::stri
     // validate search fields
     for(const std::string & field_name: search_fields) {
         if(search_schema.count(field_name) == 0) {
-            result["error"] = "Could not find a search field named `" + field_name + "` in the schema.";
-            return result;
+            std::string error = "Could not find a search field named `" + field_name + "` in the schema.";
+            return Option<nlohmann::json>(400, error);
         }
 
         field search_field = search_schema.at(field_name);
         if(search_field.type != field_types::STRING && search_field.type != field_types::STRING_ARRAY) {
-            result["error"] = "Search field `" + field_name + "` should be a string or a string array.";
-            return result;
+            std::string error = "Search field `" + field_name + "` should be a string or a string array.";
+            return Option<nlohmann::json>(400, error);
         }
     }
 
     // validate facet fields
     for(const std::string & field_name: facet_fields) {
         if(facet_schema.count(field_name) == 0) {
-            result["error"] = "Could not find a facet field named `" + field_name + "` in the schema.";
-            return result;
+            std::string error = "Could not find a facet field named `" + field_name + "` in the schema.";
+            return Option<nlohmann::json>(400, error);
         }
         facets.push_back(facet(field_name));
     }
@@ -594,16 +594,16 @@ nlohmann::json Collection::search(std::string query, const std::vector<std::stri
 
     for(const sort_field & _sort_field: sort_fields) {
         if(sort_index.count(_sort_field.name) == 0) {
-            result["error"] = "Could not find a sort field named `" + _sort_field.name + "` in the schema.";
-            return result;
+            std::string error = "Could not find a sort field named `" + _sort_field.name + "` in the schema.";
+            return Option<nlohmann::json>(400, error);
         }
 
         std::string sort_order = _sort_field.order;
         StringUtils::toupper(sort_order);
 
         if(sort_order != sort_field_const::asc && sort_order != sort_field_const::desc) {
-            result["error"] = "Order for sort field` " + _sort_field.name + "` should be either ASC or DESC.";
-            return result;
+            std::string error = "Order for sort field` " + _sort_field.name + "` should be either ASC or DESC.";
+            return Option<nlohmann::json>(400, error);
         }
 
         sort_fields_std.push_back({_sort_field.name, sort_order});
@@ -613,15 +613,15 @@ nlohmann::json Collection::search(std::string query, const std::vector<std::stri
     uint32_t* filter_ids = nullptr;
     Option<uint32_t> op_filter_ids_length = do_filtering(&filter_ids, simple_filter_query);
     if(!op_filter_ids_length.ok()) {
-        result["error"] = op_filter_ids_length.error();
-        return result;
+        return Option<nlohmann::json>(op_filter_ids_length.code(), op_filter_ids_length.error());
     }
 
     const uint32_t filter_ids_length = op_filter_ids_length.get();
 
     // check for valid pagination
     if((page * per_page) > MAX_RESULTS) {
-        result["error"] = "Cannot paginate past " + std::to_string(MAX_RESULTS) + " results.";
-        return result;
+        std::string message = "Only the first " + std::to_string(MAX_RESULTS) + " results are available.";
+        return Option<nlohmann::json>(422, message);
     }
 
     const size_t num_results = (page * per_page);
@@ -661,13 +661,13 @@ nlohmann::json Collection::search(std::string query, const std::vector<std::stri
     });
 
     result["hits"] = nlohmann::json::array();
+    result["found"] = all_result_ids_len;
 
     const int start_result_index = (page - 1) * per_page;
     const int kvsize = field_order_kvs.size();
 
     if(start_result_index > (kvsize - 1)) {
-        result["error"] = "Not found.";
-        return result;
+        return Option<nlohmann::json>(result);
     }
 
     const int end_result_index = std::min(int(page * per_page), kvsize) - 1;
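The two hunks just above are the heart of the commit: asking for a page that lies beyond MAX_RESULTS is rejected up front with a 422 and an explanatory message, while merely paginating past the last matching hit now returns a normal, empty "hits" array (with "found" still reporting the total) instead of an "error": "Not found." body. A standalone sketch of that decision, with MAX_RESULTS and the match count passed in as assumed inputs rather than taken from the real class:

    #include <cstddef>

    // Sketch only -- mirrors the two checks in Collection::search() above.
    // max_results stands in for MAX_RESULTS; matches is the number of hits for the query.
    enum class PageOutcome { Rejected422, EmptyPage, NormalPage };

    PageOutcome classify_page(size_t page, size_t per_page, size_t max_results, size_t matches) {
        if(page * per_page > max_results) {
            // -> Option<nlohmann::json>(422, "Only the first <MAX_RESULTS> results are available.")
            return PageOutcome::Rejected422;
        }
        const size_t start_result_index = (page - 1) * per_page;
        if(start_result_index >= matches) {
            // -> empty "hits", with "found" still set to the total match count
            return PageOutcome::EmptyPage;
        }
        return PageOutcome::NormalPage;   // a normal page of results
    }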
@@ -727,8 +727,6 @@ nlohmann::json Collection::search(std::string query, const std::vector<std::stri
         result["hits"].push_back(document);
     }
 
-    result["found"] = all_result_ids_len;
-
     result["facet_counts"] = nlohmann::json::array();
 
     // populate facets
@@ -88,6 +88,7 @@ const char* HttpServer::get_status_reason(uint32_t status_code) {
         case 403: return "Forbidden";
         case 404: return "Not Found";
         case 409: return "Conflict";
+        case 422: return "Unprocessable Entity";
         case 500: return "Internal Server Error";
         default: return "";
     }
@@ -100,7 +100,7 @@ TEST_F(CollectionManagerTest, RestoreRecordsOnRestart) {
     std::vector<std::string> search_fields = {"starring", "title"};
     std::vector<std::string> facets;
 
-    nlohmann::json results = collection1->search("thomas", search_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = collection1->search("thomas", search_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     spp::sparse_hash_map<std::string, field> schema = collection1->get_schema();
@@ -122,7 +122,7 @@ TEST_F(CollectionManagerTest, RestoreRecordsOnRestart) {
     ASSERT_EQ(schema.size(), collection1->get_schema().size());
     ASSERT_EQ("points", collection1->get_token_ranking_field());
 
-    results = collection1->search("thomas", search_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = collection1->search("thomas", search_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 }
 
@@ -63,7 +63,7 @@ protected:
 
 TEST_F(CollectionTest, ExactSearchShouldBeStable) {
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("the", query_fields, "", facets, sort_fields, 0, 10);
+    nlohmann::json results = collection->search("the", query_fields, "", facets, sort_fields, 0, 10).get();
     ASSERT_EQ(7, results["hits"].size());
     ASSERT_EQ(7, results["found"].get<int>());
 
@@ -80,7 +80,7 @@ TEST_F(CollectionTest, ExactSearchShouldBeStable) {
     // check ASC sorting
     std::vector<sort_field> sort_fields_asc = { sort_field("points", "ASC") };
 
-    results = collection->search("the", query_fields, "", facets, sort_fields_asc, 0, 10);
+    results = collection->search("the", query_fields, "", facets, sort_fields_asc, 0, 10).get();
     ASSERT_EQ(7, results["hits"].size());
     ASSERT_EQ(7, results["found"].get<int>());
 
@@ -96,7 +96,7 @@
 
 TEST_F(CollectionTest, ExactPhraseSearch) {
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("rocket launch", query_fields, "", facets, sort_fields, 0, 10);
+    nlohmann::json results = collection->search("rocket launch", query_fields, "", facets, sort_fields, 0, 10).get();
     ASSERT_EQ(5, results["hits"].size());
     ASSERT_EQ(5, results["found"].get<uint32_t>());
 
@@ -120,7 +120,7 @@ TEST_F(CollectionTest, ExactPhraseSearch) {
 
     // Check ASC sort order
     std::vector<sort_field> sort_fields_asc = { sort_field("points", "ASC") };
-    results = collection->search("rocket launch", query_fields, "", facets, sort_fields_asc, 0, 10);
+    results = collection->search("rocket launch", query_fields, "", facets, sort_fields_asc, 0, 10).get();
     ASSERT_EQ(5, results["hits"].size());
     ASSERT_EQ(5, results["found"].get<uint32_t>());
 
@@ -134,7 +134,7 @@
     }
 
     // Check pagination
-    results = collection->search("rocket launch", query_fields, "", facets, sort_fields, 0, 3);
+    results = collection->search("rocket launch", query_fields, "", facets, sort_fields, 0, 3).get();
     ASSERT_EQ(3, results["hits"].size());
     ASSERT_EQ(4, results["found"].get<uint32_t>());
 
@@ -151,7 +151,7 @@
 TEST_F(CollectionTest, SkipUnindexedTokensDuringPhraseSearch) {
     // Tokens that are not found in the index should be skipped
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("DoesNotExist from", query_fields, "", facets, sort_fields, 0, 10);
+    nlohmann::json results = collection->search("DoesNotExist from", query_fields, "", facets, sort_fields, 0, 10).get();
     ASSERT_EQ(2, results["hits"].size());
 
     std::vector<std::string> ids = {"2", "17"};
@@ -164,7 +164,7 @@
     }
 
     // with non-zero cost
-    results = collection->search("DoesNotExist from", query_fields, "", facets, sort_fields, 1, 10);
+    results = collection->search("DoesNotExist from", query_fields, "", facets, sort_fields, 1, 10).get();
     ASSERT_EQ(2, results["hits"].size());
 
     for(size_t i = 0; i < results["hits"].size(); i++) {
@@ -175,7 +175,7 @@
     }
 
     // with 2 indexed words
-    results = collection->search("from DoesNotExist insTruments", query_fields, "", facets, sort_fields, 1, 10);
+    results = collection->search("from DoesNotExist insTruments", query_fields, "", facets, sort_fields, 1, 10).get();
     ASSERT_EQ(2, results["hits"].size());
     ids = {"2", "17"};
 
@@ -187,17 +187,17 @@
     }
 
     results.clear();
-    results = collection->search("DoesNotExist1 DoesNotExist2", query_fields, "", facets, sort_fields, 0, 10);
+    results = collection->search("DoesNotExist1 DoesNotExist2", query_fields, "", facets, sort_fields, 0, 10).get();
     ASSERT_EQ(0, results["hits"].size());
 
     results.clear();
-    results = collection->search("DoesNotExist1 DoesNotExist2", query_fields, "", facets, sort_fields, 2, 10);
+    results = collection->search("DoesNotExist1 DoesNotExist2", query_fields, "", facets, sort_fields, 2, 10).get();
     ASSERT_EQ(0, results["hits"].size());
 }
 
 TEST_F(CollectionTest, PartialPhraseSearch) {
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("rocket research", query_fields, "", facets, sort_fields, 0, 10);
+    nlohmann::json results = collection->search("rocket research", query_fields, "", facets, sort_fields, 0, 10).get();
     ASSERT_EQ(4, results["hits"].size());
 
     std::vector<std::string> ids = {"1", "8", "16", "17"};
@@ -212,7 +212,7 @@ TEST_F(CollectionTest, PartialPhraseSearch) {
 
 TEST_F(CollectionTest, QueryWithTypo) {
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("kind biologcal", query_fields, "", facets, sort_fields, 2, 3);
+    nlohmann::json results = collection->search("kind biologcal", query_fields, "", facets, sort_fields, 2, 3).get();
     ASSERT_EQ(3, results["hits"].size());
 
     std::vector<std::string> ids = {"19", "20", "21"};
@@ -225,7 +225,7 @@
     }
 
     results.clear();
-    results = collection->search("fer thx", query_fields, "", facets, sort_fields, 1, 3);
+    results = collection->search("fer thx", query_fields, "", facets, sort_fields, 1, 3).get();
     ids = {"1", "10", "13"};
 
     ASSERT_EQ(3, results["hits"].size());
@@ -240,7 +240,7 @@
 
 TEST_F(CollectionTest, TypoTokenRankedByScoreAndFrequency) {
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 2, 1, MAX_SCORE, false);
+    nlohmann::json results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 2, 1, MAX_SCORE, false).get();
     ASSERT_EQ(2, results["hits"].size());
     std::vector<std::string> ids = {"22", "23"};
 
@@ -251,7 +251,7 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 3, 1, FREQUENCY, false);
+    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 3, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["hits"].size());
     ids = {"3", "12", "24"};
 
@@ -263,19 +263,19 @@
     }
 
     // Check pagination
-    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 1, 1, FREQUENCY, false);
+    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 1, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["found"].get<int>());
     ASSERT_EQ(1, results["hits"].size());
     std::string solo_id = results["hits"].at(0)["id"];
     ASSERT_STREQ("3", solo_id.c_str());
 
-    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 2, 1, FREQUENCY, false);
+    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 2, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["found"].get<int>());
     ASSERT_EQ(2, results["hits"].size());
 
     // Check total ordering
 
-    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 10, 1, FREQUENCY, false);
+    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(5, results["hits"].size());
     ids = {"3", "12", "24", "22", "23"};
 
@@ -286,7 +286,7 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 10, 1, MAX_SCORE, false);
+    results = collection->search("loox", query_fields, "", facets, sort_fields, 1, 10, 1, MAX_SCORE, false).get();
     ASSERT_EQ(5, results["hits"].size());
     ids = {"22", "23", "3", "12", "24"};
 
@@ -301,7 +301,7 @@
 TEST_F(CollectionTest, TextContainingAnActualTypo) {
     // A line contains "ISX" but not "what" - need to ensure that correction to "ISS what" happens
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("ISX what", query_fields, "", facets, sort_fields, 1, 4, 1, FREQUENCY, false);
+    nlohmann::json results = collection->search("ISX what", query_fields, "", facets, sort_fields, 1, 4, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
     ASSERT_EQ(4, results["found"].get<uint32_t>());
 
@@ -315,7 +315,7 @@
     }
 
     // Record containing exact token match should appear first
-    results = collection->search("ISX", query_fields, "", facets, sort_fields, 1, 10, 1, FREQUENCY, false);
+    results = collection->search("ISX", query_fields, "", facets, sort_fields, 1, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(8, results["hits"].size());
     ASSERT_EQ(8, results["found"].get<uint32_t>());
 
@@ -331,7 +331,7 @@
 
 TEST_F(CollectionTest, PrefixSearching) {
     std::vector<std::string> facets;
-    nlohmann::json results = collection->search("ex", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, true);
+    nlohmann::json results = collection->search("ex", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, true).get();
     ASSERT_EQ(2, results["hits"].size());
     std::vector<std::string> ids = {"12", "6"};
 
@@ -342,7 +342,7 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = collection->search("ex", query_fields, "", facets, sort_fields, 0, 10, 1, MAX_SCORE, true);
+    results = collection->search("ex", query_fields, "", facets, sort_fields, 0, 10, 1, MAX_SCORE, true).get();
     ASSERT_EQ(2, results["hits"].size());
     ids = {"6", "12"};
 
@@ -353,7 +353,7 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = collection->search("what ex", query_fields, "", facets, sort_fields, 0, 10, 1, MAX_SCORE, true);
+    results = collection->search("what ex", query_fields, "", facets, sort_fields, 0, 10, 1, MAX_SCORE, true).get();
     ASSERT_EQ(9, results["hits"].size());
     ids = {"6", "12", "19", "22", "13", "8", "15", "24", "21"};
 
@@ -387,7 +387,7 @@ TEST_F(CollectionTest, MultipleFields) {
 
     query_fields = {"title", "starring"};
     std::vector<std::string> facets;
-    nlohmann::json results = coll_mul_fields->search("Will", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = coll_mul_fields->search("Will", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     std::vector<std::string> ids = {"3", "2", "1", "0"};
@@ -402,7 +402,7 @@
     // when "starring" takes higher priority than "title"
 
     query_fields = {"starring", "title"};
-    results = coll_mul_fields->search("thomas", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("thomas", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     ids = {"15", "14", "12", "13"};
@@ -415,11 +415,11 @@
     }
 
     query_fields = {"starring", "title", "cast"};
-    results = coll_mul_fields->search("ben affleck", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("ben affleck", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(1, results["hits"].size());
 
     query_fields = {"cast"};
-    results = coll_mul_fields->search("chris", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("chris", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["hits"].size());
 
     ids = {"6", "1", "7"};
@@ -431,7 +431,7 @@
     }
 
     query_fields = {"cast"};
-    results = coll_mul_fields->search("chris pine", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("chris pine", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["hits"].size());
 
     ids = {"7", "6", "1"};
@@ -471,7 +471,7 @@ TEST_F(CollectionTest, FilterOnNumericFields) {
     // Plain search with no filters - results should be sorted by rank fields
     query_fields = {"name"};
     std::vector<std::string> facets;
-    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(5, results["hits"].size());
 
     std::vector<std::string> ids = {"3", "1", "4", "0", "2"};
@@ -484,7 +484,7 @@
     }
 
     // Searching on an int32 field
-    results = coll_array_fields->search("Jeremy", query_fields, "age:>24", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age:>24", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["hits"].size());
 
     ids = {"3", "1", "4"};
@@ -496,14 +496,14 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = coll_array_fields->search("Jeremy", query_fields, "age:>=24", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age:>=24", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
-    results = coll_array_fields->search("Jeremy", query_fields, "age:24", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age:24", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(1, results["hits"].size());
 
     // Searching a number against an int32 array field
-    results = coll_array_fields->search("Jeremy", query_fields, "years:>2002", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "years:>2002", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["hits"].size());
 
     ids = {"1", "0", "2"};
@@ -514,7 +514,7 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = coll_array_fields->search("Jeremy", query_fields, "years:<1989", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "years:<1989", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(1, results["hits"].size());
 
     ids = {"3"};
@@ -526,7 +526,7 @@
     }
 
     // multiple filters
-    results = coll_array_fields->search("Jeremy", query_fields, "years:<2005 && years:>1987", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "years:<2005 && years:>1987", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(1, results["hits"].size());
 
     ids = {"4"};
@@ -538,7 +538,7 @@
     }
 
     // multiple search values (works like SQL's IN operator) against a single int field
-    results = coll_array_fields->search("Jeremy", query_fields, "age:[21, 24, 63]", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age:[21, 24, 63]", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(3, results["hits"].size());
 
     ids = {"3", "0", "2"};
@@ -550,7 +550,7 @@
     }
 
     // multiple search values against an int32 array field - also use extra padding between symbols
-    results = coll_array_fields->search("Jeremy", query_fields, "years : [ 2015, 1985 , 1999]", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "years : [ 2015, 1985 , 1999]", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     ids = {"3", "1", "4", "0"};
@@ -562,7 +562,7 @@
     }
 
     // searching on an int64 array field - also ensure that padded space causes no issues
-    results = coll_array_fields->search("Jeremy", query_fields, "timestamps : > 475205222", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "timestamps : > 475205222", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     ids = {"1", "4", "0", "2"};
@@ -575,7 +575,7 @@
     }
 
     // when filters don't match any record, no results should be returned
-    results = coll_array_fields->search("Jeremy", query_fields, "timestamps:<1", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "timestamps:<1", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     collectionManager.drop_collection("coll_array_fields");
@@ -607,7 +607,7 @@ TEST_F(CollectionTest, FilterOnTextFields) {
 
     query_fields = {"name"};
     std::vector<std::string> facets;
-    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "tags: gold", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "tags: gold", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     std::vector<std::string> ids = {"1", "4", "0", "2"};
@@ -619,7 +619,7 @@
         ASSERT_STREQ(id.c_str(), result_id.c_str());
     }
 
-    results = coll_array_fields->search("Jeremy", query_fields, "tags : bronze", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "tags : bronze", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(2, results["hits"].size());
 
     ids = {"4", "2"};
@@ -632,7 +632,7 @@
     }
 
     // search with a list of tags, also testing extra padding of space
-    results = coll_array_fields->search("Jeremy", query_fields, "tags: [bronze, silver]", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "tags: [bronze, silver]", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(4, results["hits"].size());
 
     ids = {"3", "4", "0", "2"};
@@ -645,7 +645,7 @@
     }
 
     // should be exact matches (no normalization or fuzzy searching should happen)
-    results = coll_array_fields->search("Jeremy", query_fields, "tags: BRONZE", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "tags: BRONZE", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     collectionManager.drop_collection("coll_array_fields");
@@ -681,27 +681,27 @@ TEST_F(CollectionTest, HandleBadlyFormedFilterQuery) {
     std::vector<std::string> facets;
 
     // when filter field does not exist in the schema
-    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "tagzz: gold", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "tagzz: gold", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     // searching using a string for a numeric field
-    results = coll_array_fields->search("Jeremy", query_fields, "age: abcdef", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age: abcdef", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     // searching using a string for a numeric array field
-    results = coll_array_fields->search("Jeremy", query_fields, "timestamps: abcdef", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "timestamps: abcdef", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     // malformed k:v syntax
-    results = coll_array_fields->search("Jeremy", query_fields, "timestamps abcdef", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "timestamps abcdef", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     // just empty spaces
-    results = coll_array_fields->search("Jeremy", query_fields, " ", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, " ", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     // wrapping number with quotes
-    results = coll_array_fields->search("Jeremy", query_fields, "age: '21'", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age: '21'", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     collectionManager.drop_collection("coll_array_fields");
@@ -737,7 +737,7 @@ TEST_F(CollectionTest, FacetCounts) {
     std::vector<std::string> facets = {"tags"};
 
     // single facet with no filters
-    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = coll_array_fields->search("Jeremy", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(5, results["hits"].size());
 
     ASSERT_EQ(1, results["facet_counts"].size());
@@ -757,7 +757,7 @@
     facets.clear();
     facets.push_back("tags");
     facets.push_back("name");
-    results = coll_array_fields->search("Jeremy", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
 
     ASSERT_EQ(5, results["hits"].size());
     ASSERT_EQ(2, results["facet_counts"].size());
@@ -772,7 +772,7 @@
     // facet with filters
     facets.clear();
     facets.push_back("tags");
-    results = coll_array_fields->search("Jeremy", query_fields, "age: >24", facets, sort_fields, 0, 10, 1, FREQUENCY, false);
+    results = coll_array_fields->search("Jeremy", query_fields, "age: >24", facets, sort_fields, 0, 10, 1, FREQUENCY, false).get();
 
     ASSERT_EQ(3, results["hits"].size());
     ASSERT_EQ(1, results["facet_counts"].size());
@@ -812,7 +812,7 @@ TEST_F(CollectionTest, SortingOrder) {
     query_fields = {"title"};
     std::vector<std::string> facets;
     sort_fields = { sort_field("points", "ASC") };
-    nlohmann::json results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 15, 1, FREQUENCY, false);
+    nlohmann::json results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 15, 1, FREQUENCY, false).get();
     ASSERT_EQ(10, results["hits"].size());
 
     std::vector<std::string> ids = {"17", "13", "10", "4", "0", "1", "8", "6", "16", "11"};
@@ -826,7 +826,7 @@
 
     // limiting results to just 5, "ASC" keyword must be case insensitive
     sort_fields = { sort_field("points", "asc") };
-    results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 5, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 5, 1, FREQUENCY, false).get();
     ASSERT_EQ(5, results["hits"].size());
 
     ids = {"17", "13", "10", "4", "0"};
@@ -841,7 +841,7 @@
     // desc
 
     sort_fields = { sort_field("points", "dEsc") };
-    results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 15, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 15, 1, FREQUENCY, false).get();
     ASSERT_EQ(10, results["hits"].size());
 
     ids = {"11", "16", "6", "8", "1", "0", "10", "4", "13", "17"};
@@ -856,7 +856,7 @@
     // With empty list of sort_by fields:
     // should be ordered desc on the seq_id, since the match score will be the same for all records.
     sort_fields = { };
-    results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 15, 1, FREQUENCY, false);
+    results = coll_mul_fields->search("the", query_fields, "", facets, sort_fields, 0, 15, 1, FREQUENCY, false).get();
     ASSERT_EQ(10, results["hits"].size());
 
     ids = {"17", "16", "13", "11", "10", "8", "6", "4", "1", "0"};
@@ -901,28 +901,29 @@ TEST_F(CollectionTest, SearchingWithMissingFields) {
     std::vector<std::string> facets;
     std::vector<std::string> query_fields_not_found = {"titlez"};
 
-    nlohmann::json res = coll_array_fields->search("the", query_fields_not_found, "", facets, sort_fields, 0, 10);
-    ASSERT_EQ(0, res["hits"].size());
-    ASSERT_STREQ("Could not find a search field named `titlez` in the schema.",res["error"].get<std::string>().c_str());
+    Option<nlohmann::json> res_op = coll_array_fields->search("the", query_fields_not_found, "", facets, sort_fields, 0, 10);
+    ASSERT_FALSE(res_op.ok());
+    ASSERT_EQ(400, res_op.code());
+    ASSERT_STREQ("Could not find a search field named `titlez` in the schema.", res_op.error().c_str());
 
     // when a query field is an integer field
-    res = coll_array_fields->search("the", {"age"}, "", facets, sort_fields, 0, 10);
-    ASSERT_EQ(0, res["hits"].size());
-    ASSERT_STREQ("Search field `age` should be a string or a string array.", res["error"].get<std::string>().c_str());
+    res_op = coll_array_fields->search("the", {"age"}, "", facets, sort_fields, 0, 10);
+    ASSERT_EQ(400, res_op.code());
+    ASSERT_STREQ("Search field `age` should be a string or a string array.", res_op.error().c_str());
 
     // when a facet field is not defined in the schema
-    res = coll_array_fields->search("the", {"name"}, "", {"timestamps"}, sort_fields, 0, 10);
-    ASSERT_EQ(0, res["hits"].size());
-    ASSERT_STREQ("Could not find a facet field named `timestamps` in the schema.", res["error"].get<std::string>().c_str());
+    res_op = coll_array_fields->search("the", {"name"}, "", {"timestamps"}, sort_fields, 0, 10);
+    ASSERT_EQ(400, res_op.code());
+    ASSERT_STREQ("Could not find a facet field named `timestamps` in the schema.", res_op.error().c_str());
 
     // when a rank field is not defined in the schema
-    res = coll_array_fields->search("the", {"name"}, "", {}, { sort_field("timestamps", "ASC") }, 0, 10);
-    ASSERT_EQ(0, res["hits"].size());
-    ASSERT_STREQ("Could not find a sort field named `timestamps` in the schema.", res["error"].get<std::string>().c_str());
+    res_op = coll_array_fields->search("the", {"name"}, "", {}, { sort_field("timestamps", "ASC") }, 0, 10);
+    ASSERT_EQ(400, res_op.code());
+    ASSERT_STREQ("Could not find a sort field named `timestamps` in the schema.", res_op.error().c_str());
 
-    res = coll_array_fields->search("the", {"name"}, "", {}, { sort_field("_rank", "ASC") }, 0, 10);
-    ASSERT_EQ(0, res["hits"].size());
-    ASSERT_STREQ("Could not find a sort field named `_rank` in the schema.", res["error"].get<std::string>().c_str());
+    res_op = coll_array_fields->search("the", {"name"}, "", {}, { sort_field("_rank", "ASC") }, 0, 10);
+    ASSERT_EQ(400, res_op.code());
+    ASSERT_STREQ("Could not find a sort field named `_rank` in the schema.", res_op.error().c_str());
 
     collectionManager.drop_collection("coll_array_fields");
 }
@@ -1014,7 +1015,7 @@ TEST_F(CollectionTest, EmptyIndexShouldNotCrash) {
         empty_coll = collectionManager.create_collection("empty_coll", fields, facet_fields, sort_fields_index, "age");
     }
 
-    nlohmann::json results = empty_coll->search("a", {"name"}, "", {}, sort_fields, 0, 10, 1, FREQUENCY, false);
+    nlohmann::json results = empty_coll->search("a", {"name"}, "", {}, sort_fields, 0, 10, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
     collectionManager.drop_collection("empty_coll");
 }
@@ -1053,7 +1054,7 @@ TEST_F(CollectionTest, DeletionOfADocument) {
     nlohmann::json results;
 
     // asserts before removing any record
-    results = collection_for_del->search("cryogenic", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false);
+    results = collection_for_del->search("cryogenic", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false).get();
     ASSERT_EQ(1, results["hits"].size());
 
     it = store->get_iterator();
@@ -1067,14 +1068,14 @@
     // actually remove a record now
     collection_for_del->remove("1");
 
-    results = collection_for_del->search("cryogenic", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false);
+    results = collection_for_del->search("cryogenic", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
-    results = collection_for_del->search("archives", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false);
+    results = collection_for_del->search("archives", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false).get();
     ASSERT_EQ(1, results["hits"].size());
 
     collection_for_del->remove("foo"); // custom id record
-    results = collection_for_del->search("martian", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false);
+    results = collection_for_del->search("martian", query_fields, "", {}, sort_fields, 0, 5, 1, FREQUENCY, false).get();
     ASSERT_EQ(0, results["hits"].size());
 
     // delete all records