Mirror of https://github.com/typesense/typesense.git
Fix faceting count edge case.
commit 1afa193161
parent c6fe1369b9
@@ -2007,8 +2007,6 @@ void Index::search(std::vector<query_tokens_t>& field_query_tokens,
     const size_t num_threads = std::min(concurrency, all_result_ids_len);
     const size_t window_size = (num_threads == 0) ? 0 :
                                (all_result_ids_len + num_threads - 1) / num_threads;  // rounds up
-    size_t result_index = 0;
-
     size_t num_processed = 0;
     std::mutex m_process;
     std::condition_variable cv_process;
@@ -2021,12 +2019,13 @@ void Index::search(std::vector<query_tokens_t>& field_query_tokens,
     }

     size_t num_queued = 0;
+    size_t result_index = 0;

     for(size_t thread_id = 0; thread_id < num_threads && result_index < all_result_ids_len; thread_id++) {
         size_t batch_res_len = window_size;

         if(result_index + window_size > all_result_ids_len) {
-            batch_res_len = (result_index + window_size) - all_result_ids_len;
+            batch_res_len = all_result_ids_len - result_index;
         }

         uint32_t* batch_result_ids = all_result_ids + result_index;
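Why the old expression undercounted: when all_result_ids_len is not an exact multiple of num_threads, (result_index + window_size) - all_result_ids_len is the amount by which the final window overshoots the end of the result array, not the number of results that remain in it. Below is a minimal standalone sketch of the corrected batch sizing; the variable names mirror the diff, while the concrete values (18 results, a concurrency of 4) are illustrative and chosen to match the new test further down.

#include <algorithm>
#include <cstddef>
#include <cstdio>

int main() {
    const size_t concurrency = 4;            // illustrative thread budget
    const size_t all_result_ids_len = 18;    // deliberately not a multiple of 4
    const size_t num_threads = std::min(concurrency, all_result_ids_len);
    const size_t window_size = (num_threads == 0) ? 0 :
                               (all_result_ids_len + num_threads - 1) / num_threads;  // ceil(18/4) == 5

    size_t result_index = 0;
    for(size_t thread_id = 0; thread_id < num_threads && result_index < all_result_ids_len; thread_id++) {
        size_t batch_res_len = window_size;
        if(result_index + window_size > all_result_ids_len) {
            // old (buggy): (result_index + window_size) - all_result_ids_len, i.e. (15 + 5) - 18 == 2
            // new (fixed): the results actually left, i.e. 18 - 15 == 3
            batch_res_len = all_result_ids_len - result_index;
        }
        printf("thread %zu handles results [%zu, %zu)\n", thread_id, result_index, result_index + batch_res_len);
        result_index += batch_res_len;
    }
    return 0;
}

With the fix the batches cover 5 + 5 + 5 + 3 = 18 documents; the old formula sized the last batch at 2, so one document was never faceted and its category count came back as 17 instead of 18.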
@@ -1460,3 +1460,36 @@ TEST_F(CollectionSpecificTest, ImportDocumentWithRepeatingIDInTheSameBatch) {

     collectionManager.drop_collection("coll1");
 }
+
+TEST_F(CollectionSpecificTest, FacetParallelizationVerification) {
+    std::vector<field> fields = {field("name", field_types::STRING, false),
+                                 field("category", field_types::STRING, true),
+                                 field("points", field_types::INT32, false),};
+
+    Collection* coll1 = collectionManager.create_collection("coll1", 1, fields, "points").get();
+
+    // choose a number that's not a multiple of 4
+
+    for(size_t i = 0; i < 18; i++) {
+        nlohmann::json doc1;
+        doc1["id"] = std::to_string(i);
+        doc1["name"] = "Levis";
+        doc1["category"] = "jeans";
+        doc1["points"] = 3;
+
+        ASSERT_TRUE(coll1->add(doc1.dump()).ok());
+    }
+
+    auto results = coll1->search("levis", {"name"},
+                                 "", {"category"}, {}, {0}, 10,
+                                 1, FREQUENCY, {false},
+                                 2, spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4, "", 10, {}, {}, {}, 0,
+                                 "<mark>", "</mark>", {0},
+                                 1000, true).get();
+
+    ASSERT_STREQ("category", results["facet_counts"][0]["field_name"].get<std::string>().c_str());
+    ASSERT_EQ(18, (int) results["facet_counts"][0]["counts"][0]["count"]);
+
+    collectionManager.drop_collection("coll1");
+}