Mirror of https://github.com/typesense/typesense.git
Merge branch 'v0.25-join' into v0.26-facets
# Conflicts:
#   test/collection_nested_fields_test.cpp
Commit 42c544b70c
@@ -347,9 +347,8 @@ bool field::flatten_obj(nlohmann::json& doc, nlohmann::json& value, bool has_arr
     while(it != value.end()) {
         const std::string& child_field_name = flat_name + "." + it.key();
         if(it.value().is_null()) {
-            if(has_array) {
-                doc[child_field_name].push_back(nullptr);
-            } else {
+            if(!has_array) {
+                // we don't want to push null values into an array because that's not valid
                 doc[child_field_name] = nullptr;
             }
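Note: the hunk above changes how null values are handled while flattening nested objects: a null child is now written only when the field is not inside an array, so nulls are never pushed into a flattened array field. Below is a minimal standalone sketch of that rule, assuming only nlohmann::json; flatten_child is a hypothetical helper for illustration, not the actual field::flatten_obj implementation.

    #include <nlohmann/json.hpp>
    #include <iostream>
    #include <string>

    // Hypothetical helper mirroring the null-handling rule in the hunk above:
    // a null child is skipped when flattening inside an array of objects
    // (pushing null into the flattened array would not be valid); outside an
    // array the null is kept.
    void flatten_child(nlohmann::json& doc, const std::string& child_field_name,
                       const nlohmann::json& value, bool has_array) {
        if(value.is_null()) {
            if(!has_array) {
                doc[child_field_name] = nullptr;
            }
            return; // null inside an array of objects: simply drop it
        }
        if(has_array) {
            doc[child_field_name].push_back(value);  // a null target becomes an array
        } else {
            doc[child_field_name] = value;
        }
    }

    int main() {
        nlohmann::json doc;
        // two array elements: one null, one real value -- only "NY" is flattened
        flatten_child(doc, "data.locations.stateShort", nullptr, true);
        flatten_child(doc, "data.locations.stateShort", "NY", true);
        std::cout << doc.dump() << std::endl;  // {"data.locations.stateShort":["NY"]}
        return 0;
    }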
@@ -6314,13 +6314,13 @@ void Index::remove_field(uint32_t seq_id, const nlohmann::json& document, const
                     if (posting_t::num_ids(leaf->values) == 0) {
                         void* values = art_delete(search_index.at(field_name), key, key_len);
                         posting_t::destroy_list(values);
                     }
-                }
-
-                if(search_field.infix) {
-                    auto strhash = StringUtils::hash_wy(key, token.size());
-                    const auto& infix_sets = infix_index.at(search_field.name);
-                    infix_sets[strhash % 4]->erase(token);
+                    if(search_field.infix) {
+                        auto strhash = StringUtils::hash_wy(key, token.size());
+                        const auto& infix_sets = infix_index.at(search_field.name);
+                        infix_sets[strhash % 4]->erase(token);
+                    }
                 }
             }
         } else if(search_field.is_int32()) {
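Note: this hunk appears to move the infix-set erase inside the enclosing per-token block of Index::remove_field, so each token of a removed document is erased from the field's infix index rather than only the token left over after the block. The diff shows the infix index as four hash-bucketed sets per field. Below is a rough standalone sketch of that bucketing, using std::hash in place of StringUtils::hash_wy; the struct and its methods are assumptions for illustration, not Typesense's actual types.

    #include <array>
    #include <set>
    #include <string>
    #include <vector>
    #include <functional>
    #include <iostream>

    // Rough sketch: four hash-bucketed sets of tokens per field, mirroring
    // infix_sets[strhash % 4]->erase(token) in the hunk above.
    struct InfixSets {
        std::array<std::set<std::string>, 4> buckets;

        size_t bucket_of(const std::string& token) const {
            return std::hash<std::string>{}(token) % buckets.size();
        }

        void insert(const std::string& token) { buckets[bucket_of(token)].insert(token); }

        // Called while removing a document: every token must be erased,
        // which is why the erase sits inside the per-token loop.
        void erase(const std::string& token) { buckets[bucket_of(token)].erase(token); }

        bool contains(const std::string& token) const {
            return buckets[bucket_of(token)].count(token) > 0;
        }
    };

    int main() {
        InfixSets infix;
        std::vector<std::string> tokens = {"running", "shoe"};
        for(const auto& t : tokens) infix.insert(t);

        // removing the document: erase each of its tokens, not just the last one
        for(const auto& t : tokens) infix.erase(t);

        std::cout << std::boolalpha << infix.contains("running") << std::endl;  // false
        return 0;
    }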
@@ -462,7 +462,7 @@ TEST_F(CollectionInfixSearchTest, InfixDeleteAndUpdate) {
     collectionManager.drop_collection("coll1");
 }
 
-TEST_F(CollectionInfixSearchTest, MultiFielInfixSearch) {
+TEST_F(CollectionInfixSearchTest, MultiFieldInfixSearch) {
     std::vector<field> fields = {field("title", field_types::STRING, false, false, true, "", -1, 1),
                                  field("mpn", field_types::STRING, false, false, true, "", -1, 1),
                                  field("points", field_types::INT32, false),};
@@ -494,3 +494,53 @@ TEST_F(CollectionInfixSearchTest, MultiFielInfixSearch) {
 
     collectionManager.drop_collection("coll1");
 }
+
+TEST_F(CollectionInfixSearchTest, DeleteDocWithInfixIndex) {
+    std::vector<field> fields = {field("title", field_types::STRING, false, false, true, "", -1, 1),
+                                 field("mpn", field_types::STRING, false, false, true, "", -1, 1),
+                                 field("points", field_types::INT32, false),};
+
+    Collection* coll1 = collectionManager.create_collection("coll1", 1, fields, "points").get();
+
+    nlohmann::json doc;
+    doc["id"] = "0";
+    doc["title"] = "Running Shoe";
+    doc["mpn"] = "HYDGHSGAH";
+    doc["points"] = 100;
+    ASSERT_TRUE(coll1->add(doc.dump()).ok());
+
+    doc["id"] = "1";
+    doc["title"] = "Running Band";
+    doc["mpn"] = "GHX100037IN";
+    doc["points"] = 100;
+    ASSERT_TRUE(coll1->add(doc.dump()).ok());
+
+    auto results = coll1->search("nni",
+                                 {"title"}, "", {}, {}, {0}, 3, 1, FREQUENCY, {true}, 5,
+                                 spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4, "title", 20, {}, {}, {}, 0,
+                                 "<mark>", "</mark>", {}, 1000, true, false, true, "", false, 6000 * 1000, 4, 7, fallback,
+                                 4, {always}).get();
+
+    ASSERT_EQ(2, results["found"].get<size_t>());
+    ASSERT_EQ(2, results["hits"].size());
+
+    // drop one document
+
+    coll1->remove("0");
+
+    // search again
+
+    results = coll1->search("nni",
+                            {"title"}, "", {}, {}, {0}, 3, 1, FREQUENCY, {true}, 5,
+                            spp::sparse_hash_set<std::string>(),
+                            spp::sparse_hash_set<std::string>(), 10, "", 30, 4, "title", 20, {}, {}, {}, 0,
+                            "<mark>", "</mark>", {}, 1000, true, false, true, "", false, 6000 * 1000, 4, 7, fallback,
+                            4, {always}).get();
+
+    ASSERT_EQ(1, results["found"].get<size_t>());
+    ASSERT_EQ(1, results["hits"].size());
+    ASSERT_STREQ("1", results["hits"][0]["document"]["id"].get<std::string>().c_str());
+
+    collectionManager.drop_collection("coll1");
+}
@@ -1487,6 +1487,109 @@ TEST_F(CollectionNestedFieldsTest, ExplicitSchemaForNestedArrayTypeValidation) {
               "Hint: field inside an array of objects must be an array type as well.", add_op.error());
 }
 
+TEST_F(CollectionNestedFieldsTest, OptionalNestedOptionalOjectArrStringField) {
+    nlohmann::json schema = R"({
+        "name": "coll1",
+        "enable_nested_fields": true,
+        "fields": [
+          {"facet":true,"name":"data","optional":false,"type":"object"},
+          {"facet":false,"name":"data.locations.stateShort","optional":true,"type":"string[]"}
+        ]
+    })"_json;
+
+    auto op = collectionManager.create_collection(schema);
+    ASSERT_TRUE(op.ok());
+    Collection* coll1 = op.get();
+
+    auto doc1 = R"({
+        "data": {
+            "locations": [
+                {
+                    "stateShort": null
+                }
+            ]
+        }
+    })"_json;
+
+    auto add_op = coll1->add(doc1.dump(), CREATE);
+    ASSERT_TRUE(add_op.ok());
+
+    doc1 = R"({
+        "data": {
+            "locations": [
+                {
+                    "stateShort": null
+                },
+                {
+                    "stateShort": "NY"
+                }
+            ]
+        }
+    })"_json;
+
+    coll1->add(doc1.dump(), CREATE);
+
+    auto results = coll1->search("ny", {"data.locations.stateShort"},
+                                 "", {}, {}, {0}, 10, 1,
+                                 token_ordering::FREQUENCY, {true}, 10, spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4).get();
+
+    ASSERT_EQ(1, results["found"].get<size_t>());
+}
+
+TEST_F(CollectionNestedFieldsTest, OptionalNestedNonOptionalOjectArrStringField) {
+    nlohmann::json schema = R"({
+        "name": "coll1",
+        "enable_nested_fields": true,
+        "fields": [
+          {"facet":true,"name":"data","type":"object"},
+          {"facet":false,"name":"data.locations.stateShort","type":"string[]"}
+        ]
+    })"_json;
+
+    auto op = collectionManager.create_collection(schema);
+    ASSERT_TRUE(op.ok());
+    Collection* coll1 = op.get();
+
+    auto doc1 = R"({
+        "data": {
+            "locations": [
+                {
+                    "stateShort": null
+                }
+            ]
+        }
+    })"_json;
+
+    auto add_op = coll1->add(doc1.dump(), CREATE);
+    ASSERT_FALSE(add_op.ok());
+    ASSERT_EQ("Field `data.locations.stateShort` has been declared in the schema, but is not found in the document.",
+              add_op.error());
+
+    doc1 = R"({
+        "data": {
+            "locations": [
+                {
+                    "stateShort": null
+                },
+                {
+                    "stateShort": "NY"
+                }
+            ]
+        }
+    })"_json;
+
+    coll1->add(doc1.dump(), CREATE);
+
+    auto results = coll1->search("ny", {"data.locations.stateShort"},
+                                 "", {}, {}, {0}, 10, 1,
+                                 token_ordering::FREQUENCY, {true}, 10, spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4).get();
+
+    ASSERT_EQ(1, results["found"].get<size_t>());
+}
+
+
 TEST_F(CollectionNestedFieldsTest, UnindexedNestedFieldShouldNotClutterSchema) {
     nlohmann::json schema = R"({
         "name": "coll1",
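Note: the two new nested-field tests pin down the behaviour the flatten_obj change enables: when every stateShort in the object array is null, an optional data.locations.stateShort never materialises in the flattened document and the add succeeds, while the non-optional variant is rejected with the "has been declared in the schema, but is not found in the document" error. Below is a condensed sketch of that validation rule; validate_flattened and FieldDef are hypothetical stand-ins for illustration, not Typesense's actual validation code.

    #include <nlohmann/json.hpp>
    #include <iostream>
    #include <string>
    #include <vector>

    struct FieldDef {
        std::string name;
        bool optional;
    };

    // Hypothetical validator: a flattened field that is absent (for example
    // because all of its nested values were null) only fails validation when
    // it is declared non-optional in the schema.
    std::string validate_flattened(const nlohmann::json& flattened_doc,
                                   const std::vector<FieldDef>& fields) {
        for(const auto& f : fields) {
            if(!flattened_doc.contains(f.name) && !f.optional) {
                return "Field `" + f.name + "` has been declared in the schema, "
                       "but is not found in the document.";
            }
        }
        return "";  // empty string == ok
    }

    int main() {
        // all stateShort values were null, so the flattened field never got created
        nlohmann::json flattened = {{"data", nlohmann::json::object()}};

        std::vector<FieldDef> optional_schema = {{"data.locations.stateShort", true}};
        std::vector<FieldDef> required_schema = {{"data.locations.stateShort", false}};

        std::cout << (validate_flattened(flattened, optional_schema).empty() ? "ok" : "error") << "\n";
        std::cout << validate_flattened(flattened, required_schema) << "\n";
        return 0;
    }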