Mirror of https://github.com/typesense/typesense.git
Handle null value update of reference field. (#1814)
* Handle null value update of reference field.
* Fix `CollectionVectorTest, VectorWithNullValue`.
* Add tests.
parent b88f1fedbe
commit b92bba5f3f
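In short: an UPDATE that sets an array reference field to `null` is now handled explicitly and behaves like clearing the references. The following is a minimal sketch of that document-level behaviour (plain nlohmann::json, not Typesense code; the field names follow the test further below):

#include <nlohmann/json.hpp>
#include <iostream>

// Minimal sketch of the update flow the new tests exercise: a null update on
// an array reference field should behave like sending an empty array.
int main() {
    nlohmann::json exercise_doc;
    exercise_doc["id"] = "0";
    exercise_doc["bodyParts"] = {"abcd1", "abcd2", "abcd3"};    // initial references

    exercise_doc["bodyParts"] = nullptr;                        // null-value update
    if (exercise_doc["bodyParts"].is_null()) {
        exercise_doc["bodyParts"] = nlohmann::json::array();    // treated as empty (see validator hunk below)
    }

    std::cout << exercise_doc.dump() << '\n';                   // {"bodyParts":[],"id":"0"}
}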
@@ -95,11 +95,11 @@ uint32_t Collection::get_next_seq_id() {
 }
 
 Option<bool> single_value_filter_query(nlohmann::json& document, const std::string& field_name,
-                                       const std::string& ref_field_type, bool is_optional, std::string& filter_query) {
+                                       const std::string& ref_field_type, std::string& filter_query) {
     auto const& value = document[field_name];
 
-    if (is_optional && value.is_null()) {
-        return Option<bool>(422, "Optional field has `null` value.");
+    if (value.is_null()) {
+        return Option<bool>(422, "Field has `null` value.");
     }
 
     if (value.is_string() && ref_field_type == field_types::STRING) {
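The `is_optional` parameter is dropped because the caller is better placed to decide what a `null` value means: the helper now reports a 422 for any `null`, and the call sites (see the hunks below) skip that error only when the field is optional. A toy sketch of that contract, using a stand-in struct rather than Typesense's `Option<bool>`:

#include <iostream>
#include <string>

// Stand-in for Option<bool>, only to illustrate the error-code contract.
struct Status {
    bool ok;
    int code;
    std::string error;
};

// Mirrors the new helper: any null value is reported as a 422.
Status filter_for_value(bool value_is_null) {
    if (value_is_null) {
        return {false, 422, "Field has `null` value."};
    }
    return {true, 0, ""};
}

int main() {
    const bool optional = true;                 // comes from the schema in the real code
    const Status op = filter_for_value(true);

    if (!op.ok && !(optional && op.code == 422)) {
        std::cout << "propagate: " << op.error << '\n';     // required field: hard error
        return 1;
    }
    std::cout << "optional null reference skipped\n";        // optional field: skip quietly
}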
@@ -130,7 +130,8 @@ Option<bool> Collection::add_reference_helper_fields(nlohmann::json& document, c
     // Add reference helper fields in the document.
     for (auto const& pair: reference_fields) {
         auto field_name = pair.first;
-        auto optional = schema.at(field_name).optional;
+        auto const& field = schema.at(field_name);
+        auto const& optional = field.optional;
         // Strict checking for presence of non-optional reference field during indexing operation.
         auto is_required = !is_update && !optional;
         if (is_required && document.count(field_name) != 1) {
@@ -291,7 +292,7 @@ Option<bool> Collection::add_reference_helper_fields(nlohmann::json& document, c
 
                 temp_doc[field_name] = object_array[i].at(keys[1]);
                 auto single_value_filter_query_op = single_value_filter_query(temp_doc, field_name, ref_field_type,
-                                                                              optional, filter_query);
+                                                                              filter_query);
                 if (!single_value_filter_query_op.ok()) {
                     if (single_value_filter_query_op.code() == 422) {
                         continue;
@@ -351,11 +352,16 @@ Option<bool> Collection::add_reference_helper_fields(nlohmann::json& document, c
                     continue;
                 }
                 filter_query[filter_query.size() - 1] = ']';
+            } else if (field.is_array() && document[field_name].is_null()) {
+                document[reference_helper_field] = nlohmann::json::array();
+                document[fields::reference_helper_fields] += reference_helper_field;
+
+                continue;
             } else {
                 auto single_value_filter_query_op = single_value_filter_query(document, field_name, ref_field_type,
-                                                                              optional, filter_query);
+                                                                              filter_query);
                 if (!single_value_filter_query_op.ok()) {
-                    if (single_value_filter_query_op.code() == 422) {
+                    if (optional && single_value_filter_query_op.code() == 422) {
                         continue;
                     }
                     return single_value_filter_query_op;
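For an array reference field that arrives as `null`, the new branch writes an empty reference helper array, records the helper field, and skips filter-building entirely. A rough standalone illustration of what that leaves in the document (the helper field and bookkeeping key names here are made up; the real names come from `reference_helper_field` and `fields::reference_helper_fields`):

#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

int main() {
    nlohmann::json document = R"({"id": "0", "bodyParts": null})"_json;

    // Illustrative helper field name; in Collection::add_reference_helper_fields
    // it is whatever `reference_helper_field` resolves to for this schema field.
    const std::string reference_helper_field = "bodyParts_ref_helper";

    document[reference_helper_field] = nlohmann::json::array();      // no references to resolve
    document["reference_helper_fields"] += reference_helper_field;   // bookkeeping list, key name illustrative

    std::cout << document.dump() << '\n';
    // {"bodyParts":null,"bodyParts_ref_helper":[],"id":"0","reference_helper_fields":["bodyParts_ref_helper"]}
}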
@@ -62,6 +62,10 @@ Option<uint32_t> validator_t::coerce_element(const field& a_field, nlohmann::jso
             }
         }
     } else if(a_field.is_array()) {
+        if (doc_ele.is_null()) {
+            doc_ele = nlohmann::json::array();
+        }
+
         if(!doc_ele.is_array()) {
             bool is_auto_embedding = a_field.type == field_types::FLOAT_ARRAY && a_field.embed.count(fields::from) > 0;
             if((a_field.optional && (dirty_values == DIRTY_VALUES::DROP ||
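The validator change makes the null handling generic: for any array field, a `null` element is coerced to an empty array before the usual `is_array` check and type validation run. A minimal sketch of just that coercion step in isolation:

#include <nlohmann/json.hpp>
#include <cassert>

// Isolated sketch of the added coercion: null -> empty array.
// The surrounding type and dirty-value handling in coerce_element is unchanged.
void coerce_null_array(nlohmann::json& doc_ele) {
    if (doc_ele.is_null()) {
        doc_ele = nlohmann::json::array();
    }
}

int main() {
    nlohmann::json doc = R"({"vec": null, "tags": ["a", "b"]})"_json;

    coerce_null_array(doc["vec"]);    // null becomes []
    coerce_null_array(doc["tags"]);   // non-null arrays are left untouched

    assert(doc["vec"].is_array() && doc["vec"].empty());
    assert(doc["tags"].size() == 2);
    return 0;
}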
@@ -1341,10 +1341,6 @@ TEST_F(CollectionJoinTest, JoinAfterUpdateOfArrayField) {
     exercise_doc["bodyParts"] = {"abcd1", "abcd2", "abcd3"};
     ASSERT_TRUE(exercise_coll->add(exercise_doc.dump()).ok());
 
-    // now update document to remove an array element
-    exercise_doc["bodyParts"] = {"abcd1", "abcd3"};
-    ASSERT_TRUE(exercise_coll->add(exercise_doc.dump(), UPDATE).ok());
-
     // search for the document
     std::map<std::string, std::string> req_params = {
             {"collection", "exercises"},
@@ -1360,8 +1356,72 @@ TEST_F(CollectionJoinTest, JoinAfterUpdateOfArrayField) {
     ASSERT_TRUE(search_op.ok());
 
     auto res = nlohmann::json::parse(json_res);
+    ASSERT_EQ(3, res["hits"][0]["document"]["bodyParts"].size());
+    ASSERT_EQ(3, res["hits"][0]["document"]["parts"].size());
+
+    // now update document to remove an array element
+    exercise_doc = R"({
+        "id": "0",
+        "bodyParts": ["abcd1", "abcd3"]
+    })"_json;
+    ASSERT_TRUE(exercise_coll->add(exercise_doc.dump(), UPDATE).ok());
+
+    req_params = {
+            {"collection", "exercises"},
+            {"q", "*"},
+            {"include_fields", "$bodyParts(uid, name, strategy:nest) as parts"}
+    };
+    search_op = collectionManager.do_search(req_params, embedded_params, json_res, now_ts);
+
+    res = nlohmann::json::parse(json_res);
+    ASSERT_EQ(2, res["hits"][0]["document"]["bodyParts"].size());
+    ASSERT_EQ(2, res["hits"][0]["document"]["parts"].size());
+
+    // remove both elements
+    exercise_doc["bodyParts"] = nullptr;
+    ASSERT_TRUE(exercise_coll->add(exercise_doc.dump(), UPDATE).ok());
+
+    req_params = {
+            {"collection", "exercises"},
+            {"q", "*"},
+            {"include_fields", "$bodyParts(uid, name, strategy:nest) as parts"}
+    };
+    search_op = collectionManager.do_search(req_params, embedded_params, json_res, now_ts);
+    ASSERT_TRUE(search_op.ok());
+
+    res = nlohmann::json::parse(json_res);
+    ASSERT_EQ(0, res["hits"][0]["document"]["bodyParts"].size());
+    ASSERT_EQ(0, res["hits"][0]["document"]["parts"].size());
+
+    exercise_doc["bodyParts"] = {"abcd1"};
+    ASSERT_TRUE(exercise_coll->add(exercise_doc.dump(), UPDATE).ok());
+
+    req_params = {
+            {"collection", "exercises"},
+            {"q", "*"},
+            {"include_fields", "$bodyParts(uid, name, strategy:nest) as parts"}
+    };
+    search_op = collectionManager.do_search(req_params, embedded_params, json_res, now_ts);
+    ASSERT_TRUE(search_op.ok());
+
+    res = nlohmann::json::parse(json_res);
+    ASSERT_EQ(1, res["hits"][0]["document"]["bodyParts"].size());
+    ASSERT_EQ(1, res["hits"][0]["document"]["parts"].size());
+
+    exercise_doc["bodyParts"] = {};
+    ASSERT_TRUE(exercise_coll->add(exercise_doc.dump(), UPDATE).ok());
+
+    req_params = {
+            {"collection", "exercises"},
+            {"q", "*"},
+            {"include_fields", "$bodyParts(uid, name, strategy:nest) as parts"}
+    };
+    search_op = collectionManager.do_search(req_params, embedded_params, json_res, now_ts);
+    ASSERT_TRUE(search_op.ok());
+
+    res = nlohmann::json::parse(json_res);
+    ASSERT_EQ(0, res["hits"][0]["document"]["bodyParts"].size());
+    ASSERT_EQ(0, res["hits"][0]["document"]["parts"].size());
 }
 
 TEST_F(CollectionJoinTest, FilterByReference_SingleMatch) {
@@ -1142,7 +1142,7 @@ TEST_F(CollectionVectorTest, VectorWithNullValue) {
 
     ASSERT_TRUE(nlohmann::json::parse(json_lines[0])["success"].get<bool>());
     ASSERT_FALSE(nlohmann::json::parse(json_lines[1])["success"].get<bool>());
-    ASSERT_EQ("Field `vec` must be an array.",
+    ASSERT_EQ("Field `vec` must have 4 dimensions.",
              nlohmann::json::parse(json_lines[1])["error"].get<std::string>());
 }
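The expected error in `VectorWithNullValue` changes as a consequence of the validator coercion above: by the time the vector field is validated, the `null` value has already become an empty array, so the failure is a dimension mismatch rather than a type error. A rough sketch of why the message shifts (the 4-dimension requirement comes from the test's schema):

#include <nlohmann/json.hpp>
#include <cstddef>
#include <iostream>

int main() {
    nlohmann::json vec = nullptr;               // document sent with "vec": null
    if (vec.is_null()) {
        vec = nlohmann::json::array();          // coerced, as in coerce_element above
    }

    const std::size_t num_dim = 4;              // dimensions declared in the test schema
    if (vec.size() != num_dim) {
        // matches the new expectation asserted in the test
        std::cout << "Field `vec` must have " << num_dim << " dimensions." << '\n';
    }
}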