diff --git a/bazel/onnx.patch b/bazel/onnx.patch
index 121a941d..0ceeb6db 100644
--- a/bazel/onnx.patch
+++ b/bazel/onnx.patch
@@ -11,6 +11,19 @@ index 88b46890b7..d090499971 100644
          __cmake_contentNameLower
          __cmake_contentName
 
+
+--- cmake/CMakeLists.txt
++++ cmake/CMakeLists.txt
+@@ -1268,6 +1268,7 @@ if (onnxruntime_USE_CUDA)
+   set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -gencode=arch=compute_53,code=sm_53") # TX1, Nano
+   set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -gencode=arch=compute_62,code=sm_62") # TX2
+   set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -gencode=arch=compute_72,code=sm_72") # AGX Xavier, NX Xavier
++  set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -gencode=arch=compute_75,code=sm_75") # T4
+   if (CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 11)
+     set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -gencode=arch=compute_87,code=sm_87") # AGX Orin, NX Orin
+   endif()
+
+
 diff --git a/.gitmodules b/.gitmodules
 index 8e4217162b..bb63b7d9c5 100644
 --- .gitmodules
diff --git a/src/batched_indexer.cpp b/src/batched_indexer.cpp
index 93a19382..be3c9974 100644
--- a/src/batched_indexer.cpp
+++ b/src/batched_indexer.cpp
@@ -200,7 +200,8 @@ void BatchedIndexer::run() {
                                               config.get_disk_used_max_percentage(),
                                               config.get_memory_used_max_percentage());
 
-                if (resource_check != cached_resource_stat_t::OK && orig_req->http_method != "DELETE") {
+                if (resource_check != cached_resource_stat_t::OK &&
+                    orig_req->http_method != "DELETE" && found_rpath->handler != post_health) {
                     const std::string& err_msg = "Rejecting write: running out of resource type: " +
                                                  std::string(magic_enum::enum_name(resource_check));
                     LOG(ERROR) << err_msg;
diff --git a/src/index.cpp b/src/index.cpp
index ee4021a9..e4546983 100644
--- a/src/index.cpp
+++ b/src/index.cpp
@@ -1155,6 +1155,7 @@ void Index::tokenize_string_array(const std::vector<std::string>& strings,
         token_to_offsets[last_token].push_back(0);
     }
 }
+
 void Index::initialize_facet_indexes(const field& facet_field) {
     facet_index_v4->initialize(facet_field.name);
 }
diff --git a/test/collection_faceting_test.cpp b/test/collection_faceting_test.cpp
index 85ace93a..cadc95c8 100644
--- a/test/collection_faceting_test.cpp
+++ b/test/collection_faceting_test.cpp
@@ -1055,6 +1055,78 @@ TEST_F(CollectionFacetingTest, FacetByNestedIntField) {
     ASSERT_EQ("companyRank", wildcard_facets[1].field_name);
 }
 
+TEST_F(CollectionFacetingTest, FacetByNestedArrayField) {
+    nlohmann::json schema = R"({
+        "name": "coll1",
+        "enable_nested_fields": true,
+        "fields": [
+          {"name": "data", "type": "object", "optional": false, "facet": true }
+        ]
+    })"_json;
+
+    auto op = collectionManager.create_collection(schema);
+    ASSERT_TRUE(op.ok());
+    Collection* coll1 = op.get();
+
+    auto doc1 = R"({
+        "data": {"details": [{"name": "Foo"}, {"name": "Foo"}]}
+    })"_json;
+
+    auto doc2 = R"({
+        "data": {"details": [{"name": "Foo"}, {"name": "Foo"}]}
+    })"_json;
+
+    ASSERT_TRUE(coll1->add(doc1.dump(), CREATE).ok());
+    ASSERT_TRUE(coll1->add(doc2.dump(), CREATE).ok());
+
+    auto results = coll1->search("*", {}, "", {"data.details.name"}, {}, {0}, 10, 1,
+                                 token_ordering::FREQUENCY, {true}, 10, spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4).get();
+
+    ASSERT_EQ(2, results["found"].get<size_t>());
+    ASSERT_EQ(1, results["facet_counts"].size());
+    ASSERT_EQ("data.details.name", results["facet_counts"][0]["field_name"]);
+    ASSERT_EQ(1, results["facet_counts"][0]["counts"].size());
+    ASSERT_EQ(2, results["facet_counts"][0]["counts"][0]["count"].get<size_t>());
+    ASSERT_EQ("Foo", results["facet_counts"][0]["counts"][0]["value"].get<std::string>());
+}
+
+TEST_F(CollectionFacetingTest, FacetByArrayField) {
+    nlohmann::json schema = R"({
+        "name": "coll1",
+        "enable_nested_fields": true,
+        "fields": [
+          {"name": "data", "type": "string[]", "optional": false, "facet": true }
+        ]
+    })"_json;
+
+    auto op = collectionManager.create_collection(schema);
+    ASSERT_TRUE(op.ok());
+    Collection* coll1 = op.get();
+
+    auto doc1 = R"({
+        "data": ["Foo", "Foo"]
+    })"_json;
+
+    auto doc2 = R"({
+        "data": ["Foo", "Foo"]
+    })"_json;
+
+    ASSERT_TRUE(coll1->add(doc1.dump(), CREATE).ok());
+    ASSERT_TRUE(coll1->add(doc2.dump(), CREATE).ok());
+
+    auto results = coll1->search("*", {}, "", {"data"}, {}, {0}, 10, 1,
+                                 token_ordering::FREQUENCY, {true}, 10, spp::sparse_hash_set<std::string>(),
+                                 spp::sparse_hash_set<std::string>(), 10, "", 30, 4).get();
+
+    ASSERT_EQ(2, results["found"].get<size_t>());
+    ASSERT_EQ(1, results["facet_counts"].size());
+    ASSERT_EQ("data", results["facet_counts"][0]["field_name"]);
+    ASSERT_EQ(1, results["facet_counts"][0]["counts"].size());
+    ASSERT_EQ(2, results["facet_counts"][0]["counts"][0]["count"].get<size_t>());
+    ASSERT_EQ("Foo", results["facet_counts"][0]["counts"][0]["value"].get<std::string>());
+}
+
 TEST_F(CollectionFacetingTest, FacetParseTest){
     std::vector<field> fields = {
         field("score", field_types::INT32, true),
@@ -1875,4 +1947,4 @@ TEST_F(CollectionFacetingTest, FacetingReturnParentObject) {
     ASSERT_EQ(2, results["facet_counts"][0]["counts"].size());
     ASSERT_EQ("{\"b\":0,\"color\":\"red\",\"g\":0,\"r\":255}", results["facet_counts"][0]["counts"][0]["value"]);
     ASSERT_EQ("{\"b\":255,\"color\":\"blue\",\"g\":0,\"r\":0}", results["facet_counts"][0]["counts"][1]["value"]);
-}
\ No newline at end of file
+}
diff --git a/test/collection_vector_search_test.cpp b/test/collection_vector_search_test.cpp
index 7f1e7eef..84e54d03 100644
--- a/test/collection_vector_search_test.cpp
+++ b/test/collection_vector_search_test.cpp
@@ -775,7 +775,7 @@ TEST_F(CollectionVectorTest, HybridSearchWithExplicitVector) {
     ASSERT_EQ(2, search_res["found"].get<size_t>());
     ASSERT_EQ(2, search_res["hits"].size());
 
-    ASSERT_FLOAT_EQ(0.0462081432, search_res["hits"][0]["vector_distance"].get<float>());
+    ASSERT_FLOAT_EQ(0.046207964, search_res["hits"][0]["vector_distance"].get<float>());
     ASSERT_FLOAT_EQ(0.1213316321, search_res["hits"][1]["vector_distance"].get<float>());
 
     // to pass k param
@@ -825,10 +825,6 @@ TEST_F(CollectionVectorTest, HybridSearchWithExplicitVector) {
     ASSERT_FLOAT_EQ(2.0f, search_res["hits"][0]["vector_distance"].get<float>());
     ASSERT_FLOAT_EQ(2.0f, search_res["hits"][1]["vector_distance"].get<float>());
     ASSERT_FLOAT_EQ(2.0f, search_res["hits"][2]["vector_distance"].get<float>());
-
-    ASSERT_FLOAT_EQ(2.0f, search_res["hits"][0]["hybrid_search_info"]["vector_distance"].get<float>());
-    ASSERT_FLOAT_EQ(2.0f, search_res["hits"][1]["hybrid_search_info"]["vector_distance"].get<float>());
-    ASSERT_FLOAT_EQ(2.0f, search_res["hits"][2]["hybrid_search_info"]["vector_distance"].get<float>());
 }
 
 TEST_F(CollectionVectorTest, HybridSearchOnlyVectorMatches) {