Auto vector generation & Hybrid Search

ozanarmagan 2023-02-16 14:45:43 +03:00 committed by Harpreet Sangar
parent 94add54c43
commit 2672b1ebd6
8 changed files with 25 additions and 365 deletions


@@ -13,10 +13,7 @@ bazel_compdb_deps()
http_archive(
name = "rules_foreign_cc",
patches = ["//bazel:foreign_cc.patch", "//bazel:foreign_cc_version_compiler.patch"],
patch_args = [
"-p1",
],
patches = ["//bazel:foreign_cc.patch"],
sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
strip_prefix = "rules_foreign_cc-0.9.0",
url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",


@@ -1,5 +1,5 @@
--- a/foreign_cc/private/configure_script.bzl
+++ b/foreign_cc/private/configure_script.bzl
--- foreign_cc/private/configure_script.bzl
+++ foreign_cc/private/configure_script.bzl
@@ -70,7 +70,7 @@
).lstrip())
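
Aside (not part of the diff): removing patch_args = ["-p1"] from the http_archive above means Bazel applies the remaining patch with its default strip level (-p0), which is why foreign_cc.patch itself is rewritten to drop the a/ and b/ prefixes from its file headers. Assembled from the lines shown above (a sketch, assuming nothing else in the rule changed), the WORKSPACE entry now reads roughly:

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
    name = "rules_foreign_cc",
    # Applied with Bazel's default -p0 strip level, so the patch must name
    # files without a/ and b/ prefixes.
    patches = ["//bazel:foreign_cc.patch"],
    sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
    strip_prefix = "rules_foreign_cc-0.9.0",
    url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
)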


@@ -1,283 +0,0 @@
diff --git a/foreign_cc/private/cc_toolchain_util.bzl b/foreign_cc/private/cc_toolchain_util.bzl
index fd7fa4d..188dc5f 100644
--- a/foreign_cc/private/cc_toolchain_util.bzl
+++ b/foreign_cc/private/cc_toolchain_util.bzl
@@ -265,15 +265,24 @@ def get_tools_info(ctx):
cc_toolchain = cc_toolchain,
)
+ cxx = cc_common.get_tool_for_action(
+ feature_configuration = feature_configuration,
+ action_name = ACTION_NAMES.cpp_compile,
+ )
+ cxx_splitted = cxx.split("/")
+ if(cxx_splitted[-1] == "gcc"):
+ cxx_splitted[-1] = "g++"
+ cxx = "/".join(cxx_splitted)
+ if(cxx_splitted[-1] == "clang"):
+ cxx_splitted[-1] = "clang++"
+ cxx = "/".join(cxx_splitted)
+
return CxxToolsInfo(
cc = cc_common.get_tool_for_action(
feature_configuration = feature_configuration,
action_name = ACTION_NAMES.c_compile,
),
- cxx = cc_common.get_tool_for_action(
- feature_configuration = feature_configuration,
- action_name = ACTION_NAMES.cpp_compile,
- ),
+ cxx = cxx,
cxx_linker_static = cc_common.get_tool_for_action(
feature_configuration = feature_configuration,
action_name = ACTION_NAMES.cpp_link_static_library,
diff --git a/toolchains/built_toolchains.bzl b/toolchains/built_toolchains.bzl
index 5e59e79..ddf63a5 100644
--- a/toolchains/built_toolchains.bzl
+++ b/toolchains/built_toolchains.bzl
@@ -28,6 +28,7 @@ _CMAKE_SRCS = {
"3.22.4": [["https://github.com/Kitware/CMake/releases/download/v3.22.4/cmake-3.22.4.tar.gz"], "cmake-3.22.4", "5c55d0b0bc4c191549e3502b8f99a4fe892077611df22b4178cc020626e22a47"],
"3.23.1": [["https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1.tar.gz"], "cmake-3.23.1", "33fd10a8ec687a4d0d5b42473f10459bb92b3ae7def2b745dc10b192760869f3"],
"3.23.2": [["https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2.tar.gz"], "cmake-3.23.2", "f316b40053466f9a416adf981efda41b160ca859e97f6a484b447ea299ff26aa"],
+ "3.25.0": [["https://github.com/Kitware/CMake/releases/download/v3.25.0/cmake-3.25.0.tar.gz"], "cmake-3.25.0", "306463f541555da0942e6f5a0736560f70c487178b9d94a5ae7f34d0538cdd48"],
}
# buildifier: disable=unnamed-macro
@@ -438,6 +439,18 @@ def _ninja_toolchain(version, register_toolchains):
native.register_toolchains(
"@rules_foreign_cc//toolchains:built_ninja_toolchain",
)
+ if version == "1.11.1":
+ maybe(
+ http_archive,
+ name = "ninja_build_src",
+ build_file_content = _ALL_CONTENT,
+ sha256 = "31747ae633213f1eda3842686f83c2aa1412e0f5691d1c14dbbcc67fe7400cea",
+ strip_prefix = "ninja-1.11.1",
+ urls = [
+ "https://github.com/ninja-build/ninja/archive/v1.11.1.tar.gz",
+ ],
+ )
+ return
if version == "1.11.0":
maybe(
http_archive,
diff --git a/toolchains/prebuilt_toolchains.bzl b/toolchains/prebuilt_toolchains.bzl
index dabfb95..d9c38b4 100644
--- a/toolchains/prebuilt_toolchains.bzl
+++ b/toolchains/prebuilt_toolchains.bzl
@@ -67,6 +67,115 @@ def prebuilt_toolchains(cmake_version, ninja_version, register_toolchains):
_make_toolchains(register_toolchains)
def _cmake_toolchains(version, register_toolchains):
+ if "3.25.0" == version:
+ maybe(
+ http_archive,
+ name = "cmake-3.25.0-linux-aarch64",
+ urls = [
+ "https://github.com/Kitware/CMake/releases/download/v3.25.0/cmake-3.25.0-linux-aarch64.tar.gz",
+ ],
+ sha256 = "27da36d6debe9b30f5c498554ae40cd621a55736f5f2ae2618ed95722a59965a",
+ strip_prefix = "cmake-3.25.0-linux-aarch64",
+ build_file_content = _CMAKE_BUILD_FILE.format(
+ bin = "cmake",
+ env = "{}",
+ ),
+ )
+
+ maybe(
+ http_archive,
+ name = "cmake-3.25.0-linux-x86_64",
+ urls = [
+ "https://github.com/Kitware/CMake/releases/download/v3.25.0/cmake-3.25.0-linux-x86_64.tar.gz",
+ ],
+ sha256 = "ac634d6f0a81d7089adc7be5acff66a6bee3b08615f9a947858ce92a9ef59c8b",
+ strip_prefix = "cmake-3.25.0-linux-x86_64",
+ build_file_content = _CMAKE_BUILD_FILE.format(
+ bin = "cmake",
+ env = "{}",
+ ),
+ )
+
+ maybe(
+ http_archive,
+ name = "cmake-3.25.0-macos-universal",
+ urls = [
+ "https://github.com/Kitware/CMake/releases/download/v3.25.0/cmake-3.25.0-macos-universal.tar.gz",
+ ],
+ sha256 = "c088e761534a2078cd9d0581d39f02d3f9ed05302e33135b55c6d619b263b4c3",
+ strip_prefix = "cmake-3.25.0-macos-universal/CMake.app/Contents",
+ build_file_content = _CMAKE_BUILD_FILE.format(
+ bin = "cmake",
+ env = "{}",
+ ),
+ )
+
+ maybe(
+ http_archive,
+ name = "cmake-3.25.0-windows-i386",
+ urls = [
+ "https://github.com/Kitware/CMake/releases/download/v3.25.0/cmake-3.25.0-windows-i386.zip",
+ ],
+ sha256 = "ddd115257a19ff3dd18fc63f32a00ae742f8b62d2e39bc354629903512f99783",
+ strip_prefix = "cmake-3.25.0-windows-i386",
+ build_file_content = _CMAKE_BUILD_FILE.format(
+ bin = "cmake.exe",
+ env = "{}",
+ ),
+ )
+
+ maybe(
+ http_archive,
+ name = "cmake-3.25.0-windows-x86_64",
+ urls = [
+ "https://github.com/Kitware/CMake/releases/download/v3.25.0/cmake-3.25.0-windows-x86_64.zip",
+ ],
+ sha256 = "b46030c10cab1170355952f9ac59f7e6dabc248070fc53f15dff11d4ed2910f8",
+ strip_prefix = "cmake-3.25.0-windows-x86_64",
+ build_file_content = _CMAKE_BUILD_FILE.format(
+ bin = "cmake.exe",
+ env = "{}",
+ ),
+ )
+
+ # buildifier: leave-alone
+ maybe(
+ prebuilt_toolchains_repository,
+ name = "cmake_3.25.0_toolchains",
+ repos = {
+ "cmake-3.25.0-linux-aarch64": [
+ "@platforms//cpu:aarch64",
+ "@platforms//os:linux",
+ ],
+ "cmake-3.25.0-linux-x86_64": [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:linux",
+ ],
+ "cmake-3.25.0-macos-universal": [
+ "@platforms//os:macos",
+ ],
+ "cmake-3.25.0-windows-i386": [
+ "@platforms//cpu:x86_32",
+ "@platforms//os:windows",
+ ],
+ "cmake-3.25.0-windows-x86_64": [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ },
+ tool = "cmake",
+ )
+
+ if register_toolchains:
+ native.register_toolchains(
+ "@cmake_3.25.0_toolchains//:cmake-3.25.0-linux-aarch64_toolchain",
+ "@cmake_3.25.0_toolchains//:cmake-3.25.0-linux-x86_64_toolchain",
+ "@cmake_3.25.0_toolchains//:cmake-3.25.0-macos-universal_toolchain",
+ "@cmake_3.25.0_toolchains//:cmake-3.25.0-windows-i386_toolchain",
+ "@cmake_3.25.0_toolchains//:cmake-3.25.0-windows-x86_64_toolchain",
+ )
+
+ return
if "3.23.2" == version:
maybe(
http_archive,
@@ -4196,6 +4305,78 @@ def _cmake_toolchains(version, register_toolchains):
fail("Unsupported version: " + str(version))
def _ninja_toolchains(version, register_toolchains):
+ if "1.11.1" == version:
+ maybe(
+ http_archive,
+ name = "ninja_1.11.1_linux",
+ urls = [
+ "https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-linux.zip",
+ ],
+ sha256 = "b901ba96e486dce377f9a070ed4ef3f79deb45f4ffe2938f8e7ddc69cfb3df77",
+ strip_prefix = "",
+ build_file_content = _NINJA_BUILD_FILE.format(
+ bin = "ninja",
+ env = "{\"NINJA\": \"$(execpath :ninja_bin)\"}",
+ ),
+ )
+
+ maybe(
+ http_archive,
+ name = "ninja_1.11.1_mac",
+ urls = [
+ "https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-mac.zip",
+ ],
+ sha256 = "482ecb23c59ae3d4f158029112de172dd96bb0e97549c4b1ca32d8fad11f873e",
+ strip_prefix = "",
+ build_file_content = _NINJA_BUILD_FILE.format(
+ bin = "ninja",
+ env = "{\"NINJA\": \"$(execpath :ninja_bin)\"}",
+ ),
+ )
+
+ maybe(
+ http_archive,
+ name = "ninja_1.11.1_win",
+ urls = [
+ "https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-win.zip",
+ ],
+ sha256 = "524b344a1a9a55005eaf868d991e090ab8ce07fa109f1820d40e74642e289abc",
+ strip_prefix = "",
+ build_file_content = _NINJA_BUILD_FILE.format(
+ bin = "ninja.exe",
+ env = "{\"NINJA\": \"$(execpath :ninja_bin)\"}",
+ ),
+ )
+
+ # buildifier: leave-alone
+ maybe(
+ prebuilt_toolchains_repository,
+ name = "ninja_1.11.1_toolchains",
+ repos = {
+ "ninja_1.11.1_linux": [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:linux",
+ ],
+ "ninja_1.11.1_mac": [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:macos",
+ ],
+ "ninja_1.11.1_win": [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ },
+ tool = "ninja",
+ )
+
+ if register_toolchains:
+ native.register_toolchains(
+ "@ninja_1.11.1_toolchains//:ninja_1.11.1_linux_toolchain",
+ "@ninja_1.11.1_toolchains//:ninja_1.11.1_mac_toolchain",
+ "@ninja_1.11.1_toolchains//:ninja_1.11.1_win_toolchain",
+ )
+
+ return
if "1.11.0" == version:
maybe(
http_archive,
diff --git a/toolchains/prebuilt_toolchains.py b/toolchains/prebuilt_toolchains.py
index 5288b27..a193021 100755
--- a/toolchains/prebuilt_toolchains.py
+++ b/toolchains/prebuilt_toolchains.py
@@ -10,6 +10,7 @@ CMAKE_SHA256_URL_TEMPLATE = "https://cmake.org/files/v{minor}/cmake-{full}-SHA-2
CMAKE_URL_TEMPLATE = "https://github.com/Kitware/CMake/releases/download/v{full}/{file}"
CMAKE_VERSIONS = [
+ "3.25.0",
"3.23.2",
"3.23.1",
"3.22.4",
@@ -116,6 +117,7 @@ NINJA_TARGETS = {
}
NINJA_VERSIONS = (
+ "1.11.1",
"1.10.2",
"1.10.1",
"1.10.0",

cmake/patch.sh (new file, 18 additions)

@@ -0,0 +1,18 @@
#! /bin/sh
set +x
set -euo pipefail
patch="$1"; shift
# ignore the error if the patch is already applied
if ! out=$(patch -p1 -N -r "rejects.bin" < "$patch")
then
echo "$out" | grep -q "Reversed (or previously applied) patch detected! Skipping patch."
test -s "rejects.bin" # Make sure we have rejects.
else
test -f "rejects.bin" && ! test -s "rejects.bin" # Make sure we have no rejects.
fi
rm -f "rejects.bin"
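
Aside (not part of the diff): cmake/patch.sh wraps "patch -p1 -N" so that re-applying an already-applied patch counts as success; it only tolerates the "Reversed (or previously applied) patch detected" case and still fails if real rejects are written. How the script is wired into the build is not shown in this excerpt; one hypothetical way to expose it to other Bazel packages (target name and BUILD location are illustrative only, not from the repository):

# cmake/BUILD (hypothetical)
filegroup(
    name = "patch_script",
    srcs = ["patch.sh"],
    visibility = ["//visibility:public"],
)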


@@ -463,14 +463,6 @@ public:
Option<bool> validate_reference_filter(const std::string& filter_query) const;
Option<std::string> get_reference_field(const std::string & collection_name) const;
Option<bool> get_reference_filter_ids(const std::string & filter_query,
filter_result_t& filter_result,
const std::string & collection_name) const;
Option<bool> validate_reference_filter(const std::string& filter_query) const;
Option<nlohmann::json> get(const std::string & id) const;
Option<std::string> remove(const std::string & id, bool remove_from_store = true);


@@ -411,7 +411,7 @@ struct field {
static Option<bool> json_field_to_field(bool enable_nested_fields, nlohmann::json& field_json,
std::vector<field>& the_fields,
string& fallback_field_type, size_t& num_auto_detect_fields);
string& fallback_field_type, size_t& num_auto_detect_fields,const nlohmann::json& all_fields_json = nlohmann::json());
static Option<bool> json_fields_to_fields(bool enable_nested_fields,
nlohmann::json& fields_json,
@@ -475,7 +475,7 @@ struct field {
}
auto op = json_field_to_field(enable_nested_fields,
field_json, the_fields, fallback_field_type, num_auto_detect_fields);
field_json, the_fields, fallback_field_type, num_auto_detect_fields, fields_json);
if(!op.ok()) {
return op;
}


@@ -246,6 +246,7 @@ nlohmann::json Collection::get_summary_json() const {
field_json[fields::reference] = coll_field.reference;
}
fields_arr.push_back(field_json);
}
@@ -2580,71 +2581,6 @@ Option<bool> Collection::validate_reference_filter(const std::string& filter_que
return Option<bool>(true);
}
Option<std::string> Collection::get_reference_field(const std::string & collection_name) const {
std::shared_lock lock(mutex);
std::string reference_field_name;
for (auto const& pair: reference_fields) {
auto reference_pair = pair.second;
if (reference_pair.collection == collection_name) {
reference_field_name = reference_pair.field;
break;
}
}
if (reference_field_name.empty()) {
return Option<std::string>(400, "Could not find any field in `" + name + "` referencing the collection `"
+ collection_name + "`.");
}
return Option(reference_field_name);
}
Option<bool> Collection::get_reference_filter_ids(const std::string & filter_query,
filter_result_t& filter_result,
const std::string & collection_name) const {
auto reference_field_op = get_reference_field(collection_name);
if (!reference_field_op.ok()) {
return Option<bool>(reference_field_op.code(), reference_field_op.error());
}
std::shared_lock lock(mutex);
const std::string doc_id_prefix = std::to_string(collection_id) + "_" + DOC_ID_PREFIX + "_";
filter_node_t* filter_tree_root = nullptr;
Option<bool> parse_op = filter::parse_filter_query(filter_query, search_schema,
store, doc_id_prefix, filter_tree_root);
if(!parse_op.ok()) {
return parse_op;
}
// Reference helper field has the sequence id of other collection's documents.
auto field_name = reference_field_op.get() + REFERENCE_HELPER_FIELD_SUFFIX;
auto filter_op = index->do_reference_filtering_with_lock(filter_tree_root, filter_result, field_name);
if (!filter_op.ok()) {
return filter_op;
}
delete filter_tree_root;
return Option<bool>(true);
}
Option<bool> Collection::validate_reference_filter(const std::string& filter_query) const {
std::shared_lock lock(mutex);
const std::string doc_id_prefix = std::to_string(collection_id) + "_" + DOC_ID_PREFIX + "_";
filter_node_t* filter_tree_root = nullptr;
Option<bool> filter_op = filter::parse_filter_query(filter_query, search_schema,
store, doc_id_prefix, filter_tree_root);
if(!filter_op.ok()) {
return filter_op;
}
delete filter_tree_root;
return Option<bool>(true);
}
bool Collection::facet_value_to_string(const facet &a_facet, const facet_count_t &facet_count,
const nlohmann::json &document, std::string &value) const {


@@ -523,7 +523,7 @@ Option<bool> filter::parse_filter_query(const std::string& filter_query,
Option<bool> field::json_field_to_field(bool enable_nested_fields, nlohmann::json& field_json,
std::vector<field>& the_fields,
string& fallback_field_type, size_t& num_auto_detect_fields) {
string& fallback_field_type, size_t& num_auto_detect_fields, const nlohmann::json& all_fields_json) {
if(field_json["name"] == "id") {
// No field should exist with the name "id" as it is reserved for internal use