Mirror of https://github.com/typesense/typesense.git (synced 2025-05-22 14:55:26 +08:00)
add test and refactor
parent e37b9bf775
commit 6fda5c9821
src/core_api.cpp
@@ -2807,66 +2807,68 @@ bool put_conversation_model(const std::shared_ptr<http_req>& req, const std::shared_ptr<http_res>& res)
The previous version nested the whole handler body inside `if(analytics_store) { ... } else { LOG(ERROR) << "Analytics store not initialized."; ... }`; the refactor flattens that with an early return. The function after this commit:

bool get_click_events(const std::shared_ptr<http_req>& req, const std::shared_ptr<http_res>& res) {
    auto analytics_store = AnalyticsManager::get_instance().get_analytics_store();
    if (!analytics_store) {
        LOG(ERROR) << "Analytics store not initialized.";
        return true;
    }

    export_state_t *export_state = nullptr;
    auto click_event_prefix = std::string(AnalyticsManager::CLICK_EVENT) + "_";
    if (req->data == nullptr) {
        export_state = new export_state_t();
        req->data = export_state;

        export_state->iter_upper_bound_key = std::string(AnalyticsManager::CLICK_EVENT) + "`";
        export_state->iter_upper_bound = new rocksdb::Slice(export_state->iter_upper_bound_key);
        export_state->it = analytics_store->scan(click_event_prefix, export_state->iter_upper_bound);
    } else {
        export_state = dynamic_cast<export_state_t *>(req->data);
    }

    if (export_state->it != nullptr) {
        rocksdb::Iterator *it = export_state->it;
        size_t batch_counter = 0;
        std::string().swap(res->body);

        if (!it->Valid()) {
            LOG(ERROR) << "No click events found in db.";
            req->last_chunk_aggregate = true;
            res->final = true;
            res->set_404();
            stream_response(req, res);
            return false;
        }

        while (it->Valid() && it->key().ToString().compare(0, click_event_prefix.size(), click_event_prefix) == 0) {
            res->body += it->value().ToString();
            it->Next();

            // append a new line character if there is going to be one more record to send
            if (it->Valid() &&
                it->key().ToString().compare(0, click_event_prefix.size(), click_event_prefix) == 0) {
                res->body += "\n";
                req->last_chunk_aggregate = false;
                res->final = false;
            } else {
                req->last_chunk_aggregate = true;
                res->final = true;
            }

            batch_counter++;
            if (batch_counter == export_state->export_batch_size) {
                break;
            }
        }
    }

    res->content_type_header = "text/plain; charset=utf-8";
    res->status_code = 200;

    stream_response(req, res);

    return true;
}
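The `iter_upper_bound` slice above is the standard RocksDB idiom for bounding a prefix scan: '`' is the ASCII character immediately after '_', so every key that begins with CLICK_EVENT + "_" sorts strictly below CLICK_EVENT + "`". A minimal standalone sketch of that pattern against a raw rocksdb::DB (the "$CE_..." key format and the database path are made up for illustration; Typesense's Store::scan() is assumed to wrap something similar):

#include <iostream>
#include <string>
#include <rocksdb/db.h>
#include <rocksdb/options.h>
#include <rocksdb/slice.h>

int main() {
    rocksdb::DB* db = nullptr;
    rocksdb::Options options;
    options.create_if_missing = true;
    // hypothetical path, stands in for the analytics store directory
    rocksdb::Status status = rocksdb::DB::Open(options, "/tmp/prefix_scan_demo", &db);
    if (!status.ok()) {
        std::cerr << status.ToString() << std::endl;
        return 1;
    }

    // a few keys under the prefix we care about, plus one outside it
    db->Put(rocksdb::WriteOptions(), "$CE_1700000001_13", "{\"doc_id\":\"21\"}");
    db->Put(rocksdb::WriteOptions(), "$CE_1700000002_13", "{\"doc_id\":\"12\"}");
    db->Put(rocksdb::WriteOptions(), "$QC_1700000003_13", "{\"q\":\"tech\"}");

    const std::string prefix = "$CE_";
    // '`' is the ASCII character after '_', so "$CE`" is the first key past the prefix range
    const std::string upper_bound_key = "$CE`";
    rocksdb::Slice upper_bound(upper_bound_key);

    rocksdb::ReadOptions read_opts;
    read_opts.iterate_upper_bound = &upper_bound;   // iterator reports !Valid() once a key >= this bound is reached

    rocksdb::Iterator* it = db->NewIterator(read_opts);
    for (it->Seek(prefix); it->Valid(); it->Next()) {
        std::cout << it->key().ToString() << " => " << it->value().ToString() << "\n";
    }
    delete it;
    delete db;
    return 0;
}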
@@ -7,27 +7,33 @@
#include "raft_server.h"
#include "conversation_model_manager.h"
#include "conversation_manager.h"
#include <analytics_manager.h>

class CoreAPIUtilsTest : public ::testing::Test {
protected:
    Store *store, *analytics_store;
    CollectionManager & collectionManager = CollectionManager::get_instance();
    std::atomic<bool> quit = false;

    std::vector<std::string> query_fields;
    std::vector<sort_by> sort_fields;

    AnalyticsManager& analyticsManager = AnalyticsManager::get_instance();

    void setupCollection() {
        std::string state_dir_path = "/tmp/typesense_test/core_api_utils";
        std::string analytics_db_path = "/tmp/typesense_test/analytics_db2";
        LOG(INFO) << "Truncating and creating: " << state_dir_path;
        system(("rm -rf "+state_dir_path+" && mkdir -p "+state_dir_path).c_str());

        store = new Store(state_dir_path);
        analytics_store = new Store(analytics_db_path);
        collectionManager.init(store, 1.0, "auth_key", quit);
        collectionManager.load(8, 1000);

        ConversationModelManager::init(store);
        ConversationManager::init(store);
        analyticsManager.init(store, analytics_store);
    }

    virtual void SetUp() {

@@ -37,6 +43,7 @@ protected:
    virtual void TearDown() {
        collectionManager.dispose();
        delete store;
        delete analytics_store;
    }
};

@@ -1495,4 +1502,119 @@ TEST_F(CoreAPIUtilsTest, TestInvalidConversationModels) {

    ASSERT_EQ(400, resp->status_code);
    ASSERT_EQ("Property `model_name` is not provided or not a string.", nlohmann::json::parse(resp->body)["message"]);
}

TEST_F(CoreAPIUtilsTest, GetClickEvents) {
    // reset analytics store
    analyticsManager.resetRateLimit();
    analyticsManager.resetAnalyticsStore();

    nlohmann::json schema = R"({
        "name": "titles",
        "fields": [
            {"name": "name", "type": "string" },
            {"name": "points", "type": "int32" }
        ]
    })"_json;

    auto op = collectionManager.create_collection(schema);
    ASSERT_TRUE(op.ok());
    Collection* titles = op.get();

    std::shared_ptr<http_req> req = std::make_shared<http_req>();
    std::shared_ptr<http_res> res = std::make_shared<http_res>(nullptr);

    // no events in db
    get_click_events(req, res);
    ASSERT_EQ("{\"message\": \"Not Found\"}", res->body);

    // add some events
    nlohmann::json event1 = R"({
        "type": "query_click",
        "data": {
            "q": "technology",
            "collection": "titles",
            "doc_id": "21",
            "position": 2,
            "user_id": "13"
        }
    })"_json;

    req->body = event1.dump();
    ASSERT_TRUE(post_create_event(req, res));

    nlohmann::json event2 = R"({
        "type": "query_click",
        "data": {
            "q": "technology",
            "collection": "titles",
            "doc_id": "12",
            "position": 1,
            "user_id": "13"
        }
    })"_json;
    req->body = event2.dump();
    ASSERT_TRUE(post_create_event(req, res));

    nlohmann::json event3 = R"({
        "type": "query_click",
        "data": {
            "q": "technology",
            "collection": "titles",
            "doc_id": "52",
            "position": 5,
            "user_id": "13"
        }
    })"_json;
    req->body = event3.dump();
    ASSERT_TRUE(post_create_event(req, res));

    event1["collection_id"] = "0";
    event1["timestamp"] = 1521512521;
    event1["event_type"] = "click_events";
    event2["collection_id"] = "0";
    event2["timestamp"] = 1521514354;
    event2["event_type"] = "click_events";
    event3["collection_id"] = "0";
    event3["timestamp"] = 1521515382;
    event3["event_type"] = "click_events";

    nlohmann::json click_events = nlohmann::json::array();
    click_events.push_back(event1);
    click_events.push_back(event2);
    click_events.push_back(event3);

    req->body = click_events.dump();
    ASSERT_TRUE(post_replicate_events(req, res));

    // get click events
    req->data = nullptr;
    get_click_events(req, res);

    std::vector<std::string> res_strs;
    StringUtils::split(res->body, res_strs, "\n");

    auto result = nlohmann::json::array();
    result.push_back(nlohmann::json::parse(res_strs[0]));
    result.push_back(nlohmann::json::parse(res_strs[1]));
    result.push_back(nlohmann::json::parse(res_strs[2]));

    ASSERT_EQ("0", result[0]["collection_id"]);
    ASSERT_EQ("13", result[0]["data"]["user_id"]);
    ASSERT_EQ("21", result[0]["data"]["doc_id"]);
    ASSERT_EQ(2, result[0]["data"]["position"]);
    ASSERT_EQ("technology", result[0]["data"]["q"]);

    ASSERT_EQ("0", result[1]["collection_id"]);
    ASSERT_EQ("13", result[1]["data"]["user_id"]);
    ASSERT_EQ("12", result[1]["data"]["doc_id"]);
    ASSERT_EQ(1, result[1]["data"]["position"]);
    ASSERT_EQ("technology", result[1]["data"]["q"]);

    ASSERT_EQ("0", result[2]["collection_id"]);
    ASSERT_EQ("13", result[2]["data"]["user_id"]);
    ASSERT_EQ("52", result[2]["data"]["doc_id"]);
    ASSERT_EQ(5, result[2]["data"]["position"]);
    ASSERT_EQ("technology", result[2]["data"]["q"]);
}
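Since each stored event is appended to res->body separated by "\n", the export reads back as newline-delimited JSON, which is why the test splits on "\n" before parsing. Outside the test helpers, a client-side sketch for consuming such a payload could look like this (the payload literal below is fabricated; only the one-object-per-line format is taken from the test above):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>

// Parse a newline-delimited JSON export into individual event objects.
std::vector<nlohmann::json> parse_ndjson(const std::string& body) {
    std::vector<nlohmann::json> events;
    std::istringstream stream(body);
    std::string line;
    while (std::getline(stream, line)) {
        if (line.empty()) {
            continue;   // tolerate a trailing newline
        }
        events.push_back(nlohmann::json::parse(line));
    }
    return events;
}

int main() {
    // fabricated two-line payload in the same shape the test expects
    std::string body =
        "{\"collection_id\":\"0\",\"data\":{\"doc_id\":\"21\",\"position\":2,\"q\":\"technology\",\"user_id\":\"13\"}}\n"
        "{\"collection_id\":\"0\",\"data\":{\"doc_id\":\"12\",\"position\":1,\"q\":\"technology\",\"user_id\":\"13\"}}";

    for (const auto& event : parse_ndjson(body)) {
        std::cout << event["data"]["doc_id"].get<std::string>()
                  << " @ position " << event["data"]["position"].get<int>() << "\n";
    }
    return 0;
}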