Commit d97f1bd7 authored by xj.lin

Merge branch 'branch-0.4.0' into new_gpureource


Former-commit-id: 559676452dbcf72cb2a3f195c5233ee45bc97ef3
......@@ -21,6 +21,9 @@ Please mark all change in change log and use the ticket from JIRA.
- MS-449 - Add vectors twice success, once with ids, the other no ids
- MS-461 - Mysql meta unittest failed
- MS-462 - Run milvus server twice, should display error
- MS-463 - Search timeout
- MS-467 - mysql db test failed
- MS-471 - code coverage run failed
## Improvement
- MS-327 - Clean code for milvus
......
......@@ -83,9 +83,13 @@ resource_config:
# enable_executor: true
# connection list, length: 0~N
# format: -${resource_name}===${resource_name}
connections:
- ssda===cpu
- cpu===gpu0
io:
speed: 500
endpoint: ssda===cpu
pcie:
speed: 11000
endpoint: cpu===gpu0
# - cpu===gtx1660
......@@ -12,7 +12,7 @@ FILE_INFO_OUTPUT_NEW="output_new.info"
DIR_LCOV_OUTPUT="lcov_out"
DIR_GCNO="cmake_build"
DIR_UNITTEST="milvus/bin"
DIR_UNITTEST="milvus/unittest"
MYSQL_USER_NAME=root
MYSQL_PASSWORD=Fantast1c
......@@ -77,6 +77,7 @@ for test in `ls ${DIR_UNITTEST}`; do
# run unittest
./${DIR_UNITTEST}/${test} "${args}"
if [ $? -ne 0 ]; then
echo ${args}
echo ${DIR_UNITTEST}/${test} "run failed"
fi
done
......@@ -93,6 +94,7 @@ ${LCOV_CMD} -r "${FILE_INFO_OUTPUT}" -o "${FILE_INFO_OUTPUT_NEW}" \
"/usr/*" \
"*/boost/*" \
"*/cmake_build/*_ep-prefix/*" \
"src/core/cmake_build*" \
# gen html report
${LCOV_GEN_CMD} "${FILE_INFO_OUTPUT_NEW}" --output-directory ${DIR_LCOV_OUTPUT}/
\ No newline at end of file
/*******************************************************************************
* Copyright 上海赜睿信息科技有限公司(Zilliz) - All Rights Reserved
* Unauthorized copying of this file, via any medium is strictly prohibited.
* Proprietary and confidential.
******************************************************************************/
#include "DBImpl.h"
#include "Factories.h"
namespace zilliz {
namespace milvus {
namespace engine {
DB::~DB() = default;
void DB::Open(const Options& options, DB** dbptr) {
*dbptr = DBFactory::Build(options);
}
} // namespace engine
} // namespace milvus
} // namespace zilliz
......@@ -20,7 +20,11 @@ class Env;
class DB {
public:
static void Open(const Options& options, DB** dbptr);
DB() = default;
DB(const DB&) = delete;
DB& operator=(const DB&) = delete;
virtual ~DB() = default;
virtual Status Start() = 0;
virtual Status Stop() = 0;
......@@ -55,11 +59,6 @@ public:
virtual Status DropAll() = 0;
DB() = default;
DB(const DB&) = delete;
DB& operator=(const DB&) = delete;
virtual ~DB() = 0;
}; // DB
} // namespace engine
......
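Editorial note: the DB.h change above makes the default constructor public, deletes copy construction and assignment, and replaces the pure-virtual destructor with a defaulted one, so DB instances are created only through the factory. A minimal sketch of the resulting usage, assuming the factory path that DBWrapper switches to later in this commit (the function name and header paths are illustrative assumptions, not part of the commit):

#include <memory>
#include "db/DB.h"            // assumed header locations
#include "db/Factories.h"
#include "db/Options.h"

void ExampleOpenDB() {
    auto opt = zilliz::milvus::engine::OptionsFactory::Build();                      // placeholder options
    zilliz::milvus::engine::DB* raw = zilliz::milvus::engine::DBFactory::Build(opt); // replaces DB::Open(opt, &db)
    std::unique_ptr<zilliz::milvus::engine::DB> db(raw);                             // DB is non-copyable; single owner
    db->Start();
    // ... insert / query through the DB interface ...
    db->Stop();
}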
......@@ -183,7 +183,7 @@ Status DBImpl::GetTableRowCount(const std::string& table_id, uint64_t& row_count
Status DBImpl::InsertVectors(const std::string& table_id_,
uint64_t n, const float* vectors, IDNumbers& vector_ids_) {
ENGINE_LOG_DEBUG << "Insert " << n << " vectors to cache";
// ENGINE_LOG_DEBUG << "Insert " << n << " vectors to cache";
Status status;
zilliz::milvus::server::CollectInsertMetrics metrics(n, status);
......@@ -191,7 +191,7 @@ Status DBImpl::InsertVectors(const std::string& table_id_,
// std::chrono::microseconds time_span = std::chrono::duration_cast<std::chrono::microseconds>(end_time - start_time);
// double average_time = double(time_span.count()) / n;
ENGINE_LOG_DEBUG << "Insert vectors to cache finished";
// ENGINE_LOG_DEBUG << "Insert vectors to cache finished";
return status;
}
......@@ -778,6 +778,7 @@ void DBImpl::BackgroundBuildIndex() {
}
Status DBImpl::DropAll() {
Stop();
return meta_ptr_->DropAll();
}
......
......@@ -35,6 +35,7 @@ class DBImpl : public DB {
using MetaPtr = meta::Meta::Ptr;
explicit DBImpl(const Options &options);
~DBImpl();
Status Start() override;
Status Stop() override;
......@@ -91,8 +92,6 @@ class DBImpl : public DB {
Status DropIndex(const std::string& table_id) override;
~DBImpl() override;
private:
Status QueryAsync(const std::string &table_id,
const meta::TableFilesSchema &files,
......
......@@ -2001,9 +2001,6 @@ Status MySQLMetaImpl::Count(const std::string &table_id, uint64_t &result) {
}
Status MySQLMetaImpl::DropAll() {
if (boost::filesystem::is_directory(options_.path)) {
boost::filesystem::remove_all(options_.path);
}
try {
ScopedConnection connectionPtr(*mysql_connection_pool_, safe_grab);
......
......@@ -1205,9 +1205,7 @@ Status SqliteMetaImpl::Count(const std::string &table_id, uint64_t &result) {
}
Status SqliteMetaImpl::DropAll() {
if (boost::filesystem::is_directory(options_.path)) {
boost::filesystem::remove_all(options_.path);
}
return Status::OK();
}
......
......@@ -29,17 +29,34 @@ class Metrics {
private:
static MetricsBase &CreateMetricsCollector();
};
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectMetricsBase {
protected:
CollectMetricsBase() {
start_time_ = METRICS_NOW_TIME;
}
virtual ~CollectMetricsBase() = default;
double TimeFromBegine() {
auto end_time = METRICS_NOW_TIME;
return METRICS_MICROSECONDS(start_time_, end_time);
}
protected:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
};
class CollectInsertMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectInsertMetrics : CollectMetricsBase {
public:
CollectInsertMetrics(size_t n, engine::Status& status) : n_(n), status_(status) {
start_time_ = METRICS_NOW_TIME;
}
~CollectInsertMetrics() {
if(n_ > 0) {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
double avg_time = total_time / n_;
for (int i = 0; i < n_; ++i) {
Metrics::GetInstance().AddVectorsDurationHistogramOberve(avg_time);
......@@ -57,22 +74,19 @@ public:
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
size_t n_;
engine::Status& status_;
};
class CollectQueryMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectQueryMetrics : CollectMetricsBase {
public:
CollectQueryMetrics(size_t nq) : nq_(nq) {
start_time_ = METRICS_NOW_TIME;
}
~CollectQueryMetrics() {
if(nq_ > 0) {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
for (int i = 0; i < nq_; ++i) {
server::Metrics::GetInstance().QueryResponseSummaryObserve(total_time);
}
......@@ -83,112 +97,90 @@ public:
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
size_t nq_;
};
class CollectMergeFilesMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectMergeFilesMetrics : CollectMetricsBase {
public:
CollectMergeFilesMetrics() {
start_time_ = METRICS_NOW_TIME;
}
~CollectMergeFilesMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
server::Metrics::GetInstance().MemTableMergeDurationSecondsHistogramObserve(total_time);
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
};
class CollectBuildIndexMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectBuildIndexMetrics : CollectMetricsBase {
public:
CollectBuildIndexMetrics() {
start_time_ = METRICS_NOW_TIME;
}
~CollectBuildIndexMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
server::Metrics::GetInstance().BuildIndexDurationSecondsHistogramObserve(total_time);
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
};
class CollectExecutionEngineMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectExecutionEngineMetrics : CollectMetricsBase {
public:
CollectExecutionEngineMetrics(double physical_size) : physical_size_(physical_size) {
start_time_ = METRICS_NOW_TIME;
}
~CollectExecutionEngineMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
server::Metrics::GetInstance().FaissDiskLoadDurationSecondsHistogramObserve(total_time);
server::Metrics::GetInstance().FaissDiskLoadSizeBytesHistogramObserve(physical_size_);
server::Metrics::GetInstance().FaissDiskLoadIOSpeedGaugeSet(physical_size_ / double(total_time));
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
double physical_size_;
};
class CollectSerializeMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectSerializeMetrics : CollectMetricsBase {
public:
CollectSerializeMetrics(size_t size) : size_(size) {
start_time_ = METRICS_NOW_TIME;
}
~CollectSerializeMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
server::Metrics::GetInstance().DiskStoreIOSpeedGaugeSet((double) size_ / total_time);
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
size_t size_;
};
class CollectAddMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectAddMetrics : CollectMetricsBase {
public:
CollectAddMetrics(size_t n, uint16_t dimension) : n_(n), dimension_(dimension) {
start_time_ = METRICS_NOW_TIME;
}
~CollectAddMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
server::Metrics::GetInstance().AddVectorsPerSecondGaugeSet(static_cast<int>(n_),
static_cast<int>(dimension_),
total_time);
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
size_t n_;
uint16_t dimension_;
};
class CollectDurationMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectDurationMetrics : CollectMetricsBase {
public:
CollectDurationMetrics(int index_type) : index_type_(index_type) {
start_time_ = METRICS_NOW_TIME;
}
~CollectDurationMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
switch (index_type_) {
case engine::meta::TableFileSchema::RAW: {
server::Metrics::GetInstance().SearchRawDataDurationSecondsHistogramObserve(total_time);
......@@ -205,20 +197,17 @@ public:
}
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
int index_type_;
};
class CollectSearchTaskMetrics {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CollectSearchTaskMetrics : CollectMetricsBase {
public:
CollectSearchTaskMetrics(int index_type) : index_type_(index_type) {
start_time_ = METRICS_NOW_TIME;
}
~CollectSearchTaskMetrics() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
switch(index_type_) {
case engine::meta::TableFileSchema::RAW: {
server::Metrics::GetInstance().SearchRawDataDurationSecondsHistogramObserve(total_time);
......@@ -236,27 +225,20 @@ public:
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
int index_type_;
};
class MetricCollector {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class MetricCollector : CollectMetricsBase {
public:
MetricCollector() {
server::Metrics::GetInstance().MetaAccessTotalIncrement();
start_time_ = METRICS_NOW_TIME;
}
~MetricCollector() {
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time_, end_time);
auto total_time = TimeFromBegine();
server::Metrics::GetInstance().MetaAccessDurationSecondsHistogramObserve(total_time);
}
private:
using TIME_POINT = std::chrono::system_clock::time_point;
TIME_POINT start_time_;
};
......
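Editorial note: the refactor above hoists the duplicated start-time bookkeeping into CollectMetricsBase, so each RAII collector derives from it and only reports the elapsed time in its destructor. A sketch of what a new collector would look like under this pattern (the class name and the observed metric are hypothetical):

class CollectExampleMetrics : CollectMetricsBase {
public:
    CollectExampleMetrics() = default;         // base constructor records start_time_

    ~CollectExampleMetrics() {
        auto total_time = TimeFromBegine();     // elapsed time since construction
        // report it to the appropriate histogram/gauge, e.g.:
        // server::Metrics::GetInstance().MetaAccessDurationSecondsHistogramObserve(total_time);
    }
};

// Usage: construct in a scope; the metric is recorded when the scope exits.
// { CollectExampleMetrics metrics;  /* timed work */ }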
......@@ -6,6 +6,7 @@
#include "DBWrapper.h"
#include "ServerConfig.h"
#include "db/Factories.h"
#include "utils/CommonUtil.h"
#include "utils/Log.h"
#include "utils/StringHelpFunctions.h"
......@@ -95,8 +96,7 @@ ServerError DBWrapper::StartService() {
//create db instance
std::string msg = opt.meta.path;
try {
engine::DB* db = nullptr;
zilliz::milvus::engine::DB::Open(opt, &db);
engine::DB* db = engine::DBFactory::Build(opt);
db_.reset(db);
} catch(std::exception& ex) {
msg = ex.what();
......
......@@ -58,7 +58,7 @@ static const char* CONFIG_RESOURCE_ENABLE_LOADER = "enable_loader";
static const char* CONFIG_RESOURCE_ENABLE_EXECUTOR = "enable_executor";
static const char* CONFIG_RESOURCE_CONNECTIONS = "connections";
static const char* CONFIG_SPEED_CONNECTIONS = "speed";
static const char* CONFIG_ENDPOINT_CONNECTIONS = "connections";
static const char* CONFIG_ENDPOINT_CONNECTIONS = "endpoint";
class ServerConfig {
......
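Editorial note: CONFIG_ENDPOINT_CONNECTIONS previously duplicated the value "connections", so the per-connection endpoint key added to server_config.yaml earlier in this commit could never be read; it now matches the YAML key. A minimal, illustrative parser for an endpoint value of the form ${resource_name}===${resource_name} (not the project's own implementation):

#include <string>
#include <utility>

// Split "cpu===gpu0" into its two resource names; returns an empty second
// element if the "===" delimiter is missing so the caller can reject it.
std::pair<std::string, std::string> ParseEndpoint(const std::string& endpoint) {
    const std::string delimiter = "===";
    auto pos = endpoint.find(delimiter);
    if (pos == std::string::npos) {
        return {endpoint, ""};
    }
    return {endpoint.substr(0, pos), endpoint.substr(pos + delimiter.size())};
}

// ParseEndpoint("cpu===gpu0") -> {"cpu", "gpu0"}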
......@@ -40,7 +40,7 @@ set(unittest_libs
add_subdirectory(server)
add_subdirectory(db)
add_subdirectory(knowhere)
#add_subdirectory(knowhere)
add_subdirectory(metrics)
#add_subdirectory(scheduler)
#add_subdirectory(storage)
\ No newline at end of file
......@@ -90,5 +90,5 @@ endif()
target_link_libraries(db_test ${db_libs} ${knowhere_libs} ${unittest_libs})
install(TARGETS db_test DESTINATION bin)
install(TARGETS db_test DESTINATION unittest)
......@@ -112,7 +112,7 @@ TEST_F(MySQLDBTest, DB_TEST) {
std::this_thread::sleep_for(std::chrono::seconds(3));
}
std::cout << "Search AAA done" << std::endl;
std::cout << "All search done!" << std::endl;
});
int loop = INSERT_LOOP;
......@@ -257,13 +257,13 @@ TEST_F(MySQLDBTest, DELETE_TEST) {
std::this_thread::sleep_for(std::chrono::microseconds(1));
}
std::vector<engine::meta::DateT> dates;
stat = db_->DeleteTable(TABLE_NAME, dates);
// std::cout << "5 sec start" << std::endl;
std::this_thread::sleep_for(std::chrono::seconds(5));
// std::cout << "5 sec finish" << std::endl;
ASSERT_TRUE(stat.ok());
db_->HasTable(TABLE_NAME, has_table);
ASSERT_FALSE(has_table);
// std::vector<engine::meta::DateT> dates;
// stat = db_->DeleteTable(TABLE_NAME, dates);
//// std::cout << "5 sec start" << std::endl;
// std::this_thread::sleep_for(std::chrono::seconds(5));
//// std::cout << "5 sec finish" << std::endl;
// ASSERT_TRUE(stat.ok());
//
// db_->HasTable(TABLE_NAME, has_table);
// ASSERT_FALSE(has_table);
};
......@@ -12,6 +12,7 @@
#include "utils.h"
#include "db/Factories.h"
#include "db/Options.h"
#include "server/ServerConfig.h"
INITIALIZE_EASYLOGGINGPP
......@@ -60,6 +61,9 @@ engine::Options DBTest::GetOptions() {
void DBTest::SetUp() {
InitLog();
server::ConfigNode& config = server::ServerConfig::GetInstance().GetConfig(server::CONFIG_CACHE);
config.AddSequenceItem(server::CONFIG_GPU_IDS, "0");
auto res_mgr = engine::ResMgrInst::GetInstance();
res_mgr->Clear();
res_mgr->Add(engine::ResourceFactory::Create("disk", "DISK", 0, true, false));
......@@ -78,6 +82,7 @@ void DBTest::SetUp() {
}
void DBTest::TearDown() {
db_->DropAll();
delete db_;
engine::ResMgrInst::GetInstance()->Stop();
......@@ -120,7 +125,12 @@ zilliz::milvus::engine::DBMetaOptions MySQLTest::getDBMetaOptions() {
zilliz::milvus::engine::Options MySQLDBTest::GetOptions() {
auto options = engine::OptionsFactory::Build();
options.meta.path = "/tmp/milvus_test";
options.meta.backend_uri = "mysql://root:Fantast1c@192.168.1.194:3306/";
options.meta.backend_uri = DBTestEnvironment::getURI();
if(options.meta.backend_uri.empty()) {
options.meta.backend_uri = "mysql://root:Fantast1c@192.168.1.194:3306/";
}
return options;
}
......
......@@ -22,4 +22,4 @@ set(knowhere_libs
add_executable(knowhere_test knowhere_test.cpp ${knowhere_src} ${helper})
target_link_libraries(knowhere_test ${knowhere_libs} ${unittest_libs})
install(TARGETS knowhere_test DESTINATION bin)
\ No newline at end of file
install(TARGETS knowhere_test DESTINATION unittest)
\ No newline at end of file
......@@ -104,4 +104,4 @@ else()
openblas)
endif()
install(TARGETS metrics_test DESTINATION bin)
\ No newline at end of file
install(TARGETS metrics_test DESTINATION unittest)
\ No newline at end of file
......@@ -91,5 +91,5 @@ endif ()
target_link_libraries(scheduler_test ${scheduler_libs} ${knowhere_libs} ${unittest_libs})
install(TARGETS scheduler_test DESTINATION bin)
install(TARGETS scheduler_test DESTINATION unittest)
......@@ -65,7 +65,7 @@ target_link_libraries(server_test
${unittest_libs}
)
install(TARGETS server_test DESTINATION bin)
install(TARGETS server_test DESTINATION unittest)
configure_file(appendix/server_config.yaml
"${CMAKE_CURRENT_BINARY_DIR}/milvus/conf/server_config.yaml"
......
......@@ -6,6 +6,7 @@
#include <gtest/gtest.h>
#include "cache/CpuCacheMgr.h"
#include "cache/GpuCacheMgr.h"
#include "server/ServerConfig.h"
#include "utils/Error.h"
#include "wrapper/knowhere/vec_index.h"
......@@ -146,6 +147,9 @@ TEST(CacheTest, CPU_CACHE_TEST) {
}
TEST(CacheTest, GPU_CACHE_TEST) {
server::ConfigNode& config = server::ServerConfig::GetInstance().GetConfig(server::CONFIG_CACHE);
config.AddSequenceItem(server::CONFIG_GPU_IDS, "0");
cache::CacheMgr* gpu_mgr = cache::GpuCacheMgr::GetInstance(0);
const int dim = 256;
......
......@@ -38,4 +38,4 @@ target_link_libraries(s3_test
curl
crypto)
install(TARGETS s3_test DESTINATION bin)
\ No newline at end of file
install(TARGETS s3_test DESTINATION unittest)
\ No newline at end of file