Unverified commit abd91eab authored by Tinkerrr, committed by GitHub

Fix HNSW crash (#1262)

* fix
Signed-off-by: xiaojun.lin <xiaojun.lin@zilliz.com>

* update.
Signed-off-by: xiaojun.lin <xiaojun.lin@zilliz.com>
Parent b62369fa
@@ -22,6 +22,7 @@ Please mark all change in change log and use the issue from GitHub
 - \#1075 - improve error message when page size or offset is illegal
 - \#1082 - check page_size or offset value to avoid float
 - \#1115 - http server support load table into memory
+- \#1211 - Server down caused by searching with index_type: HNSW
 - \#1240 - Update license declaration
 ## Feature
......
@@ -77,11 +77,14 @@ IndexHNSW::Search(const DatasetPtr& dataset, const Config& config) {
     auto p_dist = (float*)malloc(dist_size * rows);
     using P = std::pair<float, int64_t>;
-    auto compare = [](P v1, P v2) { return v1.first < v2.first; };
+    auto compare = [](P& v1, P& v2) { return v1.first < v2.first; };
 #pragma omp parallel for
     for (unsigned int i = 0; i < rows; ++i) {
         const float* single_query = p_data + i * dim;
         std::vector<std::pair<float, int64_t>> ret = index_->searchKnn(single_query, config->k, compare);
+        while (ret.size() < config->k) {
+            ret.push_back(std::make_pair(-1, -1));
+        }
         std::vector<float> dist;
         std::vector<int64_t> ids;
         std::transform(ret.begin(), ret.end(), std::back_inserter(dist),
......
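For readers skimming the hunk above: the distance buffer (`p_dist`) is malloc'd for a fixed number of results per query (`dist_size * rows`), but `searchKnn` can return fewer than `k` pairs, for example when the segment holds fewer than `k` vectors or `ef` is small, so code that assumes exactly `k` results per row can run past the end of `ret` and bring the server down (#1211). The added `while` loop pads each result list up to `k` with `(-1, -1)` sentinels. The sketch below is not the Milvus code, only a minimal, self-contained illustration of the same padding idea; `PadToK` and `CopyRow` are hypothetical names.

```cpp
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

// Hypothetical helper: pad one query's result list up to k entries so that
// downstream code assuming exactly k (distance, id) pairs per row never
// reads past the end of the vector.
void PadToK(std::vector<std::pair<float, int64_t>>& ret, size_t k) {
    while (ret.size() < k) {
        ret.emplace_back(-1.0f, -1);  // sentinel: no neighbour found
    }
}

// Hypothetical helper: copy one query's results into flat output buffers
// laid out as rows * k, mirroring how a pre-allocated distance/id buffer
// would be filled.
void CopyRow(const std::vector<std::pair<float, int64_t>>& ret, size_t row, size_t k,
             float* p_dist, int64_t* p_ids) {
    for (size_t j = 0; j < k; ++j) {  // safe only because ret.size() >= k
        p_dist[row * k + j] = ret[j].first;
        p_ids[row * k + j] = ret[j].second;
    }
}

int main() {
    const size_t k = 5;
    // Simulate a query that found only 2 neighbours (fewer than k).
    std::vector<std::pair<float, int64_t>> ret = {{0.1f, 42}, {0.3f, 7}};
    PadToK(ret, k);  // without this, CopyRow would index past ret.end()

    std::vector<float> dist(k);
    std::vector<int64_t> ids(k);
    CopyRow(ret, /*row=*/0, k, dist.data(), ids.data());

    for (size_t j = 0; j < k; ++j) {
        std::cout << ids[j] << ":" << dist[j] << "\n";  // -1:-1 marks padding
    }
    return 0;
}
```

Consumers of the flat buffers can then treat an id of -1 as "no neighbour" rather than reading uninitialized memory.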
@@ -268,8 +268,8 @@ HNSWConfAdapter::Match(const TempMetaConf& metaconf) {
     conf->d = metaconf.dim;
     conf->metric_type = metaconf.metric_type;
-    conf->ef = 100;  // ef can be auto-configured by using sample data.
-    conf->M = 16;  // A reasonable range of M is from 5 to 48.
+    conf->ef = 200;  // ef can be auto-configured by using sample data.
+    conf->M = 32;  // A reasonable range of M is from 5 to 48.
     return conf;
 }
......
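The adapter change only raises the default HNSW graph parameters: ef from 100 to 200 and M from 16 to 32. A larger M (graph out-degree) and a larger ef (candidate-list size) generally improve recall at the cost of memory, build time, and query latency. As a rough illustration of where these two knobs sit in upstream hnswlib (not the knowhere wrapper above, whose searchKnn overload takes a comparator), the sketch below builds a tiny index with the new defaults; the dataset sizes and the use of ef for both construction and search are illustrative assumptions only.

```cpp
#include <cstddef>
#include <random>
#include <vector>

#include "hnswlib/hnswlib.h"  // assumes upstream hnswlib headers are available

int main() {
    const size_t dim = 64, nb = 1000, k = 10;
    const size_t M = 32;                 // graph out-degree (new Milvus default)
    const size_t ef_construction = 200;  // build-time candidate list size (illustrative)

    hnswlib::L2Space space(dim);
    hnswlib::HierarchicalNSW<float> index(&space, nb, M, ef_construction);

    // Fill the index with random vectors.
    std::mt19937 gen(42);
    std::uniform_real_distribution<float> dis(0.0f, 1.0f);
    std::vector<float> data(nb * dim);
    for (auto& v : data) v = dis(gen);
    for (size_t i = 0; i < nb; ++i) {
        index.addPoint(data.data() + i * dim, i);
    }

    // Search-time ef (new Milvus default is 200); larger ef improves recall
    // at the cost of latency.
    index.setEf(200);
    auto result = index.searchKnn(data.data(), k);  // priority_queue of (dist, label)
    (void)result;
    return 0;
}
```

A higher ef may also make it less likely that a search returns fewer than k results, which is the situation the padding fix above guards against.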
@@ -77,18 +77,19 @@ INSTANTIATE_TEST_CASE_P(
     Values(
         //["Index type", "Generator type", "dim", "nb", "nq", "k", "build config", "search config"]
 #ifdef MILVUS_GPU_VERSION
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFFLAT_GPU, "Default", DIM, NB, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFFLAT_MIX, "Default", 64, 1000, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFSQ8_GPU, "Default", DIM, NB, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFSQ8_MIX, "Default", DIM, NB, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFPQ_MIX, "Default", 64, 1000, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFFLAT_GPU, "Default", DIM, NB, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFFLAT_MIX, "Default", 64, 1000, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFSQ8_GPU, "Default", DIM, NB, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFSQ8_MIX, "Default", DIM, NB, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFPQ_MIX, "Default", 64, 1000, 10, 10),
 //        std::make_tuple(milvus::engine::IndexType::NSG_MIX, "Default", 128, 250000, 10, 10),
 #endif
 //        std::make_tuple(milvus::engine::IndexType::SPTAG_KDT_RNT_CPU, "Default", 128, 100, 10, 10),
 //        std::make_tuple(milvus::engine::IndexType::SPTAG_BKT_RNT_CPU, "Default", 128, 100, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IDMAP, "Default", 64, 1000, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFFLAT_CPU, "Default", 64, 1000, 10, 10),
-        std::make_tuple(milvus::engine::IndexType::FAISS_IVFSQ8_CPU, "Default", DIM, NB, 10, 10)));
+        std::make_tuple(milvus::engine::IndexType::HNSW, "Default", 64, 10000, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IDMAP, "Default", 64, 1000, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFFLAT_CPU, "Default", 64, 1000, 10, 10),
+        std::make_tuple(milvus::engine::IndexType::FAISS_IVFSQ8_CPU, "Default", DIM, NB, 10, 10)));
 #ifdef MILVUS_GPU_VERSION
 TEST_P(KnowhereWrapperTest, WRAPPER_EXCEPTION_TEST) {
......