Commit 4a4ba1e8 authored by: P peng.xu

Merge branch 'track_0.6.0' into 0.6.0

......@@ -17,10 +17,12 @@ Please mark all changes in the change log and use the ticket from JIRA.
# Milvus 0.5.2 (TODO)
## Bug
- \#194 - Search failed: message="Table file doesn't exist"
## Feature
## Improvement
- \#190 - Update default config: use_blas_threshold to 1100 and server version printout to 0.5.2
## Task
......
......@@ -8,20 +8,16 @@
| ------------- | ------------------------------------------------------------ |
| Apache Arrow | [Apache License 2.0](https://github.com/apache/arrow/blob/master/LICENSE.txt) |
| Boost | [Boost Software License](https://github.com/boostorg/boost/blob/master/LICENSE_1_0.txt) |
| BZip2 | [BZip2](http://www.bzip.org/) |
| FAISS | [MIT](https://github.com/facebookresearch/faiss/blob/master/LICENSE) |
| Gtest | [BSD 3-Clause](https://github.com/google/googletest/blob/master/LICENSE) |
| LAPACK | [LAPACK](https://github.com/Reference-LAPACK/lapack/blob/master/LICENSE) |
| LZ4 | [BSD 2-Clause](https://github.com/Blosc/c-blosc/blob/master/LICENSES/LZ4.txt) |
| MySQLPP | [LGPL 2.1](https://tangentsoft.com/mysqlpp/artifact/b128a66dab867923) |
| OpenBLAS | [BSD 3-Clause](https://github.com/xianyi/OpenBLAS/blob/develop/LICENSE) |
| Prometheus | [Apache License 2.0](https://github.com/prometheus/prometheus/blob/master/LICENSE) |
| Snappy | [BSD](https://github.com/google/snappy/blob/master/COPYING) |
| SQLite | [Public Domain](https://www.sqlite.org/copyright.html) |
| SQLite-ORM | [BSD 3-Clause](https://github.com/fnc12/sqlite_orm/blob/master/LICENSE) |
| yaml-cpp | [MIT](https://github.com/jbeder/yaml-cpp/blob/master/LICENSE) |
| ZLIB | [ZLIB](http://zlib.net/zlib_license.html) |
| ZSTD | [BSD](https://github.com/facebook/zstd/blob/dev/LICENSE) |
| libunwind | [MIT](https://github.com/libunwind/libunwind/blob/master/LICENSE) |
| gperftools | [BSD 3-Clause](https://github.com/gperftools/gperftools/blob/master/COPYING) |
| grpc | [Apache 2.0](https://github.com/grpc/grpc/blob/master/LICENSE) |
......
ruamel.yaml==0.16.5
ruamel.yaml.clib==0.2.0
#!/usr/bin/env python3
import sys
import argparse
from argparse import Namespace
import os, shutil
import getopt
from ruamel.yaml import YAML, yaml_object
from ruamel.yaml.comments import CommentedSeq, CommentedMap
from ruamel.yaml.tokens import CommentToken
##
yaml = YAML(typ="rt")
## format yaml file
yaml.indent(mapping=2, sequence=4, offset=2)
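# Round-trip ("rt") loading keeps existing comments and key order when the file is dumped again.
# With mapping=2, sequence=4, offset=2 the emitted yaml looks roughly like:
#   resource_config:
#     search_resources:
#       - cpu
#       - gpu0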
############################################
# Comment operation
#
############################################
def _extract_comment(_comment):
"""
remove '#' at start of comment
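e.g. "# port number" -> " port number" (the space after '#' is kept)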
"""
# if _comment is empty, do nothing
if not _comment:
return _comment
# str_ = _comment.lstrip(" ")
str_ = _comment.strip()
str_ = str_.lstrip("#")
return str_
def _add_eol_comment(element, *args, **kwargs):
"""
add_eol_comment
args --> (comment, key)
"""
if element is None or \
(not isinstance(element, CommentedMap) and
not isinstance(element, CommentedSeq)) or \
args[0] is None or \
len(args[0]) == 0:
return
comment = args[0]
# comment is empty, do nothing
if not comment:
return
key = args[1]
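# yaml_add_eol_comment may raise if a comment is already attached to this key;
# in that case drop the existing comment token and retry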
try:
element.yaml_add_eol_comment(*args, **kwargs)
except Exception:
element.ca.items.pop(key, None)
element.yaml_add_eol_comment(*args, **kwargs)
def _map_comment(_element, _key):
origin_comment = ""
token = _element.ca.items.get(_key, None)
if token is not None:
try:
origin_comment = token[2].value
except Exception:
try:
# comment is below the element; add prefix "#\n"
col = _element.lc.col + 2
space_list = [" " for i in range(col)]
space_str = "".join(space_list)
origin_comment = "\n" + "".join([space_str + t.value for t in token[3]])
except Exception:
pass
return origin_comment
def _seq_comment(_element, _index):
# get target comment
_comment = ""
token = _element.ca.items.get(_index, None)
if token is not None:
_comment = token[0].value
return _comment
def _start_comment(_element):
_comment = ""
cmt = _element.ca.comment
try:
_comment = cmt[1][0].value
except Exception:
pass
return _comment
def _comment_counter(_comment):
"""
split a comment into [end-of-line part, following-line part]
"""
x = lambda l: l.strip().strip("#").strip()
_counter = []
if _comment.startswith("\n"):
_counter.append("")
_counter.append(x(_comment[1:]))
return _counter
elif _comment.startswith("#\n"):
_counter.append("")
_counter.append(x(_comment[2:]))
else:
index = _comment.find("\n")
_counter.append(x(_comment[:index]))
_counter.append(x(_comment[index + 1:]))
return _counter
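# combine the comment already on the master node (_m_comment) with the one coming from the
# target node (_t_comment); where both define the same part (end-of-line or following-line),
# the target's version wins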
def _obtain_comment(_m_comment, _t_comment):
if not _m_comment or not _t_comment:
return _m_comment or _t_comment
_m_counter = _comment_counter(_m_comment)
_t_counter = _comment_counter(_t_comment)
if not _m_counter[0] and not _t_counter[1]:
comment = _t_comment + _m_comment
elif not _m_counter[1] and not _t_counter[0]:
comment = _m_comment + _t_comment
elif _t_counter[0] and _t_counter[1]:
comment = _t_comment
elif not _t_counter[0] and not _t_counter[1]:
comment = _m_comment
elif not _m_counter[0] and not _m_counter[1]:
comment = _t_comment
else:
if _t_counter[0]:
comment = _m_comment.replace(_m_counter[0], _t_counter[0], 1)
else:
comment = _m_comment.replace(_m_counter[1], _t_counter[1], 1)
i = comment.find("\n\n\n")
while i >= 0:
comment = comment.replace("\n\n\n", "\n\n", 1)
i = comment.find("\n\n\n")
return comment
############################################
# Utils
#
############################################
def _get_update_par(_args):
_dict = _args.__dict__
# file path
_in_file = _dict.get("f", None) or _dict.get("file", None)
# tips
_tips = _dict.get('tips', None) or "Input \"-h\" for more information"
# update
_u = _dict.get("u", None) or _dict.get("update", None)
# append
_a = _dict.get('a', None) or _dict.get('append', None)
# out stream group
_i = _dict.get("i", None) or _dict.get("inplace", None)
_o = _dict.get("o", None) or _dict.get("out_file", None)
return _in_file, _u, _a, _i, _o, _tips
############################################
# Element operation
#
############################################
def update_map_element(element, key, value, comment, _type):
"""
element: CommentedMap to update
key: key whose value is replaced
value: new value
comment: end-of-line comment to attach to the key
_type: value type (currently unused)
"""
if element is None or not isinstance(element, CommentedMap):
print("Only key-value updates are supported")
sys.exit(1)
origin_comment = _map_comment(element, key)
sub_element = element.get(key, None)
if isinstance(sub_element, CommentedMap) or isinstance(sub_element, CommentedSeq):
print("Only updating a single value is supported")
element.update({key: value})
comment = _obtain_comment(origin_comment, comment)
_add_eol_comment(element, _extract_comment(comment), key)
def update_seq_element(element, value, comment, _type):
if element is None or not isinstance(element, CommentedSeq):
print("Param `-a` can only be used to append to a yaml list")
sys.exit(1)
element.append(str(value))
comment = _obtain_comment("", comment)
_add_eol_comment(element, _extract_comment(comment), len(element) - 1)
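# run_update builds a small yaml snippet from a dotted key path and merges it into `code`,
# e.g. keys="engine_config.use_blas_threshold", value="1100", comment="blas threshold" yields:
#   engine_config:
#     use_blas_threshold: 1100 # blas threshold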
def run_update(code, keys, value, comment, _app):
key_list = keys.split(".")
space_str = ":\n "
key_str = "{}".format(key_list[0])
for key in key_list[1:]:
key_str = key_str + space_str + key
space_str = space_str + " "
if not _app:
yaml_str = """{}: {}""".format(key_str, value)
else:
yaml_str = "{}{}- {}".format(key_str, space_str, value)
if comment:
yaml_str = "{} # {}".format(yaml_str, comment)
mcode = yaml.load(yaml_str)
_merge(code, mcode)
def _update(code, _update, _app, _tips):
if not _update:
return code
_update_list = [l.strip() for l in _update.split(",")]
for l in _update_list:
try:
variant, comment = l.split("#")
except ValueError:
variant = l
comment = None
try:
keys, value = variant.split("=")
run_update(code, keys, value, comment, _app)
except ValueError:
print("Invalid format. Run with \"--help\" for more info.")
sys.exit(1)
return code
def _backup(in_file_p):
backup_p = in_file_p + ".bak"
if os.path.exists(backup_p):
os.remove(backup_p)
if not os.path.exists(in_file_p):
print("File {} does not exist.".format(in_file_p))
sys.exit(1)
shutil.copyfile(in_file_p, backup_p)  # copy the original file to the backup path
def _recovery(in_file_p):
backup_p = in_file_p + ".bak"
if not os.path.exists(in_file_p):
print("File {} does not exist.".format(in_file_p))
sys.exit(1)
elif not os.path.exists(backup_p):
print("Backup file does not exist")
sys.exit(0)
os.remove(in_file_p)
os.rename(backup_p, in_file_p)
# merge `target` into `master` in place, preserving comments
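# maps are merged recursively: scalar values from `target` overwrite those in `master`,
# while nested maps and sequences are merged; sequence items are appended when no matching
# item already exists, and comments from both sides are combined via _obtain_comment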
def _merge(master, target):
if type(master) != type(target):
print("yaml formats do not match:\n")
yaml.dump(master, sys.stdout)
print("\n&&\n")
yaml.dump(target, sys.stdout)
sys.exit(1)
## item is a sequence
if isinstance(target, CommentedSeq):
for index in range(len(target)):
# get target comment
target_comment = _seq_comment(target, index)
master_index = len(master)
target_item = target[index]
if isinstance(target_item, CommentedMap):
merge_flag = False
for idx in range(len(master)):
if isinstance(master[idx], CommentedMap):
if master[idx].keys() == target_item.keys():
_merge(master[idx], target_item)
# nonlocal merge_flag
master_index = idx
merge_flag = True
break
if merge_flag is False:
master.append(target_item)
elif target_item not in master:
master.append(target[index])
else:
# merge(master[index], target[index])
pass
# re-attach the previous item's comment with its trailing blank line stripped
previous_comment = _seq_comment(master, master_index - 1)
_add_eol_comment(master, _extract_comment(previous_comment), master_index - 1)
origin_comment = _seq_comment(master, master_index)
comment = _obtain_comment(origin_comment, target_comment)
if len(comment) > 0:
_add_eol_comment(master, _extract_comment(comment) + "\n\n", len(master) - 1)
## item is a map
elif isinstance(target, CommentedMap):
for item in target:
if item == "flag":
print("")
origin_comment = _map_comment(master, item)
target_comment = _map_comment(target, item)
# get origin start comment
origin_start_comment = _start_comment(master)
# get target start comment
target_start_comment = _start_comment(target)
m = master.get(item, default=None)
if m is None or \
(not (isinstance(m, CommentedMap) or
isinstance(m, CommentedSeq))):
master.update({item: target[item]})
else:
_merge(master[item], target[item])
comment = _obtain_comment(origin_comment, target_comment)
if len(comment) > 0:
_add_eol_comment(master, _extract_comment(comment), item)
start_comment = _obtain_comment(origin_start_comment, target_start_comment)
if len(start_comment) > 0:
master.yaml_set_start_comment(_extract_comment(start_comment))
def _save(_code, _file):
with open(_file, 'w') as wf:
yaml.dump(_code, wf)
def _load(_file):
with open(_file, 'r') as rf:
code = yaml.load(rf)
return code
############################################
# sub parser process operation
#
############################################
def merge_yaml(_args):
_dict = _args.__dict__
_m_file = _dict.get("merge_file", None)
_in_file, _u, _a, _i, _o, _tips = _get_update_par(_args)
if not (_in_file and _m_file):
print(_tips)
sys.exit(1)
code = _load(_in_file)
mcode = _load(_m_file)
_merge(code, mcode)
_update(code, _u, _a, _tips)
if _i:
_backup(_in_file)
_save(code, _in_file)
elif _o:
_save(code, _o)
else:
print(_tips)
sys.exit(1)
def update_yaml(_args):
_in_file, _u, _a, _i, _o, _tips = _get_update_par(_args)
if not _in_file or not _u:
print(_tips)
sys.exit(1)
code = _load(_in_file)
if _i and _o:
print(_tips)
sys.exit(1)
_update(code, _u, _a, _tips)
if _i:
_backup(_in_file)
_save(code, _in_file)
elif _o:
_save(code, _o)
def reset(_args):
_dict = _args.__dict__
_f = _dict.get('f', None) or _dict.get('file', None)
if _f:
_recovery(_f)
else:
_t = _dict.get('tips', None) or "Input \"-h\" for more information"
print(_t)
############################################
# Cli operation
#
############################################
def _set_merge_parser(_parsers):
"""
config merge parser
"""
merge_parser = _parsers.add_parser("merge", help="merge with another yaml file")
_set_merge_parser_arg(merge_parser)
_set_update_parser_arg(merge_parser)
merge_parser.set_defaults(
function=merge_yaml,
tips=merge_parser.format_help()
)
def _set_merge_parser_arg(_parser):
"""
config parser argument for merging
"""
_parser.add_argument("-m", "--merge-file", help="yaml file to merge into the source file")
def _set_update_parser(_parsers):
"""
config update parser
"""
update_parser = _parsers.add_parser("update", help="update values in a yaml file")
_set_update_parser_arg(update_parser)
update_parser.set_defaults(
function=update_yaml,
tips=update_parser.format_help()
)
def _set_update_parser_arg(_parser):
"""
config parser argument for updating
"""
_parser.add_argument("-f", "--file", help="source yaml file")
_parser.add_argument('-u', '--update', help="update with args, for instance \"a.b.c=d# d comment\"")
_parser.add_argument('-a', '--append', action="store_true", help="append to a seq")
group = _parser.add_mutually_exclusive_group()
group.add_argument("-o", "--out-file", help="indicate output yaml file")
group.add_argument("-i", "--inplace", action="store_true", help="store the result in the original file (a .bak backup is created)")
def _set_reset_parser(_parsers):
"""
config reset parser
"""
reset_parser = _parsers.add_parser("reset", help="restore yaml file from its .bak backup")
# indicate yaml file
reset_parser.add_argument('-f', '--file', help="indicate input yaml file")
reset_parser.set_defaults(
function=reset,
tips=reset_parser.format_help()
)
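# example invocations (the script is expected to be saved as yaml_processor.py and made executable):
#   ./yaml_processor.py merge -f server_config.yaml -m update_server_config.yaml -i
#   ./yaml_processor.py update -f server_config.yaml -u "engine_config.use_blas_threshold=1100# blas threshold" -i
#   ./yaml_processor.py reset -f server_config.yaml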
def main():
parser = argparse.ArgumentParser()
sub_parsers = parser.add_subparsers()
# set merge command
_set_merge_parser(sub_parsers)
# set update command
_set_update_parser(sub_parsers)
# set reset command
_set_reset_parser(sub_parsers)
# parse argument and run func
args = parser.parse_args()
args.function(args)
if __name__ == '__main__':
main()
timeout(time: 5, unit: 'MINUTES') {
dir ("ci/jenkins/scripts") {
sh "pip3 install -r requirements.txt"
sh "./yaml_processor.py merge -f /opt/milvus/conf/server_config.yaml -m ../yaml/update_server_config.yaml -i && rm /opt/milvus/conf/server_config.yaml.bak"
}
sh "tar -zcvf ./${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz -C /opt/ milvus"
withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'JFROG_USERNAME', passwordVariable: 'JFROG_PASSWORD')]) {
def uploadStatus = sh(returnStatus: true, script: "curl -u${JFROG_USERNAME}:${JFROG_PASSWORD} -T ./${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz ${params.JFROG_ARTFACTORY_URL}/milvus/package/${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz")
......
db_config:
primary_path: /opt/milvus
milvus/
conf/server_config.yaml
conf/log_config.conf
src/config.h
version.h
lcov_out/
base.info
......
......@@ -68,8 +68,8 @@ if (MILVUS_VERSION_MAJOR STREQUAL ""
OR MILVUS_VERSION_MINOR STREQUAL ""
OR MILVUS_VERSION_PATCH STREQUAL "")
message(WARNING "Failed to determine Milvus version from git branch name")
set(MILVUS_VERSION "0.5.0")
endif ()
set(MILVUS_VERSION "0.6.0")
endif()
message(STATUS "Build version = ${MILVUS_VERSION}")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/src/config.h.in ${CMAKE_CURRENT_SOURCE_DIR}/src/config.h @ONLY)
......
......@@ -37,12 +37,12 @@ cache_config:
gpu_cache_threshold: 0.85 # percentage of data that will be kept when cache cleanup is triggered, must be in range (0.0, 1.0]
engine_config:
use_blas_threshold: 20 # if nq < use_blas_threshold, use SSE, faster with fluctuated response times
use_blas_threshold: 1100 # if nq < use_blas_threshold, use SSE, faster with fluctuated response times
# if nq >= use_blas_threshold, use OpenBlas, slower with stable response times
gpu_search_threshold: 1000 # threshold beyond which the search computation is executed on GPUs only
resource_config:
search_resources: # define the CPU / GPUs used for search computation, must be in format: cpu / gpux
search_resources: # define the devices used for search computation, must be in format: cpu or gpux
- cpu
- gpu0
index_build_device: gpu0 # CPU / GPU used for building index, must be in format: cpu / gpux
......@@ -40,12 +40,15 @@ class Cache {
return usage_;
}
// unit: BYTE
int64_t
capacity() const {
return capacity_;
} // unit: BYTE
}
// unit: BYTE
void
set_capacity(int64_t capacity); // unit: BYTE
set_capacity(int64_t capacity);
double
freemem_percent() const {
......@@ -59,16 +62,22 @@ class Cache {
size_t
size() const;
bool
exists(const std::string& key);
ItemObj
get(const std::string& key);
void
insert(const std::string& key, const ItemObj& item);
void
erase(const std::string& key);
void
print();
void
clear();
......
......@@ -53,13 +53,16 @@ class CacheMgr {
int64_t
CacheUsage() const;
int64_t
CacheCapacity() const;
void
SetCapacity(int64_t capacity);
protected:
CacheMgr();
virtual ~CacheMgr();
protected:
......
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#define MILVUS_VERSION "0.5.0"
#define BUILD_TYPE "Debug"
#define BUILD_TIME "2019-11-05 18:49.05"
......@@ -173,6 +173,7 @@ MySQLMetaImpl::~MySQLMetaImpl() {
Status
MySQLMetaImpl::NextTableId(std::string& table_id) {
std::lock_guard<std::mutex> lock(genid_mutex_); // avoid duplicated id
std::stringstream ss;
SimpleIDGenerator g;
ss << g.GetNextIDNumber();
......@@ -182,6 +183,7 @@ MySQLMetaImpl::NextTableId(std::string& table_id) {
Status
MySQLMetaImpl::NextFileId(std::string& file_id) {
std::lock_guard<std::mutex> lock(genid_mutex_); // avoid duplicated id
std::stringstream ss;
SimpleIDGenerator g;
ss << g.GetNextIDNumber();
......
......@@ -136,6 +136,7 @@ class MySQLMetaImpl : public Meta {
std::shared_ptr<MySQLConnectionPool> mysql_connection_pool_;
bool safe_grab_ = false;
std::mutex genid_mutex_;
// std::mutex connectionMutex_;
}; // DBMetaImpl
......
......@@ -91,6 +91,7 @@ SqliteMetaImpl::~SqliteMetaImpl() {
Status
SqliteMetaImpl::NextTableId(std::string& table_id) {
std::lock_guard<std::mutex> lock(genid_mutex_); // avoid duplicated id
std::stringstream ss;
SimpleIDGenerator g;
ss << g.GetNextIDNumber();
......@@ -100,6 +101,7 @@ SqliteMetaImpl::NextTableId(std::string& table_id) {
Status
SqliteMetaImpl::NextFileId(std::string& file_id) {
std::lock_guard<std::mutex> lock(genid_mutex_); // avoid duplicated id
std::stringstream ss;
SimpleIDGenerator g;
ss << g.GetNextIDNumber();
......
......@@ -132,6 +132,7 @@ class SqliteMetaImpl : public Meta {
private:
const DBMetaOptions options_;
std::mutex meta_mutex_;
std::mutex genid_mutex_;
}; // DBMetaImpl
} // namespace meta
......
......@@ -246,12 +246,13 @@ if (CUSTOMIZATION)
# set(FAISS_MD5 "c89ea8e655f5cdf58f42486f13614714") # commit-id 9c28a1cbb88f41fa03b03d7204106201ad33276b branch-0.2.1
# set(FAISS_MD5 "87fdd86351ffcaf3f80dc26ade63c44b") # commit-id 841a156e67e8e22cd8088e1b58c00afbf2efc30b branch-0.2.1
# set(FAISS_MD5 "f3b2ce3364c3fa7febd3aa7fdd0fe380") # commit-id 694e03458e6b69ce8a62502f71f69a614af5af8f branch-0.3.0
set(FAISS_MD5 "bb30722c22390ce5f6759ccb216c1b2a") # commit-id d324db297475286afe107847c7fb7a0f9dc7e90e branch-0.3.0
endif ()
else ()
set(FAISS_SOURCE_URL "https://github.com/milvus-io/faiss/archive/1.6.0.tar.gz")
set(FAISS_MD5 "eb96d84f98b078a9eec04a796f5c792e")
endif ()
# set(FAISS_MD5 "bb30722c22390ce5f6759ccb216c1b2a") # commit-id d324db297475286afe107847c7fb7a0f9dc7e90e branch-0.3.0
set(FAISS_MD5 "2293cdb209c3718e3b19f3edae8b32b3") # commit-id a13c1205dc52977a9ad3b33a14efa958604a8bff branch-0.3.0
endif()
else()
set(FAISS_SOURCE_URL "https://github.com/JinHai-CN/faiss/archive/1.6.0.tar.gz")
set(FAISS_MD5 "b02c1a53234f5acc9bea1b0c55524f50")
endif()
message(STATUS "FAISS URL = ${FAISS_SOURCE_URL}")
if (DEFINED ENV{KNOWHERE_ARROW_URL})
......
......@@ -689,7 +689,7 @@ Config::CheckResourceConfigMode(const std::string& value) {
}
Status
CheckResource(const std::string& value) {
CheckGpuDevice(const std::string& value) {
std::string s = value;
std::transform(s.begin(), s.end(), s.begin(), ::tolower);
#ifdef MILVUS_CPU_VERSION
......@@ -697,22 +697,20 @@ CheckResource(const std::string& value) {
return Status(SERVER_INVALID_ARGUMENT, "Invalid CPU resource: " + s);
}
#else
const std::regex pat("cpu|gpu(\\d+)");
std::smatch m;
if (!std::regex_match(s, m, pat)) {
std::string msg = "Invalid search resource: " + value +
". Possible reason: resource_config.search_resources is not in the format of cpux or gpux";
const std::regex pat("gpu(\\d+)");
std::cmatch m;
if (!std::regex_match(value.c_str(), m, pat)) {
std::string msg = "Invalid gpu device: " + value +
". Possible reason: resource_config.search_resources does not match your hardware.";
return Status(SERVER_INVALID_ARGUMENT, msg);
}
if (s.compare(0, 3, "gpu") == 0) {
int32_t gpu_index = std::stoi(s.substr(3));
int32_t gpu_index = std::stoi(value.substr(3));
if (!ValidationUtil::ValidateGpuIndex(gpu_index).ok()) {
std::string msg = "Invalid search resource: " + value +
std::string msg = "Invalid gpu device: " + value +
". Possible reason: resource_config.search_resources does not match your hardware.";
return Status(SERVER_INVALID_ARGUMENT, msg);
}
}
#endif
return Status::OK();
}
......@@ -726,20 +724,38 @@ Config::CheckResourceConfigSearchResources(const std::vector<std::string>& value
return Status(SERVER_INVALID_ARGUMENT, msg);
}
for (auto& resource : value) {
auto status = CheckResource(resource);
if (!status.ok()) {
return Status(SERVER_INVALID_ARGUMENT, status.message());
bool cpu_found = false, gpu_found = false;
for (auto& device : value) {
if (device == "cpu") {
cpu_found = true;
continue;
}
if (CheckGpuDevice(device).ok()) {
gpu_found = true;
} else {
std::string msg = "Invalid search resource: " + device +
". Possible reason: resource_config.search_resources does not match your hardware.";
return Status(SERVER_INVALID_ARGUMENT, msg);
}
}
if (cpu_found && !gpu_found) {
std::string msg =
"Invalid search resource. Possible reason: resource_config.search_resources has only CPU resource.";
return Status(SERVER_INVALID_ARGUMENT, msg);
}
return Status::OK();
}
Status
Config::CheckResourceConfigIndexBuildDevice(const std::string& value) {
auto status = CheckResource(value);
if (!status.ok()) {
return Status(SERVER_INVALID_ARGUMENT, status.message());
// if (value == "cpu") {
// return Status::OK();
// }
if (!CheckGpuDevice(value).ok()) {
std::string msg = "Invalid index build device: " + value +
". Possible reason: resource_config.index_build_device does not match your hardware.";
return Status(SERVER_INVALID_ARGUMENT, msg);
}
return Status::OK();
}
......
......@@ -185,7 +185,7 @@ TEST_F(MySqlMetaTest, ARCHIVE_TEST_DISK) {
options.archive_conf_ = milvus::engine::ArchiveConf("delete", "disk:11");
int mode = milvus::engine::DBOptions::MODE::SINGLE;
auto impl = milvus::engine::meta::MySQLMetaImpl(options, mode);
milvus::engine::meta::MySQLMetaImpl impl(options, mode);
auto table_id = "meta_test_group";
milvus::engine::meta::TableSchema table;
......
......@@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends wget && \
sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \
apt-get update && apt-get install -y --no-install-recommends \
git flex bison gfortran lsb-core \
curl libtool automake libboost1.58-all-dev libssl-dev pkg-config libcurl4-openssl-dev \
curl libtool automake libboost1.58-all-dev libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-6.0 clang-tidy-6.0 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243 && \
apt-get remove --purge -y && \
......
......@@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends wget && \
sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \
apt-get update && apt-get install -y --no-install-recommends \
git flex bison gfortran lsb-core \
curl libtool automake libboost-all-dev libssl-dev pkg-config libcurl4-openssl-dev \
curl libtool automake libboost-all-dev libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-6.0 clang-tidy-6.0 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243 && \
apt-get remove --purge -y && \
......