未验证 提交 34636638 编写于 作者: B binbin 提交者: GitHub

[skip ci] Update test case. (#5678)

* [skip ci] Update test case.
Signed-off-by: Binbin Lv <binbin.lv@zilliz.com>

* [skip ci] Refine test case.
Signed-off-by: Binbin Lv <binbin.lv@zilliz.com>

* [skip ci] Resolve conflicts
Signed-off-by: Binbin Lv <binbin.lv@zilliz.com>
上级 f6dd2443
......@@ -136,3 +136,34 @@ class TestcaseBase(Base):
check_task=check_task, check_items=check_items,
**kwargs)
return partition_wrap
def init_collection_general(self, prefix, insert_data=False, nb=3000, partition_num=0, is_binary=False):
    """
    target: create specified collections
    method: 1. create collections (binary/non-binary)
            2. create partitions if specified
            3. insert specified binary/non-binary data
               into each partition if any
    expected: return collection and raw data
    """
    log.info("Test case of search interface: initialize before test case")
    self._connect()
    # 1 create collection — schema depends on whether a binary-vector
    # collection was requested
    schema = (cf.gen_default_binary_collection_schema() if is_binary
              else cf.gen_default_collection_schema())
    log.info("init_data: collection creation")
    collection_w = self.init_collection_wrap(name=cf.gen_unique_str(prefix),
                                             schema=schema)
    # 2 add extra partitions if specified (default is 1 partition named "_default")
    if partition_num > 0:
        cf.gen_partitions(collection_w, partition_num)
    # 3 insert data if specified
    vectors, binary_raw_vectors = [], []
    if insert_data:
        collection_w, vectors, binary_raw_vectors = cf.insert_data(collection_w, nb, is_binary)
    return collection_w, vectors, binary_raw_vectors
......@@ -67,16 +67,26 @@ class ApiCollectionWrapper:
# return res, check_result
def drop(self, check_task=None, check_items=None, **kwargs):
    """Drop the wrapped collection, then run the configured response check
    and log the outcome. Returns (response, check_result)."""
    log.info("Dropping collection")
    response, is_succ = api_request([self.collection.drop], **kwargs)
    checked = ResponseChecker(response, sys._getframe().f_code.co_name,
                              check_task, check_items, is_succ, **kwargs).run()
    log.info("Dropped collection" if checked else "Dropping collection failed")
    return response, checked
def load(self, field_names=None, index_names=None, partition_names=None, check_task=None, check_items=None, **kwargs):
    """Load collection data into memory, then run the configured response
    check and log the outcome. Returns (response, check_result)."""
    log.info("loading data")
    response, is_succ = api_request(
        [self.collection.load, field_names, index_names, partition_names], **kwargs)
    checked = ResponseChecker(response, sys._getframe().f_code.co_name,
                              check_task, check_items, is_succ,
                              field_names=field_names, index_names=index_names,
                              partition_names=partition_names, **kwargs).run()
    log.info("loaded data" if checked else "loading failed")
    return response, checked
def release(self, check_task=None, check_items=None, **kwargs):
......@@ -135,6 +145,13 @@ class ApiCollectionWrapper:
check_result = ResponseChecker(res, func_name, check_task, check_items, check, partition_name=partition_name, **kwargs).run()
return res, check_result
def create_partition(self, partition_name, check_task=None, check_items=None, **kwargs):
    """Create a partition named *partition_name* on the wrapped collection
    and run the configured response check. Returns (response, check_result)."""
    response, is_succ = api_request([self.collection.create_partition, partition_name], **kwargs)
    checker = ResponseChecker(response, sys._getframe().f_code.co_name,
                              check_task, check_items, is_succ,
                              partition_name=partition_name, **kwargs)
    return response, checker.run()
@property
def indexes(self):
    """Pass-through to the underlying collection's indexes.

    NOTE: the former check_task/check_items parameters were removed — a
    property getter is only ever invoked with ``self``, so they could
    never be supplied and were dead weight.
    """
    return self.collection.indexes
......
......@@ -39,6 +39,9 @@ class ResponseChecker:
elif self.check_task == CheckTasks.check_partition_property:
result = self.check_partition_property(self.response, self.func_name, self.check_items)
elif self.check_task == CheckTasks.check_search_results:
result = self.check_search_results(self.response, self.check_items)
# Add check_items here if something new need verify
return result
......@@ -117,3 +120,32 @@ class ResponseChecker:
if check_items.get("num_entities", None):
assert partition.num_entities == check_items["num_entities"]
return True
@staticmethod
def check_search_results(search_res, check_items):
    """
    target: check the search results
    method: 1. check the query number
            2. check the limit(topK)
            3. check the distance
    expected: check the search is ok

    Returns True on success so that run() reports a truthy result,
    matching the other check_* helpers (e.g. check_partition_property).
    """
    log.info("search_results_check: checking the searching results")
    expected_nq = check_items["nq"]
    expected_limit = check_items["limit"]
    # 1. one result set is expected per query vector
    if len(search_res) != expected_nq:
        log.error("search_results_check: Numbers of query searched (%d) "
                  "is not equal with expected (%d)"
                  % (len(search_res), expected_nq))
        assert len(search_res) == expected_nq
    else:
        log.info("search_results_check: Numbers of query searched is correct")
    # 2. each result set must contain exactly limit (topK) hits
    for hits in search_res:
        if len(hits) != expected_limit:
            log.error("search_results_check: limit(topK) searched (%d) "
                      "is not equal with expected (%d)"
                      % (len(hits), expected_limit))
            assert len(hits) == expected_limit
            assert len(hits.ids) == expected_limit
        else:
            log.info("search_results_check: limit (topK) "
                     "searched for each query is correct")
    # Fixed: the old message duplicated the "search_results_check:" prefix.
    log.info("search_results_check: checked the searching results")
    return True
......@@ -236,7 +236,6 @@ def modify_file(file_path_list, is_modify=False, input_content=""):
f.close()
log.info("[modify_file] file(%s) modification is complete." % file_path_list)
def index_to_dict(index):
return {
"collection_name": index.collection_name,
......@@ -245,6 +244,47 @@ def index_to_dict(index):
"params": index.params
}
def assert_equal_index(index_1, index_2):
    """Return True when the two index objects describe the same index,
    compared field-by-field via index_to_dict()."""
    # The previous revision carried a duplicated return line (merge/diff
    # residue) and lacked a trailing newline; this is the single clean form.
    return index_to_dict(index_1) == index_to_dict(index_2)
def gen_partitions(collection_w, partition_num=1):
    """
    target: create extra partitions except for _default
    method: create more than one partitions
    expected: the collection ends up with partition_num + 1 partitions
              (the extras plus the default one)
    """
    log.info("gen_partitions: creating partitions")
    for idx in range(partition_num):
        collection_w.create_partition(partition_name="search_partition_%d" % idx,
                                      description="search partition")
    partitions = collection_w.partitions
    assert len(partitions) == partition_num + 1
    log.info("gen_partitions: created partitions %s" % partitions)
def insert_data(collection_w, nb=3000, is_binary=False):
    """
    target: insert non-binary/binary data
    method: insert non-binary/binary data into partitions if any
    expected: return collection and raw data

    The nb rows are spread across all partitions; the final partition
    receives the remainder so the total inserted is exactly nb even when
    nb is not divisible by the partition count (previously the remainder
    was dropped and the num_entities assertion below failed).
    """
    par = collection_w.partitions
    num = len(par)
    vectors = []
    binary_raw_vectors = []
    log.info("insert_data: inserting data into collection %s (num_entities: %s)"
             % (collection_w.name, nb))
    base = nb // num
    for i in range(num):
        # last partition takes the remainder so the grand total is nb
        rows = base if i < num - 1 else nb - base * (num - 1)
        if is_binary:
            default_data, binary_raw_data = gen_default_binary_dataframe_data(rows)
            binary_raw_vectors.extend(binary_raw_data)
        else:
            default_data = gen_default_dataframe_data(rows)
        collection_w.insert(default_data, par[i].name)
        vectors.extend(default_data)
    log.info("insert_data: inserted data into collection %s (num_entities: %s)"
             % (collection_w.name, nb))
    collection_w.load()
    assert not collection_w.is_empty
    assert collection_w.num_entities == nb
    return collection_w, vectors, binary_raw_vectors
......@@ -80,6 +80,7 @@ class CheckTasks:
check_list_count = "check_list_count"
check_collection_property = "check_collection_property"
check_partition_property = "check_partition_property"
check_search_results = "check_search_results"
class CaseLabel:
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册