Unverified commit 69eeb4df authored by del-zhenwu, committed by GitHub

rename interfaces in cases (#2298)

Signed-off-by: zw <zw@zilliz.com>
Parent 15a603fa
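
The hunks below apply one mechanical change: the client calls used in the test cases are renamed to the new interface names. As a reading aid, the sketch below summarizes the mapping as it appears in the diff and shows the renamed calls in the style of these tests; `connect` and `collection` are the suite's pytest fixtures and `ids` is the list returned by `add_vectors`. The snippet is illustrative only and is not part of the commit.

# Renamed client interfaces, as applied throughout this diff:
#   show_collections()         -> list_collections()
#   count_collection(c)        -> count_entities(c)
#   delete_by_id(c, ids)       -> delete_entity_by_id(c, ids)
#   get_vectors_by_ids(c, ids) -> get_entity_by_id(c, ids)
#   collection_info(c)         -> get_collection_stats(c)
#   describe_collection(c)     -> get_collection_info(c)
#   describe_index(c)          -> get_index_info(c)
#   show_partitions(c)         -> list_partitions(c)

# Minimal usage in the new style (sketch; relies on the suite's fixtures):
status, collection_names = connect.list_collections()
status, count = connect.count_entities(collection)
status = connect.delete_entity_by_id(collection, ids)
status, entities = connect.get_entity_by_id(collection, ids)
status, stats = connect.get_collection_stats(collection)
status, index_info = connect.get_index_info(collection)
status, partitions = connect.list_partitions(collection)
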
......@@ -108,7 +108,7 @@ def collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......@@ -134,7 +134,7 @@ def ip_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......@@ -160,7 +160,7 @@ def jac_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......@@ -185,7 +185,7 @@ def ham_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......@@ -210,7 +210,7 @@ def tanimoto_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......@@ -234,7 +234,7 @@ def substructure_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......@@ -258,7 +258,7 @@ def superstructure_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
......
......@@ -615,7 +615,7 @@ class TestAddBase:
for p in processes:
p.join()
time.sleep(2)
status, count = milvus.count_collection(collection)
status, count = milvus.count_entities(collection)
assert count == process_num * loop_num
@pytest.mark.level(2)
......@@ -624,7 +624,7 @@ class TestAddBase:
'''
target: test collection rows_count is correct or not with multi threading
method: create collection and add vectors in it(idmap),
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
if args["handler"] == "HTTP":
......@@ -652,7 +652,7 @@ class TestAddBase:
x.start()
for th in threads:
th.join()
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
assert res == thread_num * nb
def test_add_vector_multi_collections(self, connect):
......@@ -755,7 +755,7 @@ class TestAddAsync:
assert status.OK()
assert len(result) == nb
connect.flush([collection])
status, count = connect.count_collection(collection)
status, count = connect.count_entities(collection)
assert status.OK()
logging.getLogger().info(status)
logging.getLogger().info(count)
......
......@@ -42,7 +42,7 @@ class TestCollectionInfoBase:
'''
collection_name = None
with pytest.raises(Exception) as e:
status, info = connect.collection_info(collection_name)
status, info = connect.get_collection_stats(collection_name)
@pytest.mark.timeout(INFO_TIMEOUT)
def test_get_collection_info_name_not_existed(self, connect, collection):
......@@ -52,7 +52,7 @@ class TestCollectionInfoBase:
expected: status not ok
'''
collection_name = gen_unique_str("not_existed_collection")
status, info = connect.collection_info(collection_name)
status, info = connect.get_collection_stats(collection_name)
assert not status.OK()
@pytest.fixture(
......@@ -70,7 +70,7 @@ class TestCollectionInfoBase:
expected: status not ok
'''
collection_name = get_collection_name
status, info = connect.collection_info(collection_name)
status, info = connect.get_collection_stats(collection_name)
assert not status.OK()
@pytest.mark.timeout(INFO_TIMEOUT)
......@@ -85,16 +85,16 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["row_count"] == nb
# delete a few vectors
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["row_count"] == nb - 2
......@@ -110,7 +110,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
assert len(info["partitions"]) == 1
......@@ -131,7 +131,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
assert len(info["partitions"]) == 2
......@@ -155,7 +155,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
for partition in info["partitions"]:
......@@ -183,7 +183,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["row_count"] == nb * 2
for partition in info["partitions"]:
......@@ -222,7 +222,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
index_string = info["partitions"][0]["segments"][0]["index_name"]
......@@ -248,7 +248,7 @@ class TestCollectionInfoBase:
status = connect.create_index(collection, index_type, index_param)
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
index_string = info["partitions"][0]["segments"][0]["index_name"]
......@@ -272,7 +272,7 @@ class TestCollectionInfoBase:
for index_type in [IndexType.FLAT, IndexType.IVFLAT, IndexType.IVF_SQ8]:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
index_string = info["partitions"][0]["segments"][0]["index_name"]
......
......@@ -208,7 +208,7 @@ class TestConnect:
b. data_set not too large, in case disconnection happens while data is still being prepared
c. data_set not too small, in case disconnection happens after data has already been transferred
d. make sure disconnection happens while data is in transport
Expected: Failure, count_collection == 0
Expected: Failure, count_entities == 0
'''
pass
......
......@@ -22,7 +22,7 @@ nb = 6000
class TestDeleteBase:
"""
******************************************************************
The following cases are used to test `delete_by_id` function
The following cases are used to test `delete_entity_by_id` function
******************************************************************
"""
......@@ -52,7 +52,7 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
......@@ -76,7 +76,7 @@ class TestDeleteBase:
status = connect.flush([collection])
# Bloom filter error
assert status.OK()
status = connect.delete_by_id(collection, [1])
status = connect.delete_entity_by_id(collection, [1])
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
......@@ -96,10 +96,10 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 0
......@@ -116,10 +116,10 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
time.sleep(2)
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 0
......@@ -136,7 +136,7 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, [0])
status = connect.delete_entity_by_id(collection, [0])
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
......@@ -156,7 +156,7 @@ class TestDeleteBase:
status = connect.flush([collection])
assert status.OK()
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, [0])
status = connect.delete_entity_by_id(collection_new, [0])
assert not status.OK()
def test_add_vectors_delete_vector(self, connect, collection, get_simple_index):
......@@ -173,7 +173,7 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
......@@ -199,7 +199,7 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
status = connect.create_index(collection, index_type, index_param)
......@@ -231,7 +231,7 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
status, tmp_ids = connect.add_vectors(collection, [vectors[0], vectors[-1]])
......@@ -259,11 +259,11 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
for i in range(10):
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
def test_delete_no_flush_multiable_times(self, connect, collection):
......@@ -278,10 +278,10 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
for i in range(10):
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
assert status.OK()
......@@ -289,7 +289,7 @@ class TestDeleteBase:
class TestDeleteIndexedVectors:
"""
******************************************************************
The following cases are used to test `delete_by_id` function
The following cases are used to test `delete_entity_by_id` function
******************************************************************
"""
@pytest.fixture(
......@@ -320,7 +320,7 @@ class TestDeleteIndexedVectors:
assert status.OK()
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
......@@ -347,7 +347,7 @@ class TestDeleteIndexedVectors:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
......@@ -365,7 +365,7 @@ class TestDeleteIndexedVectors:
class TestDeleteBinary:
"""
******************************************************************
The following cases are used to test `delete_by_id` function
The following cases are used to test `delete_entity_by_id` function
******************************************************************
"""
@pytest.fixture(
......@@ -392,7 +392,7 @@ class TestDeleteBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, ids)
status = connect.delete_entity_by_id(jac_collection, ids)
assert status.OK()
status = connect.flush([jac_collection])
search_param = get_search_param(index_type)
......@@ -415,10 +415,10 @@ class TestDeleteBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, ids)
status = connect.delete_entity_by_id(jac_collection, ids)
assert status.OK()
status = connect.flush([jac_collection])
status, res = connect.count_collection(jac_collection)
status, res = connect.count_entities(jac_collection)
assert status.OK()
assert res == 0
......@@ -435,7 +435,7 @@ class TestDeleteBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, [0])
status = connect.delete_entity_by_id(jac_collection, [0])
assert status.OK()
status = connect.flush([jac_collection])
status = connect.flush([jac_collection])
......@@ -456,9 +456,9 @@ class TestDeleteBinary:
status = connect.flush([jac_collection])
assert status.OK()
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, [0])
status = connect.delete_entity_by_id(collection_new, [0])
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, [0])
status = connect.delete_entity_by_id(collection_new, [0])
assert not status.OK()
def test_add_vectors_delete_vector(self, connect, jac_collection, get_simple_index):
......@@ -475,7 +475,7 @@ class TestDeleteBinary:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
assert status.OK()
status = connect.flush([jac_collection])
search_param = get_search_param(index_type)
......@@ -500,7 +500,7 @@ class TestDeleteBinary:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
assert status.OK()
status = connect.flush([jac_collection])
status, tmp_ids = connect.add_vectors(jac_collection, [vectors[0], vectors[-1]])
......@@ -533,13 +533,13 @@ class TestDeleteIdsIngalid(object):
def test_delete_vector_id_invalid(self, connect, collection, gen_invalid_id):
invalid_id = gen_invalid_id
with pytest.raises(Exception) as e:
status = connect.delete_by_id(collection, [invalid_id])
status = connect.delete_entity_by_id(collection, [invalid_id])
@pytest.mark.level(2)
def test_delete_vector_ids_invalid(self, connect, collection, gen_invalid_id):
invalid_id = gen_invalid_id
with pytest.raises(Exception) as e:
status = connect.delete_by_id(collection, [1, invalid_id])
status = connect.delete_entity_by_id(collection, [1, invalid_id])
class TestCollectionNameInvalid(object):
......@@ -556,6 +556,6 @@ class TestCollectionNameInvalid(object):
@pytest.mark.level(2)
def test_delete_vectors_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
status = connect.delete_by_id(collection_name, [1])
status = connect.delete_entity_by_id(collection_name, [1])
assert not status.OK()
......@@ -65,13 +65,13 @@ class TestFlushBase:
ids = [i for i in range(nb)]
status, ids = connect.insert(collection, vectors, ids)
status = connect.flush([collection])
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == nb
status, ids = connect.insert(collection, vectors, ids, partition_tag=tag)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == 2 * nb
def test_add_partitions_flush(self, connect, collection):
......@@ -91,7 +91,7 @@ class TestFlushBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == 2 * nb
def test_add_collections_flush(self, connect, collection):
......@@ -116,9 +116,9 @@ class TestFlushBase:
status = connect.flush([collection])
status = connect.flush([collection_new])
assert status.OK()
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == nb
result, res = connect.count_collection(collection_new)
result, res = connect.count_entities(collection_new)
assert res == nb
def test_add_flush_multiable_times(self, connect, collection):
......@@ -150,7 +150,7 @@ class TestFlushBase:
start_time = time.time()
while (time.time()-start_time < timeout):
time.sleep(1)
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
if res == nb:
assert status.OK()
break
......@@ -180,7 +180,7 @@ class TestFlushBase:
status, ids = connect.add_vectors(collection, vectors, ids)
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == nb
......@@ -192,7 +192,7 @@ class TestFlushBase:
vectors = gen_vectors(nb, dim)
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
status = connect.delete_by_id(collection, [ids[-1]])
status = connect.delete_entity_by_id(collection, [ids[-1]])
assert status.OK()
for i in range(10):
status = connect.flush([collection])
......@@ -204,7 +204,7 @@ class TestFlushBase:
# TODO: CI fail, LOCAL pass
def _test_collection_count_during_flush(self, connect, args):
'''
method: flush collection at background, call `count_collection`
method: flush collection at background, call `count_entities`
expected: status ok
'''
collection = gen_unique_str()
......@@ -218,16 +218,16 @@ class TestFlushBase:
status, ids = milvus.add_vectors(collection, vectors, ids=[i for i in range(nb)])
def flush(collection_name):
milvus = get_milvus(args["ip"], args["port"], handler=args["handler"])
status = milvus.delete_by_id(collection_name, [i for i in range(nb)])
status = milvus.delete_entity_by_id(collection_name, [i for i in range(nb)])
assert status.OK()
status = milvus.flush([collection_name])
assert status.OK()
p = Process(target=flush, args=(collection, ))
p.start()
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
assert status.OK()
p.join()
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
assert status.OK()
logging.getLogger().info(res)
assert res == 0
......
......@@ -11,7 +11,7 @@ from utils import *
dim = 128
index_file_size = 10
collection_id = "get_vectors_by_ids"
collection_id = "get_entity_by_id"
DELETE_TIMEOUT = 60
nprobe = 1
tag = "1970-01-01"
......@@ -22,12 +22,12 @@ tag = "tag"
class TestGetBase:
"""
******************************************************************
The following cases are used to test .get_vectors_by_ids` function
The following cases are used to test .get_entity_by_id` function
******************************************************************
"""
def test_get_vector_A(self, connect, collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -36,13 +36,13 @@ class TestGetBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, ids)
status, res = connect.get_entity_by_id(collection, ids)
assert status.OK()
assert_equal_vector(res[0], vector[0])
def test_get_vector_B(self, connect, collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -52,14 +52,14 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
length = 100
status, res = connect.get_vectors_by_ids(collection, ids[:length])
status, res = connect.get_entity_by_id(collection, ids[:length])
assert status.OK()
for i in range(length):
assert_equal_vector(res[i], vectors[i])
def test_get_vector_C_limit(self, connect, collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get, limit > 1000
expected: status ok, vector returned
'''
......@@ -68,12 +68,12 @@ class TestGetBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, ids)
status, res = connect.get_entity_by_id(collection, ids)
assert not status.OK()
def test_get_vector_partition(self, connect, collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -85,14 +85,14 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
length = 100
status, res = connect.get_vectors_by_ids(collection, ids[:length])
status, res = connect.get_entity_by_id(collection, ids[:length])
assert status.OK()
for i in range(length):
assert_equal_vector(res[i], vectors[i])
def test_get_vector_multi_same_ids(self, connect, collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vectors, with the same id, get vector by the given id
expected: status ok, get one vector
'''
......@@ -102,7 +102,7 @@ class TestGetBase:
status, ids = connect.add_vectors(collection, vectors, ids=ids)
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [0])
status, res = connect.get_entity_by_id(collection, [0])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
......@@ -121,7 +121,7 @@ class TestGetBase:
def test_get_vector_after_delete(self, connect, collection, get_id):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vectors, and delete, get vector by the given id
expected: status ok, get one vector
'''
......@@ -131,17 +131,17 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
id = get_id
status = connect.delete_by_id(collection, [ids[id]])
status = connect.delete_entity_by_id(collection, [ids[id]])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert not len(res[0])
def test_get_vector_after_delete_with_partition(self, connect, collection, get_id):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vectors into partition, and delete, get vector by the given id
expected: status ok, get one vector
'''
......@@ -152,11 +152,11 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
id = get_id
status = connect.delete_by_id(collection, [ids[id]])
status = connect.delete_entity_by_id(collection, [ids[id]])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert not len(res[0])
......@@ -171,7 +171,7 @@ class TestGetBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [1])
status, res = connect.get_entity_by_id(collection, [1])
assert status.OK()
assert not len(res[0])
......@@ -187,14 +187,14 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
collection_new = gen_unique_str()
status, res = connect.get_vectors_by_ids(collection_new, [1])
status, res = connect.get_entity_by_id(collection_new, [1])
assert not status.OK()
class TestGetIndexedVectors:
"""
******************************************************************
The following cases are used to test .get_vectors_by_ids` function
The following cases are used to test .get_entity_by_id` function
******************************************************************
"""
@pytest.fixture(
......@@ -240,13 +240,13 @@ class TestGetIndexedVectors:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
id = get_id
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert_equal_vector(res[0], vectors[id])
def test_get_vector_after_delete(self, connect, collection, get_simple_index, get_id):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vectors, and delete, get vector by the given id
expected: status ok, get one vector
'''
......@@ -260,17 +260,17 @@ class TestGetIndexedVectors:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
id = get_id
status = connect.delete_by_id(collection, [ids[id]])
status = connect.delete_entity_by_id(collection, [ids[id]])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert not len(res[0])
def test_get_vector_partition(self, connect, collection, get_simple_index, get_id):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -286,7 +286,7 @@ class TestGetIndexedVectors:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
id = get_id
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert_equal_vector(res[0], vectors[id])
......@@ -294,12 +294,12 @@ class TestGetIndexedVectors:
class TestGetBinary:
"""
******************************************************************
The following cases are used to test .get_vectors_by_ids` function
The following cases are used to test .get_entity_by_id` function
******************************************************************
"""
def test_get_vector_A(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -308,13 +308,13 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [ids[0]])
status, res = connect.get_entity_by_id(jac_collection, [ids[0]])
assert status.OK()
assert_equal_vector(res[0], vector[0])
def test_get_vector_B(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -323,13 +323,13 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [ids[0]])
status, res = connect.get_entity_by_id(jac_collection, [ids[0]])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
def test_get_vector_multi_same_ids(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vectors, with the same id, get vector by the given id
expected: status ok, get one vector
'''
......@@ -339,7 +339,7 @@ class TestGetBinary:
status, ids = connect.add_vectors(jac_collection, vectors, ids=ids)
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [0])
status, res = connect.get_entity_by_id(jac_collection, [0])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
......@@ -354,7 +354,7 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [1])
status, res = connect.get_entity_by_id(jac_collection, [1])
assert status.OK()
assert not len(res[0])
......@@ -370,12 +370,12 @@ class TestGetBinary:
status = connect.flush([jac_collection])
assert status.OK()
collection_new = gen_unique_str()
status, res = connect.get_vectors_by_ids(collection_new, [1])
status, res = connect.get_entity_by_id(collection_new, [1])
assert not status.OK()
def test_get_vector_partition(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test.get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
......@@ -385,7 +385,7 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [ids[0]])
status, res = connect.get_entity_by_id(jac_collection, [ids[0]])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
......@@ -407,7 +407,7 @@ class TestGetVectorIdIngalid(object):
def test_get_vector_id_invalid(self, connect, collection, gen_invalid_id):
invalid_id = gen_invalid_id
with pytest.raises(Exception) as e:
status = connect.get_vectors_by_ids(collection, [invalid_id])
status = connect.get_entity_by_id(collection, [invalid_id])
class TestCollectionNameInvalid(object):
......@@ -425,5 +425,5 @@ class TestCollectionNameInvalid(object):
def test_get_vectors_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
vectors = gen_vectors(1, dim)
status, result = connect.get_vectors_by_ids(collection_name, [1])
status, result = connect.get_entity_by_id(collection_name, [1])
assert not status.OK()
......@@ -48,7 +48,7 @@ class TestMixBase:
logging.getLogger().info("In create index")
status = milvus_instance.create_index(collection, index_params)
logging.getLogger().info(status)
status, result = milvus_instance.describe_index(collection)
status, result = milvus_instance.get_index_info(collection)
logging.getLogger().info(result)
def add_vectors(milvus_instance):
logging.getLogger().info("In add vectors")
......@@ -130,17 +130,17 @@ class TestMixBase:
#describe index
for i in range(10):
status, result = connect.describe_index(collection_list[i])
status, result = connect.get_index_info(collection_list[i])
assert result._index_type == IndexType.FLAT
status, result = connect.describe_index(collection_list[10 + i])
status, result = connect.get_index_info(collection_list[10 + i])
assert result._index_type == IndexType.IVFLAT
status, result = connect.describe_index(collection_list[20 + i])
status, result = connect.get_index_info(collection_list[20 + i])
assert result._index_type == IndexType.IVF_SQ8
status, result = connect.describe_index(collection_list[30 + i])
status, result = connect.get_index_info(collection_list[30 + i])
assert result._index_type == IndexType.FLAT
status, result = connect.describe_index(collection_list[40 + i])
status, result = connect.get_index_info(collection_list[40 + i])
assert result._index_type == IndexType.IVFLAT
status, result = connect.describe_index(collection_list[50 + i])
status, result = connect.get_index_info(collection_list[50 + i])
assert result._index_type == IndexType.IVF_SQ8
#search
......
......@@ -88,7 +88,7 @@ class TestCreateBase:
tag_name = gen_unique_str()
status = connect.create_partition(collection, tag_name)
assert status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
tag_list = []
for item in res:
......@@ -157,7 +157,7 @@ class TestCreateBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nq * 2
def test_create_partition_insert_same_tags_two_collections(self, connect, collection):
......@@ -183,9 +183,9 @@ class TestCreateBase:
status, ids = connect.insert(collection_new, vectors, ids, partition_tag=tag)
status = connect.flush([collection, collection_new])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nq
status, res = connect.count_collection(collection_new)
status, res = connect.count_entities(collection_new)
assert res == nq
......@@ -193,38 +193,38 @@ class TestShowBase:
"""
******************************************************************
The following cases are used to test `show_partitions` function
The following cases are used to test `list_partitions` function
******************************************************************
"""
def test_show_partitions(self, connect, collection):
def test_list_partitions(self, connect, collection):
'''
target: test show partitions, check status and partitions returned
method: create partition first, then call function: show_partitions
method: create partition first, then call function: list_partitions
expected: status ok, partition correct
'''
status = connect.create_partition(collection, tag)
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
def test_show_partitions_no_partition(self, connect, collection):
def test_list_partitions_no_partition(self, connect, collection):
'''
target: test show partitions with collection name, check status and partitions returned
method: call function: show_partitions
method: call function: list_partitions
expected: status ok, partitions correct
'''
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
def test_show_multi_partitions(self, connect, collection):
'''
target: test show partitions, check status and partitions returned
method: create partitions first, then call function: show_partitions
method: create partitions first, then call function: list_partitions
expected: status ok, partitions correct
'''
tag_new = gen_unique_str()
status = connect.create_partition(collection, tag)
status = connect.create_partition(collection, tag_new)
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
......@@ -316,7 +316,7 @@ class TestDropBase:
status = connect.create_partition(collection, tag)
status = connect.drop_partition(collection, tag)
assert status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
tag_list = []
for item in res:
tag_list.append(item.tag)
......@@ -356,7 +356,7 @@ class TestDropBase:
status = connect.drop_partition(collection, tag)
time.sleep(2)
assert not status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
tag_list = []
for item in res:
tag_list.append(item.tag)
......@@ -373,7 +373,7 @@ class TestDropBase:
time.sleep(2)
status = connect.create_partition(collection, tag)
assert status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
tag_list = []
for item in res:
tag_list.append(item.tag)
......@@ -417,13 +417,13 @@ class TestNameInvalid(object):
status = connect.drop_partition(collection, tag_name)
assert not status.OK()
def test_show_partitions_with_invalid_collection_name(self, connect, collection, get_collection_name):
def test_list_partitions_with_invalid_collection_name(self, connect, collection, get_collection_name):
'''
target: test show partitions, with invalid collection name, check status returned
method: call function: show_partitions
method: call function: list_partitions
expected: status not ok
'''
collection_name = get_collection_name
status = connect.create_partition(collection, tag)
status, res = connect.show_partitions(collection_name)
status, res = connect.list_partitions(collection_name)
assert not status.OK()
......@@ -220,7 +220,7 @@ class TestSearchBase:
query_ids = non_exist_id
logging.getLogger().info(query_ids)
logging.getLogger().info(collection)
logging.getLogger().info(connect.describe_collection(collection))
logging.getLogger().info(connect.get_collection_info(collection))
status, result = connect.search_by_ids(collection, query_ids, top_k, params={})
assert not status.OK()
......@@ -295,7 +295,7 @@ class TestSearchBase:
def test_search_index_delete(self, connect, collection):
vectors, ids = self.init_data(connect, collection)
query_ids = ids[0:nq]
status = connect.delete_by_id(collection, [query_ids[0]])
status = connect.delete_entity_by_id(collection, [query_ids[0]])
assert status.OK()
status = connect.flush([collection])
status, result = connect.search_by_ids(collection, query_ids, top_k, params={})
......
......@@ -585,7 +585,7 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(ip_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(ip_collection))
logging.getLogger().info(connect.get_index_info(ip_collection))
query_vecs = [[0.50 for i in range(dim)]]
distance_0 = numpy.inner(numpy.array(query_vecs[0]), numpy.array(vectors[0]))
distance_1 = numpy.inner(numpy.array(query_vecs[0]), numpy.array(vectors[1]))
......@@ -607,8 +607,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(jac_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(jac_collection))
logging.getLogger().info(connect.describe_index(jac_collection))
logging.getLogger().info(connect.get_collection_info(jac_collection))
logging.getLogger().info(connect.get_index_info(jac_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, jac_collection, nb=1, insert=False)
distance_0 = jaccard(query_int_vectors[0], int_vectors[0])
distance_1 = jaccard(query_int_vectors[0], int_vectors[1])
......@@ -632,8 +632,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(ham_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(ham_collection))
logging.getLogger().info(connect.describe_index(ham_collection))
logging.getLogger().info(connect.get_collection_info(ham_collection))
logging.getLogger().info(connect.get_index_info(ham_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, ham_collection, nb=1, insert=False)
distance_0 = hamming(query_int_vectors[0], int_vectors[0])
distance_1 = hamming(query_int_vectors[0], int_vectors[1])
......@@ -657,8 +657,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(substructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(substructure_collection))
logging.getLogger().info(connect.describe_index(substructure_collection))
logging.getLogger().info(connect.get_collection_info(substructure_collection))
logging.getLogger().info(connect.get_index_info(substructure_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, substructure_collection, nb=1, insert=False)
distance_0 = substructure(query_int_vectors[0], int_vectors[0])
distance_1 = substructure(query_int_vectors[0], int_vectors[1])
......@@ -683,8 +683,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(substructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(substructure_collection))
logging.getLogger().info(connect.describe_index(substructure_collection))
logging.getLogger().info(connect.get_collection_info(substructure_collection))
logging.getLogger().info(connect.get_index_info(substructure_collection))
query_int_vectors, query_vecs = gen_binary_sub_vectors(int_vectors, 2)
search_param = get_search_param(index_type)
status, result = connect.search_vectors(substructure_collection, top_k, query_vecs, params=search_param)
......@@ -711,8 +711,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(superstructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(superstructure_collection))
logging.getLogger().info(connect.describe_index(superstructure_collection))
logging.getLogger().info(connect.get_collection_info(superstructure_collection))
logging.getLogger().info(connect.get_index_info(superstructure_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, superstructure_collection, nb=1, insert=False)
distance_0 = superstructure(query_int_vectors[0], int_vectors[0])
distance_1 = superstructure(query_int_vectors[0], int_vectors[1])
......@@ -737,8 +737,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(superstructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(superstructure_collection))
logging.getLogger().info(connect.describe_index(superstructure_collection))
logging.getLogger().info(connect.get_collection_info(superstructure_collection))
logging.getLogger().info(connect.get_index_info(superstructure_collection))
query_int_vectors, query_vecs = gen_binary_super_vectors(int_vectors, 2)
search_param = get_search_param(index_type)
status, result = connect.search_vectors(superstructure_collection, top_k, query_vecs, params=search_param)
......@@ -765,8 +765,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(tanimoto_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(tanimoto_collection))
logging.getLogger().info(connect.describe_index(tanimoto_collection))
logging.getLogger().info(connect.get_collection_info(tanimoto_collection))
logging.getLogger().info(connect.get_index_info(tanimoto_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, tanimoto_collection, nb=1, insert=False)
distance_0 = tanimoto(query_int_vectors[0], int_vectors[0])
distance_1 = tanimoto(query_int_vectors[0], int_vectors[1])
......@@ -790,7 +790,7 @@ class TestSearchBase:
pytest.skip("rnsg not support in ip")
vectors, ids = self.init_data(connect, ip_collection, nb=2)
connect.create_index(ip_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(ip_collection))
logging.getLogger().info(connect.get_index_info(ip_collection))
query_vecs = [[0.50 for i in range(dim)]]
search_param = get_search_param(index_type)
status, result = connect.search_vectors(ip_collection, top_k, query_vecs, params=search_param)
......
......@@ -32,10 +32,10 @@ class TestWalBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == nb
status, res = connect.get_vectors_by_ids(collection, [ids[0]])
status, res = connect.get_entity_by_id(collection, [ids[0]])
logging.getLogger().info(res)
assert status.OK()
assert_equal_vector(res[0], vectors[0])
......@@ -51,13 +51,13 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 0
......@@ -72,10 +72,10 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vector)
assert status.OK()
connect.flush([collection])
status = connect.delete_by_id(collection, [0])
status = connect.delete_entity_by_id(collection, [0])
assert status.OK()
status = connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 1
......@@ -90,14 +90,14 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
status = connect.flush([collection])
status = connect.delete_by_id(collection, [0])
status = connect.delete_entity_by_id(collection, [0])
connect.flush([collection])
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, ids)
status = connect.delete_entity_by_id(collection_new, ids)
assert not status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == nb
......@@ -112,7 +112,7 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vector)
assert status.OK()
status = connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
logging.getLogger().info(res) # should be 0 because no auto flush
logging.getLogger().info("Stop server and restart")
......@@ -120,10 +120,10 @@ class TestWalBase:
# time.sleep(15)
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 1
status, res = connect.get_vectors_by_ids(collection, [ids[0]])
status, res = connect.get_entity_by_id(collection, [ids[0]])
logging.getLogger().info(res)
assert status.OK()
assert_equal_vector(res[0], vector[0])
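
As a closing illustration of how the renamed calls compose, here is the add / flush / count / delete / get flow that recurs in the hunks above, written against the new names. This is a minimal sketch assuming the suite's `connect` and `collection` pytest fixtures, its `gen_vectors` helper, and the module constants `nb` and `dim`; it is not part of the commit.

vectors = gen_vectors(nb, dim)
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
status = connect.flush([collection])                                  # make the insert visible
status, count = connect.count_entities(collection)                    # was count_collection
assert count == nb
status = connect.delete_entity_by_id(collection, [ids[0], ids[-1]])   # was delete_by_id
status = connect.flush([collection])
status, res = connect.get_entity_by_id(collection, [ids[0]])          # was get_vectors_by_ids
assert status.OK()
assert not len(res[0])                                                # a deleted id comes back empty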