未验证 提交 f3fb0f8b 编写于 作者: X Xiangyu Wang 提交者: GitHub

Change partition_tag to partition_name (#5514)

Signed-off-by: Xiangyu Wang <xiangyu.wang@zilliz.com>
上级 316b6fbd
......@@ -58,7 +58,7 @@ class TestCollectionCount:
'''
entities = gen_entities(insert_count)
connect.create_partition(collection, tag)
ids = connect.insert(collection, entities, partition_tag=tag)
ids = connect.insert(collection, entities, partition_name=tag)
assert len(ids) == insert_count
connect.flush([collection])
stats = connect.get_collection_stats(collection)
......@@ -93,7 +93,7 @@ class TestCollectionCount:
# entities = gen_entities(insert_count)
# connect.create_partition(collection, tag)
# connect.create_partition(collection, new_tag)
# res_ids = connect.insert(collection, entities, partition_tag=tag)
# res_ids = connect.insert(collection, entities, partition_name=tag)
# connect.flush([collection])
# # res = connect.count_entities(collection)
# # assert res == insert_count
......@@ -112,7 +112,7 @@ class TestCollectionCount:
# connect.create_partition(collection, tag)
# connect.create_partition(collection, new_tag)
# res_ids = connect.insert(collection, entities)
# res_ids_2 = connect.insert(collection, entities, partition_tag=tag)
# res_ids_2 = connect.insert(collection, entities, partition_name=tag)
# connect.flush([collection])
# # res = connect.count_entities(collection)
# # assert res == insert_count * 2
......@@ -130,8 +130,8 @@ class TestCollectionCount:
# entities = gen_entities(insert_count)
# connect.create_partition(collection, tag)
# connect.create_partition(collection, new_tag)
# res_ids = connect.insert(collection, entities, partition_tag=tag)
# res_ids2 = connect.insert(collection, entities, partition_tag=new_tag)
# res_ids = connect.insert(collection, entities, partition_name=tag)
# res_ids2 = connect.insert(collection, entities, partition_name=new_tag)
# connect.flush([collection])
# # res = connect.count_entities(collection)
# # assert res == insert_count * 2
......@@ -288,7 +288,7 @@ class TestCollectionCountBinary:
'''
raw_vectors, entities = gen_binary_entities(insert_count)
connect.create_partition(binary_collection, tag)
connect.insert(binary_collection, entities, partition_tag=tag)
connect.insert(binary_collection, entities, partition_name=tag)
connect.flush([binary_collection])
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == insert_count
......@@ -324,7 +324,7 @@ class TestCollectionCountBinary:
# raw_vectors, entities = gen_binary_entities(insert_count)
# connect.create_partition(binary_collection, tag)
# connect.create_partition(binary_collection, new_tag)
# res_ids = connect.insert(binary_collection, entities, partition_tag=tag)
# res_ids = connect.insert(binary_collection, entities, partition_name=tag)
# connect.flush([binary_collection])
# # res = connect.count_entities(binary_collection)
# # assert res == insert_count
......@@ -343,7 +343,7 @@ class TestCollectionCountBinary:
# connect.create_partition(binary_collection, tag)
# connect.create_partition(binary_collection, new_tag)
# res_ids = connect.insert(binary_collection, entities)
# res_ids_2 = connect.insert(binary_collection, entities, partition_tag=tag)
# res_ids_2 = connect.insert(binary_collection, entities, partition_name=tag)
# connect.flush([binary_collection])
# # res = connect.count_entities(binary_collection)
# # assert res == insert_count * 2
......@@ -362,8 +362,8 @@ class TestCollectionCountBinary:
# raw_vectors, entities = gen_binary_entities(insert_count)
# connect.create_partition(binary_collection, tag)
# connect.create_partition(binary_collection, new_tag)
# res_ids = connect.insert(binary_collection, entities, partition_tag=tag)
# res_ids2 = connect.insert(binary_collection, entities, partition_tag=new_tag)
# res_ids = connect.insert(binary_collection, entities, partition_name=tag)
# res_ids2 = connect.insert(binary_collection, entities, partition_name=new_tag)
# connect.flush([binary_collection])
# # res = connect.count_entities(binary_collection)
# # assert res == insert_count * 2
......
......@@ -195,7 +195,7 @@ class TestGetCollectionStats:
expected: status ok, vectors added to partition
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
stats = connect.get_collection_stats(collection)
......@@ -211,11 +211,11 @@ class TestGetCollectionStats:
new_tag = "new_tag"
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, default_entities, partition_tag=default_tag)
connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
connect.insert(collection, default_entities, partition_tag=new_tag)
connect.insert(collection, default_entities, partition_name=new_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb * 2
......@@ -251,7 +251,7 @@ class TestGetCollectionStats:
entities = gen_entities(insert_count)
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities, partition_tag=default_tag)
connect.insert(collection, entities, partition_name=default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count
......@@ -269,7 +269,7 @@ class TestGetCollectionStats:
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities)
connect.insert(collection, entities, partition_tag=default_tag)
connect.insert(collection, entities, partition_name=default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count*2
......@@ -285,8 +285,8 @@ class TestGetCollectionStats:
entities = gen_entities(insert_count)
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities, partition_tag=default_tag)
connect.insert(collection, entities, partition_tag=new_tag)
connect.insert(collection, entities, partition_name=default_tag)
connect.insert(collection, entities, partition_name=new_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count*2
......
......@@ -227,14 +227,14 @@ class TestLoadCollection:
ids = connect.insert(collection, default_entities)
assert len(ids) == default_nb
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.load_collection(collection)
connect.release_partitions(collection, [default_tag])
with pytest.raises(Exception) as e:
connect.search(collection, default_single_query, partition_tags=[default_tag])
res = connect.search(collection, default_single_query, partition_tags=[default_partition_name])
connect.search(collection, default_single_query, partition_names=[default_tag])
res = connect.search(collection, default_single_query, partition_names=[default_partition_name])
assert len(res[0]) == default_top_k
def test_load_collection_release_all_partitions(self, connect, collection):
......@@ -246,7 +246,7 @@ class TestLoadCollection:
ids = connect.insert(collection, default_entities)
assert len(ids) == default_nb
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.load_collection(collection)
......@@ -262,7 +262,7 @@ class TestLoadCollection:
expected: search result empty
"""
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.load_partitions(collection, [default_tag])
......@@ -302,7 +302,7 @@ class TestReleaseAdvanced:
top_k = 1
connect.create_partition(collection, default_tag)
query, _ = gen_query_vectors(field_name, default_entities, top_k, nq)
connect.insert(collection, default_entities, partition_tag=default_tag)
connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
connect.load_partitions(collection, [default_tag])
res = connect.search(collection, query, _async=True)
......@@ -321,7 +321,7 @@ class TestReleaseAdvanced:
top_k = 1
connect.create_partition(collection, default_tag)
query, _ = gen_query_vectors(field_name, default_entities, top_k, nq)
connect.insert(collection, default_entities, partition_tag=default_tag)
connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
connect.load_partitions(collection, [default_tag])
res = connect.search(collection, query, _async=True)
......@@ -354,7 +354,7 @@ class TestReleaseAdvanced:
expected:
"""
connect.create_partition(collection, default_tag)
connect.insert(collection, default_entities, partition_tag=default_tag)
connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
def load():
......@@ -464,14 +464,14 @@ class TestLoadPartition:
expected: no error raised
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.create_index(collection, default_float_vec_field_name, get_simple_index)
search_param = get_search_param(get_simple_index["index_type"])
query, vecs = gen_query_vectors(field_name, default_entities, default_top_k, nq=1, search_params=search_param)
connect.load_partitions(collection, [default_tag])
res = connect.search(collection, query, partition_tags=[default_tag])
res = connect.search(collection, query, partition_names=[default_tag])
assert len(res[0]) == default_top_k
@pytest.mark.level(2)
......@@ -483,7 +483,7 @@ class TestLoadPartition:
expected: no error raised
'''
connect.create_partition(binary_collection, default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([binary_collection])
for metric_type in binary_metrics():
......@@ -563,7 +563,7 @@ class TestLoadPartition:
expected: raise exception
"""
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.release_partitions(collection, [default_tag])
......@@ -571,7 +571,7 @@ class TestLoadPartition:
@pytest.mark.level(2)
def test_load_release_after_drop(self, connect, collection):
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.load_partitions(collection, [default_tag])
......@@ -601,7 +601,7 @@ class TestLoadPartition:
expected: raise exception
"""
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.load_partitions(collection, [default_tag])
......@@ -622,7 +622,7 @@ class TestLoadPartition:
expected: raise exception
"""
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([collection])
connect.load_partitions(collection, [default_tag])
......
......@@ -323,7 +323,7 @@ default_single_query = {
# expected: entities deleted
# '''
# connect.create_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name=default_tag)
# connect.flush([collection])
# delete_ids = [ids[0], ids[-1]]
# status = connect.delete_entity_by_id(collection, delete_ids)
......@@ -355,8 +355,8 @@ default_single_query = {
# tag_new = "tag_new"
# connect.create_partition(collection, default_tag)
# connect.create_partition(collection, tag_new)
# ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
# ids_new = connect.bulk_insert(collection, default_entities, partition_tag=tag_new)
# ids = connect.bulk_insert(collection, default_entities, partition_name=default_tag)
# ids_new = connect.bulk_insert(collection, default_entities, partition_name=tag_new)
# connect.flush([collection])
# delete_ids = [ids[0], ids_new[0]]
# status = connect.delete_entity_by_id(collection, delete_ids)
......@@ -373,8 +373,8 @@ default_single_query = {
# tag_new = "tag_new"
# connect.create_partition(collection, default_tag)
# connect.create_partition(collection, tag_new)
# ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
# ids_new = connect.bulk_insert(collection, default_entities, partition_tag=tag_new)
# ids = connect.bulk_insert(collection, default_entities, partition_name=default_tag)
# ids_new = connect.bulk_insert(collection, default_entities, partition_name=tag_new)
# connect.flush([collection])
# connect.create_index(collection, field_name, get_simple_index)
# delete_ids = [ids[0], ids_new[0]]
......
......@@ -178,7 +178,7 @@ default_single_query = {
# expected: entity returned
# '''
# connect.create_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_tag = default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name = default_tag)
# connect.flush([collection])
# get_ids = ids[:get_pos]
# res = connect.get_entity_by_id(collection, get_ids)
......@@ -224,7 +224,7 @@ default_single_query = {
# tag_new = "tag_new"
# connect.create_partition(collection, default_tag)
# connect.create_partition(collection, tag_new)
# ids = connect.bulk_insert(collection, default_entities, partition_tag = default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name = default_tag)
# connect.flush([collection])
# get_ids = ids[:get_pos]
# res = connect.get_entity_by_id(collection, get_ids)
......@@ -241,8 +241,8 @@ default_single_query = {
# connect.create_partition(collection, default_tag)
# connect.create_partition(collection, tag_new)
# new_entities = gen_entities(default_nb + 1)
# ids = connect.bulk_insert(collection, default_entities, partition_tag = default_tag)
# ids_new = connect.bulk_insert(collection, new_entities, partition_tag = tag_new)
# ids = connect.bulk_insert(collection, default_entities, partition_name = default_tag)
# ids_new = connect.bulk_insert(collection, new_entities, partition_name = tag_new)
# connect.flush([collection])
# get_ids = ids[:get_pos]
# get_ids.extend(ids_new[:get_pos])
......@@ -260,7 +260,7 @@ default_single_query = {
# expected: entity returned
# '''
# connect.create_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_tag = default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name = default_tag)
# connect.flush([collection])
# connect.create_index(collection, default_float_vec_field_name, get_simple_index)
# get_ids = ids[:get_pos]
......@@ -509,7 +509,7 @@ default_single_query = {
# expected: get one entity
# '''
# connect.create_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_tag = default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name = default_tag)
# connect.flush([collection])
# status = connect.delete_entity_by_id(collection, [ids[get_pos]])
# connect.flush([collection])
......
......@@ -309,11 +309,11 @@ class TestInsertBase:
def test_insert_partition(self, connect, collection):
'''
target: test insert entities in collection created before
method: create collection and insert entities in it, with the partition_tag param
method: create collection and insert entities in it, with the partition_name param
expected: the collection row count equals to nq
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
assert connect.has_partition(collection, default_tag)
connect.flush([collection])
......@@ -326,12 +326,12 @@ class TestInsertBase:
def test_insert_partition_with_ids(self, connect, id_collection):
'''
target: test insert entities in collection created before, insert with ids
method: create collection and insert entities in it, with the partition_tag param
method: create collection and insert entities in it, with the partition_name param
expected: the collection row count equals to nq
'''
connect.create_partition(id_collection, default_tag)
ids = [i for i in range(default_nb)]
res_ids = connect.insert(id_collection, gen_entities(default_nb), ids=ids, partition_tag=default_tag)
res_ids = connect.insert(id_collection, gen_entities(default_nb), ids=ids, partition_name=default_tag)
assert res_ids == ids
logging.getLogger().info(connect.describe_collection(id_collection))
......@@ -343,7 +343,7 @@ class TestInsertBase:
method: create partition and insert info collection without tag params
expected: the collection row count equals to nb
'''
ids = connect.insert(collection, default_entities, partition_tag=default_partition_name)
ids = connect.insert(collection, default_entities, partition_name=default_partition_name)
assert len(ids) == default_nb
connect.flush([collection])
stats = connect.get_collection_stats(collection)
......@@ -353,23 +353,23 @@ class TestInsertBase:
def test_insert_partition_not_existed(self, connect, collection):
'''
target: test insert entities in collection created before
method: create collection and insert entities in it, with the not existed partition_tag param
method: create collection and insert entities in it, with the not existed partition_name param
expected: error raised
'''
tag = gen_unique_str()
with pytest.raises(Exception) as e:
connect.insert(collection, default_entities, partition_tag=tag)
connect.insert(collection, default_entities, partition_name=tag)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_partition_repeatedly(self, connect, collection):
'''
target: test insert entities in collection created before
method: create collection and insert entities in it repeatly, with the partition_tag param
method: create collection and insert entities in it repeatly, with the partition_name param
expected: the collection row count equals to nq
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
res = connect.get_collection_stats(collection)
assert res[row_count] == 2 * default_nb
......@@ -572,11 +572,11 @@ class TestInsertBinary:
def test_insert_binary_partition(self, connect, binary_collection):
'''
target: test insert entities and create partition tag
method: create collection and insert binary entities in it, with the partition_tag param
method: create collection and insert binary entities in it, with the partition_name param
expected: the collection row count equals to nb
'''
connect.create_partition(binary_collection, default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag)
assert len(ids) == default_nb
assert connect.has_partition(binary_collection, default_tag)
connect.flush([binary_collection])
......@@ -1013,9 +1013,9 @@ class TestInsertInvalid(object):
connect.create_partition(collection, default_tag)
if tag_name is not None:
with pytest.raises(Exception):
connect.insert(collection, default_entity, partition_tag=tag_name)
connect.insert(collection, default_entity, partition_name=tag_name)
else:
connect.insert(collection, default_entity, partition_tag=tag_name)
connect.insert(collection, default_entity, partition_name=tag_name)
def test_insert_with_invalid_field_name(self, connect, collection, get_field_name):
tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", get_field_name)
......
......@@ -120,7 +120,7 @@ uid = "list_id_in_segment"
# nb = 10
# entities = gen_entities(nb)
# connect.create_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, entities, partition_tag=default_tag)
# ids = connect.bulk_insert(collection, entities, partition_name=default_tag)
# connect.flush([collection])
# stats = connect.get_collection_stats(collection)
# assert stats["partitions"][1]["tag"] == default_tag
......@@ -162,7 +162,7 @@ uid = "list_id_in_segment"
# expected: status ok
# '''
# connect.create_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name=default_tag)
# connect.flush([collection])
# stats = connect.get_collection_stats(collection)
# assert stats["partitions"][1]["tag"] == default_tag
......@@ -255,7 +255,7 @@ uid = "list_id_in_segment"
# connect.create_partition(binary_collection, default_tag)
# nb = 10
# vectors, entities = gen_binary_entities(nb)
# ids = connect.bulk_insert(binary_collection, entities, partition_tag=default_tag)
# ids = connect.bulk_insert(binary_collection, entities, partition_name=default_tag)
# connect.flush([binary_collection])
# stats = connect.get_collection_stats(binary_collection)
# vector_ids = connect.list_id_in_segment(binary_collection, stats["partitions"][1]["segments"][0]["id"])
......@@ -293,7 +293,7 @@ uid = "list_id_in_segment"
# expected: status ok
# '''
# connect.create_partition(binary_collection, default_tag)
# ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_tag=default_tag)
# ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_name=default_tag)
# connect.flush([binary_collection])
# stats = connect.get_collection_stats(binary_collection)
# assert stats["partitions"][1]["tag"] == default_tag
......
......@@ -25,7 +25,7 @@ default_binary_query, default_binary_query_vecs = gen_query_vectors(binary_field
nq)
def init_data(connect, collection, nb=3000, partition_tags=None, auto_id=True):
def init_data(connect, collection, nb=3000, partition_names=None, auto_id=True):
'''
Generate entities and add it in collection
'''
......@@ -34,21 +34,21 @@ def init_data(connect, collection, nb=3000, partition_tags=None, auto_id=True):
insert_entities = entities
else:
insert_entities = gen_entities(nb, is_normal=True)
if partition_tags is None:
if partition_names is None:
if auto_id:
ids = connect.insert(collection, insert_entities)
else:
ids = connect.insert(collection, insert_entities, ids=[i for i in range(nb)])
else:
if auto_id:
ids = connect.insert(collection, insert_entities, partition_tag=partition_tags)
ids = connect.insert(collection, insert_entities, partition_name=partition_names)
else:
ids = connect.insert(collection, insert_entities, ids=[i for i in range(nb)], partition_tag=partition_tags)
ids = connect.insert(collection, insert_entities, ids=[i for i in range(nb)], partition_name=partition_names)
connect.flush([collection])
return insert_entities, ids
def init_binary_data(connect, collection, nb=3000, insert=True, partition_tags=None):
def init_binary_data(connect, collection, nb=3000, insert=True, partition_names=None):
'''
Generate entities and add it in collection
'''
......@@ -61,10 +61,10 @@ def init_binary_data(connect, collection, nb=3000, insert=True, partition_tags=N
else:
insert_raw_vectors, insert_entities = gen_binary_entities(nb)
if insert is True:
if partition_tags is None:
if partition_names is None:
ids = connect.insert(collection, insert_entities)
else:
ids = connect.insert(collection, insert_entities, partition_tag=partition_tags)
ids = connect.insert(collection, insert_entities, partition_name=partition_names)
connect.flush([collection])
return insert_raw_vectors, insert_entities, ids
......@@ -337,7 +337,7 @@ class TestSearchBase:
assert check_id_result(res[0], ids[0])
connect.release_collection(collection)
connect.load_partitions(collection, [default_tag])
res = connect.search(collection, query, partition_tags=[default_tag])
res = connect.search(collection, query, partition_names=[default_tag])
assert len(res[0]) == 0
@pytest.mark.level(2)
......@@ -355,16 +355,16 @@ class TestSearchBase:
if index_type in skip_pq():
pytest.skip("Skip PQ")
connect.create_partition(collection, default_tag)
entities, ids = init_data(connect, collection, partition_tags=default_tag)
entities, ids = init_data(connect, collection, partition_names=default_tag)
connect.create_index(collection, field_name, get_simple_index)
search_param = get_search_param(index_type)
query, vecs = gen_query_vectors(field_name, entities, top_k, nq, search_params=search_param)
if top_k > max_top_k:
with pytest.raises(Exception) as e:
res = connect.search(collection, query, partition_tags=[default_tag])
res = connect.search(collection, query, partition_names=[default_tag])
else:
connect.load_partitions(collection, [default_tag])
res = connect.search(collection, query, partition_tags=[default_tag])
res = connect.search(collection, query, partition_names=[default_tag])
assert len(res) == nq
assert len(res[0]) == top_k
assert res[0]._distances[0] < epsilon
......@@ -385,11 +385,11 @@ class TestSearchBase:
query, vecs = gen_query_vectors(field_name, entities, top_k, nq)
if top_k > max_top_k:
with pytest.raises(Exception) as e:
res = connect.search(collection, query, partition_tags=["new_tag"])
res = connect.search(collection, query, partition_names=["new_tag"])
else:
connect.load_collection(collection)
with pytest.raises(Exception) as e:
connect.search(collection, query, partition_tags=["new_tag"])
connect.search(collection, query, partition_names=["new_tag"])
@pytest.mark.level(2)
def test_search_index_partitions(self, connect, collection, get_simple_index, get_top_k):
......@@ -406,8 +406,8 @@ class TestSearchBase:
pytest.skip("Skip PQ")
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
entities, ids = init_data(connect, collection, partition_tags=default_tag)
new_entities, new_ids = init_data(connect, collection, nb=6001, partition_tags=new_tag)
entities, ids = init_data(connect, collection, partition_names=default_tag)
new_entities, new_ids = init_data(connect, collection, nb=6001, partition_names=new_tag)
connect.create_index(collection, field_name, get_simple_index)
search_param = get_search_param(index_type)
query, vecs = gen_query_vectors(field_name, entities, top_k, nq, search_params=search_param)
......@@ -421,7 +421,7 @@ class TestSearchBase:
assert not check_id_result(res[1], new_ids[0])
assert res[0]._distances[0] < epsilon
assert res[1]._distances[0] < epsilon
res = connect.search(collection, query, partition_tags=[new_tag])
res = connect.search(collection, query, partition_names=[new_tag])
assert res[0]._distances[0] > epsilon
assert res[1]._distances[0] > epsilon
connect.release_collection(collection)
......@@ -442,8 +442,8 @@ class TestSearchBase:
pytest.skip("Skip PQ")
connect.create_partition(collection, tag)
connect.create_partition(collection, new_tag)
entities, ids = init_data(connect, collection, partition_tags=tag)
new_entities, new_ids = init_data(connect, collection, nb=6001, partition_tags=new_tag)
entities, ids = init_data(connect, collection, partition_names=tag)
new_entities, new_ids = init_data(connect, collection, nb=6001, partition_names=new_tag)
connect.create_index(collection, field_name, get_simple_index)
search_param = get_search_param(index_type)
query, vecs = gen_query_vectors(field_name, new_entities, top_k, nq, search_params=search_param)
......@@ -452,11 +452,11 @@ class TestSearchBase:
res = connect.search(collection, query)
else:
connect.load_collection(collection)
res = connect.search(collection, query, partition_tags=["(.*)tag"])
res = connect.search(collection, query, partition_names=["(.*)tag"])
assert not check_id_result(res[0], ids[0])
assert res[0]._distances[0] < epsilon
assert res[1]._distances[0] < epsilon
res = connect.search(collection, query, partition_tags=["new(.*)"])
res = connect.search(collection, query, partition_names=["new(.*)"])
assert res[0]._distances[0] < epsilon
assert res[1]._distances[0] < epsilon
connect.release_collection(collection)
......@@ -533,7 +533,7 @@ class TestSearchBase:
assert len(res[0]) >= top_k
assert res[0]._distances[0] >= 1 - gen_inaccuracy(res[0]._distances[0])
assert check_id_result(res[0], ids[0])
res = connect.search(collection, query, partition_tags=[default_tag])
res = connect.search(collection, query, partition_names=[default_tag])
assert len(res[0]) == 0
@pytest.mark.level(2)
......@@ -552,8 +552,8 @@ class TestSearchBase:
pytest.skip("Skip PQ")
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
entities, ids = init_data(connect, collection, partition_tags=default_tag)
new_entities, new_ids = init_data(connect, collection, nb=6001, partition_tags=new_tag)
entities, ids = init_data(connect, collection, partition_names=default_tag)
new_entities, new_ids = init_data(connect, collection, nb=6001, partition_names=new_tag)
get_simple_index["metric_type"] = metric_type
connect.create_index(collection, field_name, get_simple_index)
search_param = get_search_param(index_type)
......@@ -564,7 +564,7 @@ class TestSearchBase:
assert not check_id_result(res[1], new_ids[0])
assert res[0]._distances[0] >= 1 - gen_inaccuracy(res[0]._distances[0])
assert res[1]._distances[0] >= 1 - gen_inaccuracy(res[1]._distances[0])
res = connect.search(collection, query, partition_tags=["new_tag"])
res = connect.search(collection, query, partition_names=["new_tag"])
assert res[0]._distances[0] < 1 - gen_inaccuracy(res[0]._distances[0])
# TODO:
# assert res[1]._distances[0] >= 1 - gen_inaccuracy(res[1]._distances[0])
......@@ -1659,7 +1659,7 @@ class TestSearchInvalid(object):
# tag = " "
tag = get_invalid_partition
with pytest.raises(Exception) as e:
res = connect.search(collection, default_query, partition_tags=tag)
res = connect.search(collection, default_query, partition_names=tag)
@pytest.mark.level(2)
def test_search_with_invalid_field_name(self, connect, collection, get_invalid_field):
......
......@@ -11,7 +11,7 @@ allure-pytest==2.7.0
pytest-print==0.2.1
pytest-level==0.1.1
pytest-xdist==2.2.1
pymilvus==2.0a1.dev2
pymilvus==2.0a1.dev10
pytest-rerunfailures==9.1.1
git+https://github.com/Projectplace/pytest-tags
ndg-httpsclient
......
......@@ -294,7 +294,7 @@ class TestRestartBase:
tag_tmp = gen_unique_str()
partitions.append(tag_tmp)
connect.create_partition(collection, tag_tmp)
ids = connect.bulk_insert(collection, big_entities, partition_tag=tag_tmp)
ids = connect.bulk_insert(collection, big_entities, partition_name=tag_tmp)
connect.flush([collection], _async=True)
res_count = connect.count_entities(collection)
logging.getLogger().info(res_count)
......
......@@ -237,7 +237,7 @@ from constants import *
# '''
# connect.create_partition(collection, default_tag)
# assert connect.has_partition(collection, default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
# ids = connect.bulk_insert(collection, default_entities, partition_name=default_tag)
# connect.flush([collection])
# info = connect.get_collection_stats(collection)
# logging.getLogger().info(info["partitions"])
......
......@@ -94,7 +94,7 @@ class TestFlushBase:
connect.flush([id_collection])
res_count = connect.get_collection_stats(id_collection)
assert res_count["row_count"] == default_nb
ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
ids = connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
assert len(ids) == default_nb
connect.flush([id_collection])
res_count = connect.get_collection_stats(id_collection)
......@@ -110,9 +110,9 @@ class TestFlushBase:
connect.create_partition(id_collection, default_tag)
connect.create_partition(id_collection, tag_new)
ids = [i for i in range(default_nb)]
connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
connect.flush([id_collection])
connect.insert(id_collection, default_entities, ids, partition_tag=tag_new)
connect.insert(id_collection, default_entities, ids, partition_name=tag_new)
connect.flush([id_collection])
res = connect.get_collection_stats(id_collection)
assert res["row_count"] == 2 * default_nb
......@@ -129,10 +129,10 @@ class TestFlushBase:
connect.create_partition(id_collection, default_tag)
connect.create_partition(collection_new, default_tag)
ids = [i for i in range(default_nb)]
# ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
# ids = connect.insert(collection_new, default_entities, ids, partition_tag=default_tag)
connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
connect.insert(collection_new, default_entities, ids, partition_tag=default_tag)
# ids = connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
# ids = connect.insert(collection_new, default_entities, ids, partition_name=default_tag)
connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
connect.insert(collection_new, default_entities, ids, partition_name=default_tag)
connect.flush([id_collection])
connect.flush([collection_new])
res = connect.get_collection_stats(id_collection)
......@@ -160,10 +160,10 @@ class TestFlushBase:
entities_new = gen_entities_by_fields(fields["fields"], nb_new, default_dim)
ids = [i for i in range(default_nb)]
ids_new = [i for i in range(nb_new)]
# ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
# ids = connect.insert(collection_new, entities_new, ids_new, partition_tag=default_tag)
connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
connect.insert(collection_new, entities_new, ids_new, partition_tag=default_tag)
# ids = connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
# ids = connect.insert(collection_new, entities_new, ids_new, partition_name=default_tag)
connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
connect.insert(collection_new, entities_new, ids_new, partition_name=default_tag)
connect.flush([id_collection])
connect.flush([collection_new])
res = connect.get_collection_stats(id_collection)
......
......@@ -107,7 +107,7 @@ class TestIndexBase:
expected: return search success
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
connect.create_index(collection, field_name, get_simple_index)
if get_simple_index["index_type"] != "FLAT":
index = connect.describe_index(collection, "")
......@@ -123,7 +123,7 @@ class TestIndexBase:
expected: return search success
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
connect.create_index(collection, field_name, get_simple_index)
if get_simple_index["index_type"] != "FLAT":
......@@ -311,7 +311,7 @@ class TestIndexBase:
expected: return search success
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
get_simple_index["metric_type"] = "IP"
connect.create_index(collection, field_name, get_simple_index)
if get_simple_index["index_type"] != "FLAT":
......@@ -328,7 +328,7 @@ class TestIndexBase:
expected: return search success
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
get_simple_index["metric_type"] = "IP"
connect.create_index(collection, field_name, get_simple_index)
......@@ -669,7 +669,7 @@ class TestIndexBinary:
expected: return search success
'''
connect.create_partition(binary_collection, default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag)
connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
binary_index = connect.describe_index(binary_collection, "")
create_target_index(get_jaccard_index, binary_field_name)
......@@ -740,7 +740,7 @@ class TestIndexBinary:
expected: return code 0, and index instructure
'''
connect.create_partition(binary_collection, default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag)
connect.flush([binary_collection])
connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
stats = connect.get_collection_stats(binary_collection)
......@@ -781,7 +781,7 @@ class TestIndexBinary:
expected: return code 0, and default index param
'''
connect.create_partition(binary_collection, default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag)
connect.flush([binary_collection])
connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
connect.drop_index(binary_collection, binary_field_name)
......
......@@ -88,7 +88,7 @@ class TestCreateBase:
assert message == "create partition failed: can't find collection: %s" % collection_name
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_partition_tag_name_None(self, connect, collection):
def test_create_partition_name_name_None(self, connect, collection):
'''
target: test create partition, tag name set None, check status returned
method: call function: create_partition
......@@ -98,10 +98,10 @@ class TestCreateBase:
try:
connect.create_partition(collection, tag_name)
except Exception as e:
assert e.args[0] == "`partition_tag` value None is illegal"
assert e.args[0] == "`partition_name` value None is illegal"
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_different_partition_tags(self, connect, collection):
def test_create_different_partition_names(self, connect, collection):
'''
target: test create partition twice with different names
method: call function: create_partition, and again
......@@ -133,7 +133,7 @@ class TestCreateBase:
'''
connect.create_partition(id_collection, default_tag)
ids = [i for i in range(default_nb)]
insert_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
insert_ids = connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
assert len(insert_ids) == len(ids)
@pytest.mark.tags(CaseLabel.tags_smoke)
......@@ -147,7 +147,7 @@ class TestCreateBase:
connect.create_partition(collection, default_tag)
ids = [i for i in range(default_nb)]
try:
connect.insert(collection, default_entities, ids, partition_tag=tag_new)
connect.insert(collection, default_entities, ids, partition_name=tag_new)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
......@@ -163,10 +163,10 @@ class TestCreateBase:
'''
connect.create_partition(id_collection, default_tag)
ids = [i for i in range(default_nb)]
insert_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
insert_ids = connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
assert len(insert_ids) == default_nb
ids = [(i+default_nb) for i in range(default_nb)]
new_insert_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
new_insert_ids = connect.insert(id_collection, default_entities, ids, partition_name=default_tag)
assert len(new_insert_ids) == default_nb
connect.flush([id_collection])
res = connect.get_collection_stats(id_collection)
......@@ -183,9 +183,9 @@ class TestCreateBase:
collection_new = gen_unique_str()
connect.create_collection(collection_new, default_fields)
connect.create_partition(collection_new, default_tag)
ids = connect.insert(collection, default_entities, partition_tag=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
ids_new = connect.insert(collection_new, default_entities, partition_tag=default_tag)
ids_new = connect.insert(collection_new, default_entities, partition_name=default_tag)
assert len(ids_new) == default_nb
connect.flush([collection, collection_new])
res = connect.get_collection_stats(collection)
......@@ -275,7 +275,7 @@ class TestHasBase:
assert res
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_has_partition_tag_not_existed(self, connect, collection):
def test_has_partition_name_not_existed(self, connect, collection):
'''
target: test has_partition, check status and result
method: then call function: has_partition, with tag not existed
......@@ -336,7 +336,7 @@ class TestDropBase:
assert default_tag not in res2
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_drop_partition_tag_not_existed(self, connect, collection):
def test_drop_partition_name_not_existed(self, connect, collection):
'''
target: test drop partition, but tag not existed
method: create partitions first, then call function: drop_partition
......@@ -353,7 +353,7 @@ class TestDropBase:
assert message == "DropPartition failed: partition %s does not exist" % new_tag
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_drop_partition_tag_not_existed_A(self, connect, collection):
def test_drop_partition_name_not_existed_A(self, connect, collection):
'''
target: test drop partition, but collection not existed
method: create partitions first, then call function: drop_partition
......@@ -468,7 +468,7 @@ class TestNewCase(object):
expected: status not ok
'''
try:
connect.drop_partition(collection, partition_tag='_default')
connect.drop_partition(collection, partition_name='_default')
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
......@@ -486,7 +486,7 @@ class TestNewCase(object):
'''
connect.create_partition(collection, default_tag)
try:
connect.drop_partition(collection, partition_tag='_default')
connect.drop_partition(collection, partition_name='_default')
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册