Unverified commit 068cc143 authored by binbin, committed by GitHub

Merge collection test cases from pymilvus and orm (#7414)

Signed-off-by: Binbin Lv <binbin.lv@zilliz.com>
Parent bd3056fc
......@@ -94,7 +94,7 @@ To run E2E tests, use these commands:
```shell
MILVUS_SERVICE_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(docker-compose ps -q builder))
cd tests/docker
docker-compose run --rm pytest /bin/bash -c "pytest --ip ${MILVUS_SERVICE_IP}"
docker-compose run --rm pytest /bin/bash -c "pytest --host ${MILVUS_SERVICE_IP}"
```
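The in-container pytest invocation now takes `--host` instead of `--ip`. Before launching the suite, you can optionally confirm that the resolved service address is reachable; a small hypothetical helper (not part of the repository, assuming Milvus listens on its default gRPC port 19530) might look like this:

```python
# sanity_check.py -- hypothetical helper, not part of this repository.
# Verifies that the Milvus service resolved into MILVUS_SERVICE_IP is reachable
# before starting the pytest container.
import os
import socket

host = os.environ.get("MILVUS_SERVICE_IP", "localhost")
port = 19530  # assumed default Milvus gRPC port

try:
    socket.create_connection((host, port), timeout=5).close()
    print(f"Milvus reachable at {host}:{port}")
except OSError as exc:
    raise SystemExit(f"Milvus not reachable at {host}:{port}: {exc}")
```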
......
......@@ -70,7 +70,7 @@ pipeline {
--install-extra-arg "--set etcd.enabled=false --set externalEtcd.enabled=true --set externalEtcd.endpoints={\$KRTE_POD_IP:2379}" \
--skip-export-logs \
--skip-cleanup \
--test-extra-arg "-x --tags smoke L0 L1" \
--test-extra-arg "-x --tags L0 L1" \
--test-timeout ${e2e_timeout_seconds}
"""
// } else if ("${MILVUS_CLIENT}" == "pymilvus-orm") {
......
......@@ -79,7 +79,7 @@ pipeline {
--install-extra-arg "--set etcd.enabled=false --set externalEtcd.enabled=true --set externalEtcd.endpoints={\$KRTE_POD_IP:2379}" \
--skip-export-logs \
--skip-cleanup \
--test-extra-arg "--tags smoke L0 L1 L2" \
--test-extra-arg "--tags L0 L1 L2" \
--test-timeout ${e2e_timeout_seconds}
"""
// } else if ("${MILVUS_CLIENT}" == "pymilvus-orm") {
......
......@@ -18,7 +18,6 @@ delete_timeout = 60
def pytest_addoption(parser):
parser.addoption("--ip", action="store", default="localhost", help="service's ip")
parser.addoption("--host", action="store", default="localhost", help="service's ip")
parser.addoption("--service", action="store", default="", help="service address")
parser.addoption("--port", action="store", default=19530, help="service's port")
......@@ -39,14 +38,8 @@ def pytest_addoption(parser):
parser.addoption('--term_expr', action='store', default="term_expr", help="expr of query quest")
parser.addoption('--check_content', action='store', default="check_content", help="content of check")
parser.addoption('--field_name', action='store', default="field_name", help="field_name of index")
parser.addoption('--dry-run', action='store_true', default=False)
parser.addoption("--http-port", action="store", default=19121)
@pytest.fixture
def ip(request):
return request.config.getoption("--ip")
@pytest.fixture
def host(request):
......@@ -233,7 +226,7 @@ def pytest_runtest_setup(item):
def pytest_runtestloop(session):
if session.config.getoption('--dry-run'):
if session.config.getoption('--dry_run'):
total_num = 0
file_num = 0
tags_num = 0
......@@ -254,13 +247,13 @@ def pytest_runtestloop(session):
def check_server_connection(request):
ip = request.config.getoption("--ip")
host = request.config.getoption("--host")
port = request.config.getoption("--port")
connected = True
if ip and (ip not in ['localhost', '127.0.0.1']):
if host and (host not in ['localhost', '127.0.0.1']):
try:
socket.getaddrinfo(ip, port, 0, 0, socket.IPPROTO_TCP)
socket.getaddrinfo(host, port, 0, 0, socket.IPPROTO_TCP)
except Exception as e:
print("Socket connnet failed: %s" % str(e))
connected = False
......@@ -297,15 +290,15 @@ def check_server_connection(request):
@pytest.fixture(scope="module")
def connect(request):
ip = request.config.getoption("--ip")
host = request.config.getoption("--host")
service_name = request.config.getoption("--service")
port = request.config.getoption("--port")
http_port = request.config.getoption("--http-port")
http_port = request.config.getoption("--http_port")
handler = request.config.getoption("--handler")
if handler == "HTTP":
port = http_port
try:
milvus = get_milvus(host=ip, port=port, handler=handler)
milvus = get_milvus(host=host, port=port, handler=handler)
# reset_build_index_threshold(milvus)
except Exception as e:
logging.getLogger().error(str(e))
......@@ -322,40 +315,40 @@ def connect(request):
@pytest.fixture(scope="module")
def dis_connect(request):
ip = request.config.getoption("--ip")
host = request.config.getoption("--host")
service_name = request.config.getoption("--service")
port = request.config.getoption("--port")
http_port = request.config.getoption("--http-port")
http_port = request.config.getoption("--http_port")
handler = request.config.getoption("--handler")
if handler == "HTTP":
port = http_port
milvus = get_milvus(host=ip, port=port, handler=handler)
milvus = get_milvus(host=host, port=port, handler=handler)
milvus.close()
return milvus
@pytest.fixture(scope="module")
def args(request):
ip = request.config.getoption("--ip")
host = request.config.getoption("--host")
service_name = request.config.getoption("--service")
port = request.config.getoption("--port")
http_port = request.config.getoption("--http-port")
http_port = request.config.getoption("--http_port")
handler = request.config.getoption("--handler")
if handler == "HTTP":
port = http_port
args = {"ip": ip, "port": port, "handler": handler, "service_name": service_name}
args = {"ip": host, "port": port, "handler": handler, "service_name": service_name}
return args
@pytest.fixture(scope="module")
def milvus(request):
ip = request.config.getoption("--ip")
host = request.config.getoption("--host")
port = request.config.getoption("--port")
http_port = request.config.getoption("--http-port")
http_port = request.config.getoption("--http_port")
handler = request.config.getoption("--handler")
if handler == "HTTP":
port = http_port
return get_milvus(host=ip, port=port, handler=handler)
return get_milvus(host=host, port=port, handler=handler)
@pytest.fixture(scope="function")
......
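Most of the conftest.py changes above rename the dashed pytest options to underscored ones (`--http-port` to `--http_port`, `--dry-run` to `--dry_run`) and replace `--ip` with `--host`. As a minimal sketch of how a test module consumes the renamed options (assuming only the options registered by the `pytest_addoption` hook above; not part of this commit):

```python
# sketch_options.py -- illustrative only, assumes the conftest.py above is in effect.
import pytest

@pytest.fixture(scope="module")
def service_addr(request):
    host = request.config.getoption("--host")        # replaces the removed --ip option
    port = request.config.getoption("--port")
    if request.config.getoption("--handler") == "HTTP":
        port = request.config.getoption("--http_port")  # renamed from --http-port
    return host, port

def test_service_addr_is_well_formed(service_addr):
    host, port = service_addr
    assert host and int(port) > 0
```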
[pytest]
addopts = --ip localhost --host localhost --html=/tmp/ci_logs/report.html --self-contained-html -v
addopts = --host localhost --html=/tmp/ci_logs/report.html --self-contained-html -v
# -;addopts = --host 172.28.255.155 --html=/tmp/report.html
# python3 -W ignore -m pytest
......
......@@ -12,7 +12,7 @@ pytest-print==0.2.1
pytest-level==0.1.1
pytest-xdist==2.2.1
# pytest-parallel
pymilvus==2.0.0rc5.dev29
pymilvus==2.0.0rc6.dev3
pytest-rerunfailures==9.1.1
git+https://github.com/Projectplace/pytest-tags
ndg-httpsclient
......
import pdb
import pytest
import logging
import itertools
from time import sleep
from multiprocessing import Process
from utils.utils import *
uid = "collection_logic"
def create_collection(connect, **params):
connect.create_collection(params["collection_name"], const.default_fields)
def search_collection(connect, **params):
status, result = connect.search(
params["collection_name"],
params["top_k"],
params["query_vectors"],
params={"nprobe": params["nprobe"]})
return status
def load_collection(connect, **params):
connect.load_collection(params["collection_name"])
def has(connect, **params):
status, result = connect.has_collection(params["collection_name"])
return status
def show(connect, **params):
status, result = connect.list_collections()
return status
def delete(connect, **params):
status = connect.drop_collection(params["collection_name"])
return status
def describe(connect, **params):
status, result = connect.get_collection_info(params["collection_name"])
return status
def rowcount(connect, **params):
status, result = connect.count_entities(params["collection_name"])
return status
def create_index(connect, **params):
status = connect.create_index(params["collection_name"], params["index_type"], params["index_param"])
return status
func_map = {
# 0:has,
1:show,
10:create_collection,
11:describe,
12:rowcount,
13:search_collection,
14:load_collection,
15:create_index,
30:delete
}
def gen_sequence():
raw_seq = func_map.keys()
result = itertools.permutations(raw_seq)
for x in result:
yield x
class TestCollectionLogic(object):
@pytest.mark.parametrize("logic_seq", gen_sequence())
@pytest.mark.tags(CaseLabel.L2)
def _test_logic(self, connect, logic_seq, args):
if args["handler"] == "HTTP":
pytest.skip("Skip in http mode")
if self.is_right(logic_seq):
self.execute(logic_seq, connect)
else:
self.execute_with_error(logic_seq, connect)
self.tear_down(connect)
def is_right(self, seq):
if list(seq) == sorted(seq):  # seq is a tuple produced by itertools.permutations
return True
not_created = True
has_deleted = False
for i in range(len(seq)):
if seq[i] > 10 and not_created:
return False
elif seq[i] > 10 and has_deleted:
return False
elif seq[i] == 10:
not_created = False
elif seq[i] == 30:
has_deleted = True
return True
def execute(self, logic_seq, connect):
basic_params = self.gen_params()
for i in range(len(logic_seq)):
# logging.getLogger().info(logic_seq[i])
f = func_map[logic_seq[i]]
status = f(connect, **basic_params)
assert status.OK()
def execute_with_error(self, logic_seq, connect):
basic_params = self.gen_params()
error_flag = False
for i in range(len(logic_seq)):
f = func_map[logic_seq[i]]
status = f(connect, **basic_params)
if not status.OK():
# logging.getLogger().info(logic_seq[i])
error_flag = True
break
assert error_flag == True
def tear_down(self, connect):
names = connect.list_collections()[1]
for name in names:
connect.drop_collection(name)
def gen_params(self):
collection_name = gen_unique_str(uid)
top_k = 1
vectors = gen_vectors(2, dim)
param = {'collection_name': collection_name,
'dimension': dim,
'metric_type': "L2",
'nprobe': 1,
'top_k': top_k,
'index_type': "IVF_SQ8",
'index_param': {
'nlist': 16384
},
'query_vectors': vectors}
return param
import time
import pdb
import threading
import logging
from multiprocessing import Pool, Process
import pytest
from utils.utils import *
from common.constants import *
uid = "get_collection_stats"
class TestGetCollectionStats:
"""
******************************************************************
The following cases are used to test `collection_stats` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_invalid_collection_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request, connect):
# if str(connect._cmd("mode")) == "CPU":
# if request.param["index_type"] in index_cpu_not_support():
# pytest.skip("CPU not support index_type: ivf_sq8h")
return request.param
@pytest.fixture(
scope="function",
params=gen_binary_index()
)
def get_jaccard_index(self, request, connect):
logging.getLogger().info(request.param)
if request.param["index_type"] in binary_support():
request.param["metric_type"] = "JACCARD"
return request.param
else:
pytest.skip("Skip index Temporary")
@pytest.fixture(
scope="function",
params=[
1,
1000,
2001
],
)
def insert_count(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_name_not_existed(self, connect, collection):
'''
target: get collection stats where collection name does not exist
method: call collection_stats with a random collection_name, which is not in db
expected: status not ok
'''
collection_name = gen_unique_str(uid)
with pytest.raises(Exception) as e:
connect.get_collection_stats(collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_name_invalid(self, connect, get_invalid_collection_name):
'''
target: get collection stats where collection name is invalid
method: call collection_stats with invalid collection_name
expected: status not ok
'''
collection_name = get_invalid_collection_name
with pytest.raises(Exception) as e:
connect.get_collection_stats(collection_name)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_empty(self, connect, collection):
'''
target: get collection stats where no entity in collection
method: call collection_stats in empty collection
expected: row count is 0
'''
stats = connect.get_collection_stats(collection)
connect.flush([collection])
assert stats[row_count] == 0
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_without_connection(self, collection, dis_connect):
'''
target: test get_collection_stats without connection
method: call get_collection_stats with a disconnected instance
expected: exception raised
'''
with pytest.raises(Exception) as e:
dis_connect.get_collection_stats(collection)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_batch(self, connect, collection):
'''
target: get row count with collection_stats
method: add entities, check count in collection info
expected: count as expected
'''
result = connect.insert(collection, default_entities)
assert len(result.primary_keys) == default_nb
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert int(stats[row_count]) == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_single(self, connect, collection):
'''
target: get row count with collection_stats
method: add entity one by one, check count in collection info
expected: count as expected
'''
nb = 10
for i in range(nb):
connect.insert(collection, default_entity)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == nb
@pytest.mark.tags(CaseLabel.L2)
def _test_get_collection_stats_after_delete(self, connect, collection):
'''
target: get row count with collection_stats
method: add and delete entities, check count in collection info
expected: status ok, count as expected
'''
ids = connect.insert(collection, default_entities)
status = connect.flush([collection])
delete_ids = [ids[0], ids[-1]]
connect.delete_entity_by_id(collection, delete_ids)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats["row_count"] == default_nb - 2
assert stats["partitions"][0]["row_count"] == default_nb - 2
assert stats["partitions"][0]["segments"][0]["data_size"] > 0
# TODO: enable
@pytest.mark.tags(CaseLabel.L2)
def _test_get_collection_stats_after_compact_parts(self, connect, collection):
'''
target: get row count with collection_stats
method: add and delete entities, and compact collection, check count in collection info
expected: status ok, count as expected
'''
delete_length = 1000
ids = connect.insert(collection, default_entities)
status = connect.flush([collection])
delete_ids = ids[:delete_length]
connect.delete_entity_by_id(collection, delete_ids)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
logging.getLogger().info(stats)
assert stats["row_count"] == default_nb - delete_length
compact_before = stats["partitions"][0]["segments"][0]["data_size"]
connect.compact(collection)
stats = connect.get_collection_stats(collection)
logging.getLogger().info(stats)
compact_after = stats["partitions"][0]["segments"][0]["data_size"]
assert compact_before == compact_after
@pytest.mark.tags(CaseLabel.L2)
def _test_get_collection_stats_after_compact_delete_one(self, connect, collection):
'''
target: get row count with collection_stats
method: add and delete one entity, and compact collection, check count in collection info
expected: status ok, count as expected
'''
ids = connect.insert(collection, default_entities)
status = connect.flush([collection])
delete_ids = ids[:1]
connect.delete_entity_by_id(collection, delete_ids)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
logging.getLogger().info(stats)
compact_before = stats["partitions"][0]["row_count"]
connect.compact(collection)
stats = connect.get_collection_stats(collection)
logging.getLogger().info(stats)
compact_after = stats["partitions"][0]["row_count"]
# pdb.set_trace()
assert compact_before == compact_after
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_partition(self, connect, collection):
'''
target: get partition info in a collection
method: call collection_stats after partition created and check partition_stats
expected: status ok, vectors added to partition
'''
connect.create_partition(collection, default_tag)
result = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(result.primary_keys) == default_nb
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_partitions(self, connect, collection):
'''
target: get partition info in a collection
method: create two partitions, add vectors in one of the partitions, call collection_stats and check
expected: status ok, vectors added to one partition but not the other
'''
new_tag = "new_tag"
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
connect.insert(collection, default_entities, partition_name=new_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb * 2
connect.insert(collection, default_entities)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb * 3
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_partitions_A(self, connect, collection, insert_count):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add entities in it,
assert the value returned by count_entities method is equal to length of entities
expected: the count is equal to the length of entities
'''
new_tag = "new_tag"
entities = gen_entities(insert_count)
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_partitions_B(self, connect, collection, insert_count):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add entities in one of the partitions,
assert the value returned by count_entities method is equal to length of entities
expected: the count is equal to the length of entities
'''
new_tag = "new_tag"
entities = gen_entities(insert_count)
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities, partition_name=default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_partitions_C(self, connect, collection, insert_count):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add entities in one of the partitions,
assert the value returned by count_entities method is equal to length of entities
expected: the count is equal to the length of vectors
'''
new_tag = "new_tag"
entities = gen_entities(insert_count)
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities)
connect.insert(collection, entities, partition_name=default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count*2
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_partitions_D(self, connect, collection, insert_count):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add entities in one of the partitions,
assert the value returned by count_entities method is equal to length of entities
expected: the collection count is equal to the length of entities
'''
new_tag = "new_tag"
entities = gen_entities(insert_count)
connect.create_partition(collection, default_tag)
connect.create_partition(collection, new_tag)
connect.insert(collection, entities, partition_name=default_tag)
connect.insert(collection, entities, partition_name=new_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == insert_count*2
# TODO: assert metric type in stats response
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_after_index_created(self, connect, collection, get_simple_index):
'''
target: test collection info after index created
method: create collection, add vectors, create index and call collection_stats
expected: status ok, index created and shown in segments
'''
connect.insert(collection, default_entities)
connect.flush([collection])
connect.create_index(collection, default_float_vec_field_name, get_simple_index)
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
# TODO: assert metric type in stats response
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_after_index_created_ip(self, connect, collection, get_simple_index):
'''
target: test collection info after index created
method: create collection, add vectors, create index and call collection_stats
expected: status ok, index created and shown in segments
'''
get_simple_index["metric_type"] = "IP"
result = connect.insert(collection, default_entities)
assert len(result.primary_keys) == default_nb
connect.flush([collection])
get_simple_index.update({"metric_type": "IP"})
connect.create_index(collection, default_float_vec_field_name, get_simple_index)
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
# TODO: assert metric type in stats response
@pytest.mark.tags(CaseLabel.L2)
def test_get_collection_stats_after_index_created_jac(self, connect, binary_collection, get_jaccard_index):
'''
target: test collection info after index created
method: create collection, add binary entities, create index and call collection_stats
expected: status ok, index created and shown in segments
'''
ids = connect.insert(binary_collection, default_binary_entities)
connect.flush([binary_collection])
connect.create_index(binary_collection, default_binary_vec_field_name, get_jaccard_index)
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_get_collection_stats_after_create_different_index(self, connect, collection):
'''
target: test collection info after index created repeatedly
method: create collection, add vectors, create index and call collection_stats multiple times
expected: status ok, index info shown in segments
'''
result = connect.insert(collection, default_entities)
connect.flush([collection])
for index_type in ["IVF_FLAT", "IVF_SQ8"]:
connect.create_index(collection, default_float_vec_field_name,
{"index_type": index_type, "params": {"nlist": 1024}, "metric_type": "L2"})
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_collection_count_multi_collections(self, connect):
'''
target: test collection rows_count is correct or not with multiple collections of L2
method: create collection and add entities in it,
assert the value returned by count_entities method is equal to length of entities
expected: row count in segments
'''
collection_list = []
collection_num = 10
for i in range(collection_num):
collection_name = gen_unique_str(uid)
collection_list.append(collection_name)
connect.create_collection(collection_name, default_fields)
result = connect.insert(collection_name, default_entities)
connect.flush(collection_list)
for i in range(collection_num):
stats = connect.get_collection_stats(collection_list[i])
assert stats[row_count] == default_nb
connect.drop_collection(collection_list[i])
@pytest.mark.tags(CaseLabel.L2)
def test_collection_count_multi_collections_indexed(self, connect):
'''
target: test collection rows_count is correct or not with multiple collections of L2
method: create collection and add entities in it,
assert the value returned by count_entities method is equal to length of entities
expected: row count in segments
'''
collection_list = []
collection_num = 10
for i in range(collection_num):
collection_name = gen_unique_str(uid)
collection_list.append(collection_name)
connect.create_collection(collection_name, default_fields)
res = connect.insert(collection_name, default_entities)
connect.flush(collection_list)
index_1 = {"index_type": "IVF_SQ8", "params": {"nlist": 1024}, "metric_type": "L2"}
index_2 = {"index_type": "IVF_FLAT", "params": {"nlist": 1024}, "metric_type": "L2"}
if i % 2:
connect.create_index(collection_name, default_float_vec_field_name, index_1)
else:
connect.create_index(collection_name, default_float_vec_field_name, index_2)
for i in range(collection_num):
stats = connect.get_collection_stats(collection_list[i])
assert stats[row_count] == default_nb
index = connect.describe_index(collection_list[i], "")
if i % 2:
create_target_index(index_1, default_float_vec_field_name)
assert index == index_1
else:
create_target_index(index_2, default_float_vec_field_name)
assert index == index_2
# break
connect.drop_collection(collection_list[i])
import pdb
import copy
import logging
import itertools
import time
import threading
from multiprocessing import Process
import sklearn.preprocessing
import pytest
from utils.utils import *
from common.constants import *
uid = "create_collection"
class TestCreateCollection:
"""
******************************************************************
The following cases are used to test `create_collection` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_single_filter_fields()
)
def get_filter_field(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_single_vector_fields()
)
def get_vector_field(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_segment_row_limits()
)
def get_segment_row_limit(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_collection_fields(self, connect, get_filter_field, get_vector_field):
'''
target: test create normal collection with different fields
method: create collection with diff fields: metric/field_type/...
expected: no exception raised
'''
filter_field = get_filter_field
logging.getLogger().info(filter_field)
vector_field = get_vector_field
collection_name = gen_unique_str(uid)
fields = {
"fields": [gen_primary_field(), filter_field, vector_field],
# "segment_row_limit": default_segment_row_limit
}
logging.getLogger().info(fields)
connect.create_collection(collection_name, fields)
assert connect.has_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
def _test_create_collection_segment_row_limit(self, connect, get_segment_row_limit):
'''
target: test create normal collection with different fields
method: create collection with diff segment_row_limit
expected: no exception raised
'''
collection_name = gen_unique_str(uid)
fields = copy.deepcopy(default_fields)
# fields["segment_row_limit"] = get_segment_row_limit
connect.create_collection(collection_name, fields)
assert connect.has_collection(collection_name)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_collection_after_insert(self, connect, collection):
'''
target: test insert vector, then create collection again
method: insert vector and create collection
expected: error raised
'''
# pdb.set_trace()
connect.insert(collection, default_entity)
try:
connect.create_collection(collection, default_fields)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "Create collection failed: meta table add collection failed,error = collection %s exist" % collection
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_collection_after_insert_flush(self, connect, collection):
'''
target: test insert vector, then create collection again
method: insert vector and create collection
expected: error raised
'''
connect.insert(collection, default_entity)
# connect.flush([collection])
try:
connect.create_collection(collection, default_fields)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "Create collection failed: meta table add collection failed,error = collection %s exist" % collection
# TODO: assert exception
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_without_connection(self, dis_connect):
'''
target: test create collection, without connection
method: create collection with correct params, with a disconnected instance
expected: error raised
'''
collection_name = gen_unique_str(uid)
with pytest.raises(Exception) as e:
dis_connect.create_collection(collection_name, default_fields)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_collection_existed(self, connect):
'''
target: test create collection but the collection name have already existed
method: create collection with the same collection_name
expected: error raised
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
try:
connect.create_collection(collection_name, default_fields)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "Create collection failed: meta table add collection failed,error = collection %s exist" % collection_name
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_after_drop_collection(self, connect, collection):
'''
target: create with the same collection name after collection dropped
method: delete, then create
expected: create success
'''
connect.drop_collection(collection)
time.sleep(2)
connect.create_collection(collection, default_fields)
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_multithread(self, connect):
'''
target: test create collection with multithread
method: create collection using multithread,
expected: collections are created
'''
threads_num = 8
threads = []
collection_names = []
def create():
collection_name = gen_unique_str(uid)
collection_names.append(collection_name)
connect.create_collection(collection_name, default_fields)
for i in range(threads_num):
t = MyThread(target=create, args=())
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
for item in collection_names:
assert item in connect.list_collections()
connect.drop_collection(item)
class TestCreateCollectionInvalid(object):
"""
Test creating collections with invalid params
"""
@pytest.fixture(
scope="function",
params=gen_invalid_metric_types()
)
def get_metric_type(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_ints()
)
def get_segment_row_limit(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_ints()
)
def get_dim(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_invalid_string(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_field_types()
)
def get_field_type(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def _test_create_collection_with_invalid_segment_row_limit(self, connect, get_segment_row_limit):
collection_name = gen_unique_str()
fields = copy.deepcopy(default_fields)
fields["segment_row_limit"] = get_segment_row_limit
with pytest.raises(Exception) as e:
connect.create_collection(collection_name, fields)
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_with_invalid_dimension(self, connect, get_dim):
dimension = get_dim
collection_name = gen_unique_str()
fields = copy.deepcopy(default_fields)
fields["fields"][-1]["params"]["dim"] = dimension
with pytest.raises(Exception) as e:
connect.create_collection(collection_name, fields)
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_with_invalid_collection_name(self, connect, get_invalid_string):
collection_name = get_invalid_string
with pytest.raises(Exception) as e:
connect.create_collection(collection_name, default_fields)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("collection_name", ('', None))
def test_create_collection_with_empty_or_None_collection_name(self, connect, collection_name):
# collection_name = ''
try:
connect.create_collection(collection_name, default_fields)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "Collection name should not be empty"
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_no_dimension(self, connect):
'''
target: test create collection with no dimension params
method: create collection with the dim param removed from the vector field
expected: exception raised, reporting that dimension is not defined
'''
collection_name = gen_unique_str(uid)
fields = copy.deepcopy(default_fields)
fields["fields"][-1]["params"].pop("dim")
try:
connect.create_collection(collection_name, fields)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "dimension is not defined in field type params"
@pytest.mark.tags(CaseLabel.L2)
def _test_create_collection_no_segment_row_limit(self, connect):
'''
target: test create collection with no segment_row_limit params
method: create collection with correct params
expected: use default default_segment_row_limit
'''
collection_name = gen_unique_str(uid)
fields = copy.deepcopy(default_fields)
fields.pop("segment_row_limit")
connect.create_collection(collection_name, fields)
res = connect.get_collection_info(collection_name)
logging.getLogger().info(res)
assert res["segment_row_limit"] == default_server_segment_row_limit
# TODO: assert exception
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_limit_fields(self, connect):
collection_name = gen_unique_str(uid)
limit_num = 64
fields = copy.deepcopy(default_fields)
for i in range(limit_num):
field_name = gen_unique_str("field_name")
field = {"name": field_name, "type": DataType.INT64}
fields["fields"].append(field)
try:
connect.create_collection(collection_name, fields)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "maximum field's number should be limited to 64"
# TODO: assert exception
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_invalid_field_name(self, connect, get_invalid_string):
collection_name = gen_unique_str(uid)
fields = copy.deepcopy(default_fields)
field_name = get_invalid_string
field = {"name": field_name, "type": DataType.INT64}
fields["fields"].append(field)
with pytest.raises(Exception) as e:
connect.create_collection(collection_name, fields)
# TODO: assert exception
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_invalid_field_type(self, connect, get_field_type):
collection_name = gen_unique_str(uid)
fields = copy.deepcopy(default_fields)
field_type = get_field_type
field = {"name": "test_field", "type": field_type}
fields["fields"].append(field)
with pytest.raises(Exception) as e:
connect.create_collection(collection_name, fields)
import pytest
import logging
import time
from utils.utils import *
from common.constants import *
uid = "describe_collection"
class TestDescribeCollection:
@pytest.fixture(
scope="function",
params=gen_single_filter_fields()
)
def get_filter_field(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_single_vector_fields()
)
def get_vector_field(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request, connect):
logging.getLogger().info(request.param)
# if str(connect._cmd("mode")) == "CPU":
# if request.param["index_type"] in index_cpu_not_support():
# pytest.skip("sq8h not support in CPU mode")
return request.param
"""
******************************************************************
The following cases are used to test `describe_collection` function, no data in collection
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_collection_fields(self, connect, get_filter_field, get_vector_field):
'''
target: test create normal collection with different fields, check info returned
method: create collection with diff fields: metric/field_type/..., calling `describe_collection`
expected: no exception raised, and value returned correct
'''
filter_field = get_filter_field
vector_field = get_vector_field
collection_name = gen_unique_str(uid)
fields = {
"fields": [gen_primary_field(), filter_field, vector_field],
# "segment_row_limit": default_segment_row_limit
}
connect.create_collection(collection_name, fields)
res = connect.describe_collection(collection_name)
# assert res['segment_row_limit'] == default_segment_row_limit
assert len(res["fields"]) == len(fields.get("fields"))
for field in res["fields"]:
if field["type"] == filter_field:
assert field["name"] == filter_field["name"]
elif field["type"] == vector_field:
assert field["name"] == vector_field["name"]
assert field["params"] == vector_field["params"]
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_describe_collection_after_index_created(self, connect, collection, get_simple_index):
connect.create_index(collection, default_float_vec_field_name, get_simple_index)
if get_simple_index["index_type"] != "FLAT":
index = connect.describe_index(collection, "")
assert index["index_type"] == get_simple_index["index_type"]
assert index["metric_type"] == get_simple_index["metric_type"]
assert index["params"] == get_simple_index["params"]
@pytest.mark.tags(CaseLabel.L2)
def test_describe_collection_without_connection(self, collection, dis_connect):
'''
target: test get collection info, without connection
method: calling get collection info with correct params, with a disconnected instance
expected: get collection info raise exception
'''
with pytest.raises(Exception) as e:
dis_connect.describe_collection(collection)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_describe_collection_not_existed(self, connect):
'''
target: test describe collection after it is dropped
method: create a collection, describe it, drop it,
then call describe_collection again
expected: exception raised with a "can't find collection" message
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
connect.describe_collection(collection_name)
connect.drop_collection(collection_name)
try:
connect.describe_collection(collection_name)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "describe collection failed: can't find collection: %s" % collection_name
@pytest.mark.tags(CaseLabel.L2)
def test_describe_collection_multithread(self, connect):
'''
target: test describe_collection with multiple threads
method: create a collection, then call describe_collection from several threads concurrently
expected: all calls succeed
'''
threads_num = 4
threads = []
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
def get_info():
connect.describe_collection(collection_name)
for i in range(threads_num):
t = MyThread(target=get_info)
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
"""
******************************************************************
The following cases are used to test `describe_collection` function, and insert data in collection
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_describe_collection_fields_after_insert(self, connect, get_filter_field, get_vector_field):
'''
target: test create normal collection with different fields, check info returned
method: create collection with diff fields: metric/field_type/..., calling `describe_collection`
expected: no exception raised, and value returned correct
'''
filter_field = get_filter_field
vector_field = get_vector_field
collection_name = gen_unique_str(uid)
fields = {
"fields": [gen_primary_field(), filter_field, vector_field],
# "segment_row_limit": default_segment_row_limit
}
connect.create_collection(collection_name, fields)
entities = gen_entities_by_fields(fields["fields"], default_nb, vector_field["params"]["dim"])
res_ids = connect.insert(collection_name, entities)
connect.flush([collection_name])
res = connect.describe_collection(collection_name)
# assert res['segment_row_limit'] == default_segment_row_limit
assert len(res["fields"]) == len(fields.get("fields"))
for field in res["fields"]:
if field["type"] == filter_field:
assert field["name"] == filter_field["name"]
elif field["type"] == vector_field:
assert field["name"] == vector_field["name"]
assert field["params"] == vector_field["params"]
class TestDescribeCollectionInvalid(object):
"""
Test describe collection with invalid params
"""
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_collection_name(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def test_describe_collection_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
with pytest.raises(Exception) as e:
connect.describe_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("collection_name", ('', None))
def test_describe_collection_with_empty_or_None_collection_name(self, connect, collection_name):
with pytest.raises(Exception) as e:
connect.describe_collection(collection_name)
import pdb
import pytest
import logging
import itertools
from time import sleep
import threading
from multiprocessing import Process
from utils.utils import *
from common.constants import *
uid = "drop_collection"
class TestDropCollection:
"""
******************************************************************
The following cases are used to test `drop_collection` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_drop_collection_A(self, connect, collection):
'''
target: test delete collection created with correct params
method: create collection and then delete,
assert the value returned by delete method
expected: status ok, and no collection in collections
'''
connect.drop_collection(collection)
time.sleep(2)
assert not connect.has_collection(collection)
@pytest.mark.tags(CaseLabel.L2)
def test_drop_collection_without_connection(self, collection, dis_connect):
'''
target: test describe collection, without connection
method: drop collection with correct params, with a disconnected instance
expected: drop raise exception
'''
with pytest.raises(Exception) as e:
dis_connect.drop_collection(collection)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_drop_collection_not_existed(self, connect):
'''
target: test drop collection which was never created
method: generate a random collection name that does not exist in db,
call drop_collection and check the raised exception
expected: exception raised with a "can't find collection" message
'''
collection_name = gen_unique_str(uid)
try:
connect.drop_collection(collection_name)
except Exception as e:
code = getattr(e, 'code', "The exception does not contain the field of code.")
assert code == 1
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "describe collection failed: can't find collection: %s" % collection_name
@pytest.mark.tags(CaseLabel.L2)
def test_create_drop_collection_multithread(self, connect):
'''
target: test create and drop collection with multithread
method: create and drop collection using multithread,
expected: collections are created, and dropped
'''
threads_num = 8
threads = []
collection_names = []
def create():
collection_name = gen_unique_str(uid)
collection_names.append(collection_name)
connect.create_collection(collection_name, default_fields)
connect.drop_collection(collection_name)
for i in range(threads_num):
t = MyThread(target=create, args=())
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
for item in collection_names:
assert not connect.has_collection(item)
class TestDropCollectionInvalid(object):
"""
Test drop collection with invalid params
"""
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_collection_name(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def test_drop_collection_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
with pytest.raises(Exception) as e:
connect.drop_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("collection_name", ('', None))
def test_drop_collection_with_empty_or_None_collection_name(self, connect, collection_name):
with pytest.raises(Exception) as e:
connect.drop_collection(collection_name)
import pdb
import pytest
import logging
import itertools
import threading
import time
from multiprocessing import Process
from utils.utils import *
from common.constants import *
uid = "has_collection"
class TestHasCollection:
"""
******************************************************************
The following cases are used to test `has_collection` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_has_collection(self, connect, collection):
'''
target: test if the created collection existed
method: create collection, assert the value returned by has_collection method
expected: True
'''
assert connect.has_collection(collection)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_without_connection(self, collection, dis_connect):
'''
target: test has collection, without connection
method: calling has collection with correct params, with a disconnected instance
expected: has collection raise exception
'''
with pytest.raises(Exception) as e:
assert dis_connect.has_collection(collection)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_has_collection_not_existed(self, connect):
'''
target: test has_collection after the collection is dropped
method: generate a random collection name, create the collection, drop it,
and check the value returned by has_collection
expected: False
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
assert connect.has_collection(collection_name)
connect.drop_collection(collection_name)
assert not connect.has_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_multithread(self, connect):
'''
target: test has_collection with multiple threads
method: create a collection, then call has_collection from several threads concurrently
expected: every call returns True
'''
threads_num = 4
threads = []
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
def has():
assert connect.has_collection(collection_name)
# assert not assert_collection(connect, collection_name)
for i in range(threads_num):
t = MyThread(target=has, args=())
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
class TestHasCollectionInvalid(object):
"""
Test has collection with invalid params
"""
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_collection_name(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
with pytest.raises(Exception) as e:
connect.has_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_with_empty_collection_name(self, connect):
collection_name = ''
with pytest.raises(Exception) as e:
connect.has_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_with_none_collection_name(self, connect):
collection_name = None
with pytest.raises(Exception) as e:
connect.has_collection(collection_name)
import pytest
import time
from utils.utils import *
from common.constants import *
uid = "list_collections"
class TestListCollections:
"""
******************************************************************
The following cases are used to test `list_collections` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_list_collections(self, connect, collection):
'''
target: test list collections
method: create collection, assert the value returned by list_collections method
expected: True
'''
assert collection in connect.list_collections()
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_list_collections_multi_collections(self, connect):
'''
target: test list collections
method: create collection, assert the value returned by list_collections method
expected: True
'''
collection_num = 50
collection_names = []
for i in range(collection_num):
collection_name = gen_unique_str(uid)
collection_names.append(collection_name)
connect.create_collection(collection_name, default_fields)
assert collection_name in connect.list_collections()
for i in range(collection_num):
connect.drop_collection(collection_names[i])
@pytest.mark.tags(CaseLabel.L2)
def test_list_collections_without_connection(self, dis_connect):
'''
target: test list collections, without connection
method: calling list collections with correct params, with a disconnected instance
expected: list collections raise exception
'''
with pytest.raises(Exception) as e:
dis_connect.list_collections()
@pytest.mark.tags(CaseLabel.L2)
def test_list_collections_not_existed(self, connect):
'''
target: test list_collections after the collection is dropped
method: generate a random collection name, create the collection, drop it,
and check the value returned by list_collections
expected: the dropped collection is no longer listed
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
assert collection_name in connect.list_collections()
connect.drop_collection(collection_name)
assert collection_name not in connect.list_collections()
# TODO: make sure to run this case in the end
@pytest.mark.skip("r0.3-test")
@pytest.mark.tags(CaseLabel.L2)
def test_list_collections_no_collection(self, connect):
'''
target: test show collections is correct or not, if no collection in db
method: delete all collections,
assert the value returned by list_collections method is equal to []
expected: the status is ok, and the result is equal to []
'''
result = connect.list_collections()
if result:
for collection_name in result:
assert connect.has_collection(collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_list_collections_multithread(self, connect):
'''
target: test list collection with multithread
method: list collection using multithread,
expected: list collections correctly
'''
threads_num = 10
threads = []
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
def _list():
assert collection_name in connect.list_collections()
for i in range(threads_num):
t = MyThread(target=_list)
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
......@@ -2,6 +2,7 @@ import pytest
from pymilvus import DataType, ParamError, BaseException
from utils.utils import *
from common.constants import *
from common.common_type import CaseLabel
ADD_TIMEOUT = 60
uid = "test_insert"
......@@ -49,7 +50,7 @@ class TestInsertBase:
def get_vector_field(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_empty_entity(self, connect, collection):
'''
target: test insert with empty entity list
......@@ -60,7 +61,7 @@ class TestInsertBase:
with pytest.raises(ParamError) as e:
connect.insert(collection, entities)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_None(self, connect, collection):
'''
target: test insert with None
......@@ -72,7 +73,7 @@ class TestInsertBase:
connect.insert(collection, entity)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_collection_not_existed(self, connect):
'''
target: test insert, with collection not existed
......@@ -107,7 +108,7 @@ class TestInsertBase:
assert connect.has_collection(collection) == False
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_flush_drop_collection(self, connect, collection):
'''
target: test drop collection after insert entities for a while
......@@ -138,7 +139,7 @@ class TestInsertBase:
assert index == get_simple_index
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_after_create_index(self, connect, collection, get_simple_index):
'''
target: test insert entities after an index has been created
......@@ -154,7 +155,7 @@ class TestInsertBase:
assert index == get_simple_index
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_search(self, connect, collection):
'''
target: test search entity after insert entity after a while
......@@ -189,7 +190,7 @@ class TestInsertBase:
yield request.param
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_ids(self, connect, id_collection, insert_count):
'''
target: test insert entities in collection, use customize ids
......@@ -227,7 +228,7 @@ class TestInsertBase:
assert stats[row_count] == nb
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_ids_fields(self, connect, get_filter_field, get_vector_field):
'''
target: test create normal collection with different fields, insert entities into id with ids
......@@ -268,7 +269,7 @@ class TestInsertBase:
# TODO
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_twice_ids_no_ids(self, connect, id_collection):
'''
target: check the result of insert, with params ids and no ids
......@@ -297,7 +298,7 @@ class TestInsertBase:
connect.insert(id_collection, entities)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_ids_length_not_match_batch(self, connect, id_collection):
'''
target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
......@@ -327,7 +328,7 @@ class TestInsertBase:
connect.insert(id_collection, entity)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_partition(self, connect, collection):
'''
target: test insert entities in collection created before
......@@ -344,7 +345,7 @@ class TestInsertBase:
# TODO
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_partition_with_ids(self, connect, id_collection):
'''
target: test insert entities in collection created before, insert with ids
......@@ -360,7 +361,7 @@ class TestInsertBase:
logging.getLogger().info(connect.describe_collection(id_collection))
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_default_partition(self, connect, collection):
'''
target: test insert entities into default partition
......@@ -400,7 +401,7 @@ class TestInsertBase:
res = connect.get_collection_stats(collection)
assert res[row_count] == 2 * default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_dim_not_matched(self, connect, collection):
'''
target: test insert entities, the vector dimension is not equal to the collection dimension
......@@ -413,7 +414,7 @@ class TestInsertBase:
with pytest.raises(Exception) as e:
connect.insert(collection, insert_entities)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_field_name_not_match(self, connect, collection):
'''
target: test insert entities, with the entity field name updated
......@@ -446,7 +447,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_field_more(self, connect, collection):
'''
target: test insert entities, with more fields than collection schema
......@@ -457,7 +458,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_field_vector_more(self, connect, collection):
'''
target: test insert entities, with more fields than collection schema
......@@ -468,7 +469,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_field_less(self, connect, collection):
'''
target: test insert entities, with less fields than collection schema
......@@ -479,7 +480,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_field_vector_less(self, connect, collection):
'''
target: test insert entities, with less fields than collection schema
......@@ -490,7 +491,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_no_field_vector_value(self, connect, collection):
'''
target: test insert entities, with no vector field value
......@@ -502,7 +503,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_no_field_vector_type(self, connect, collection):
'''
target: test insert entities, with no vector field type
......@@ -514,7 +515,7 @@ class TestInsertBase:
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_with_no_field_vector_name(self, connect, collection):
'''
target: test insert entities, with no vector field name
......@@ -582,7 +583,7 @@ class TestInsertBinary:
request.param["metric_type"] = "JACCARD"
return request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_binary_entities(self, connect, binary_collection):
'''
target: test insert entities in binary collection
......@@ -595,7 +596,7 @@ class TestInsertBinary:
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_binary_partition(self, connect, binary_collection):
'''
target: test insert entities and create partition tag
......@@ -625,7 +626,7 @@ class TestInsertBinary:
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_binary_after_create_index(self, connect, binary_collection, get_binary_index):
'''
target: test insert binary entities after build index
......@@ -656,7 +657,7 @@ class TestInsertBinary:
create_target_index(get_binary_index, binary_field_name)
assert index == get_binary_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_binary_search(self, connect, binary_collection):
'''
target: test search vector after insert vector after a while
......@@ -697,7 +698,7 @@ class TestInsertAsync:
logging.getLogger().info("In callback check results")
assert result
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_async(self, connect, collection, insert_count):
'''
target: test insert vectors with different length of vectors
......@@ -723,7 +724,7 @@ class TestInsertAsync:
connect.flush([collection])
assert len(result.primary_keys) == nb
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_async_callback(self, connect, collection, insert_count):
'''
target: test insert vectors with different length of vectors
......@@ -764,7 +765,7 @@ class TestInsertAsync:
with pytest.raises(Exception) as e:
result = future.result()
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_async_invalid_params(self, connect):
'''
target: test insert vectors with different length of vectors
......@@ -832,7 +833,7 @@ class TestInsertMultiCollections:
connect.drop_collection(collection_list[i])
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_collection_insert_entity_another(self, connect, collection):
'''
target: test insert vector to collection_1 after collection_2 deleted
......@@ -847,7 +848,7 @@ class TestInsertMultiCollections:
assert len(result.primary_keys) == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_index_insert_entity_another(self, connect, collection, get_simple_index):
'''
target: test insert vector to collection_2 after build index for collection_1
......@@ -866,7 +867,7 @@ class TestInsertMultiCollections:
connect.drop_collection(collection_name)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_entity_create_index_another(self, connect, collection, get_simple_index):
'''
target: test insert vector to collection_2 after build index for collection_1
......@@ -920,7 +921,7 @@ class TestInsertMultiCollections:
assert stats[row_count] == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_insert_entity_search_entity_another(self, connect, collection):
'''
target: test insert entity to collection_1 after search collection_2
......
......@@ -5,6 +5,7 @@ import pytest
from pymilvus import DataType
import utils.utils as ut
from common.common_type import CaseLabel
default_entities = ut.gen_entities(ut.default_nb, is_normal=True)
raw_vectors, default_binary_entities = ut.gen_binary_entities(ut.default_nb)
......@@ -80,7 +81,7 @@ class TestQueryBase:
def get_simple_index(self, request, connect):
return request.param
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_invalid(self, connect, collection):
"""
target: test query
......@@ -94,7 +95,7 @@ class TestQueryBase:
with pytest.raises(Exception):
res = connect.query(collection, term_expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_valid(self, connect, collection):
"""
target: test query
......@@ -116,7 +117,7 @@ class TestQueryBase:
if res[index][default_int_field_name] == entities[0]["values"][index]:
ut.assert_equal_vector(res[index][ut.default_float_vec_field_name], entities[2]["values"][index])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_collection_not_existed(self, connect):
"""
target: test query not existed collection
......@@ -127,7 +128,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, default_term_expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_without_connect(self, dis_connect, collection):
"""
target: test query without connection
......@@ -137,7 +138,7 @@ class TestQueryBase:
with pytest.raises(Exception):
dis_connect.query(collection, default_term_expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_invalid_collection_name(self, connect, get_collection_name):
"""
target: test query with invalid collection name
......@@ -148,7 +149,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection_name, default_term_expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_after_index(self, connect, collection, get_simple_index):
"""
target: test query after creating index
......@@ -168,7 +169,7 @@ class TestQueryBase:
assert res[index][default_float_field_name] == entities[1]["values"][index]
ut.assert_equal_vector(res[index][ut.default_float_vec_field_name], entities[-1]["values"][index])
@pytest.mark.tags(ut.CaseLabel.L2)
@pytest.mark.tags(CaseLabel.L2)
def test_query_after_search(self, connect, collection):
"""
target: test query after search
......@@ -193,7 +194,7 @@ class TestQueryBase:
assert res[index][default_float_field_name] == entities[1]["values"][index]
ut.assert_equal_vector(res[index][ut.default_float_vec_field_name], entities[2]["values"][index])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_empty_collection(self, connect, collection):
"""
target: test query empty collection
......@@ -205,7 +206,7 @@ class TestQueryBase:
logging.getLogger().info(res)
assert len(res) == 0
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_without_loading(self, connect, collection):
"""
target: test query without loading
......@@ -217,7 +218,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, default_term_expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_collection_not_primary_key(self, connect, collection):
"""
target: test query on collection that not on the primary field
......@@ -231,7 +232,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, term_expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_none(self, connect, collection):
"""
target: test query with none expr
......@@ -244,7 +245,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, None)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("expr", [1, "1", "12-s", "中文", [], {}, ()])
def test_query_expr_invalid_string(self, connect, collection, expr):
"""
......@@ -258,7 +259,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_not_existed_field(self, connect, collection):
"""
target: test query with not existed field
......@@ -275,7 +276,7 @@ class TestQueryBase:
@pytest.mark.parametrize("expr", [f'{default_int_field_name} inn [1, 2]',
f'{default_int_field_name} not in [1, 2]',
f'{default_int_field_name} in not [1, 2]'])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_wrong_term_keyword(self, connect, collection, expr):
"""
target: test query with wrong term expr keyword
......@@ -289,7 +290,7 @@ class TestQueryBase:
@pytest.mark.parametrize("expr", [f'{default_int_field_name} in 1',
f'{default_int_field_name} in "in"',
f'{default_int_field_name} in (mn)'])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_non_array_term(self, connect, collection, expr):
"""
target: test query with non-array term expr
......@@ -300,7 +301,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_empty_term_array(self, connect, collection):
"""
target: test query with empty array term expr
......@@ -314,7 +315,7 @@ class TestQueryBase:
res = connect.query(collection, term_expr)
assert len(res) == 0
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_single_term_array(self, connect, collection):
"""
target: test query with single array term expr
......@@ -332,7 +333,7 @@ class TestQueryBase:
ut.assert_equal_vector(res[0][ut.default_float_vec_field_name], entities[2]["values"][0])
@pytest.mark.xfail(reason="#6072")
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_binary_expr_single_term_array(self, connect, binary_collection):
"""
target: test query with single array term expr
......@@ -349,7 +350,7 @@ class TestQueryBase:
assert res[1][default_float_field_name] == binary_entities[1]["values"][0]
assert res[2][ut.default_float_vec_field_name] == binary_entities[2]["values"][0]
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_all_term_array(self, connect, collection):
"""
target: test query with all array term expr
......@@ -367,7 +368,7 @@ class TestQueryBase:
assert res[index][default_float_field_name] == entities[1]["values"][index]
ut.assert_equal_vector(res[index][ut.default_float_vec_field_name], entities[2]["values"][index])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_repeated_term_array(self, connect, collection):
"""
target: test query with repeated term array on primary field with unique value
......@@ -382,7 +383,7 @@ class TestQueryBase:
res = connect.query(collection, term_expr)
assert len(res) == 2
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_inconstant_term_array(self, connect, collection):
"""
target: test query with term expr that field and array are inconsistent
......@@ -396,7 +397,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_mix_term_array(self, connect, collection):
"""
target: test query with mix type value expr
......@@ -411,7 +412,7 @@ class TestQueryBase:
connect.query(collection, expr)
@pytest.mark.parametrize("constant", [[1], (), {}])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_non_constant_array_term(self, connect, collection, constant):
"""
target: test query with non-constant array term expr
......@@ -425,7 +426,7 @@ class TestQueryBase:
with pytest.raises(Exception):
connect.query(collection, expr)
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_output_field_empty(self, connect, collection):
"""
target: test query with none output field
......@@ -440,7 +441,7 @@ class TestQueryBase:
assert default_float_field_name not in res[0].keys()
assert ut.default_float_vec_field_name not in res[0].keys()
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_output_one_field(self, connect, collection):
"""
target: test query with output one field
......@@ -454,7 +455,7 @@ class TestQueryBase:
assert default_int_field_name in res[0].keys()
assert len(res[0].keys()) == 1
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_output_all_fields(self, connect, collection):
"""
target: test query with none output field
......@@ -470,7 +471,7 @@ class TestQueryBase:
for field in fields:
assert field in res[0].keys()
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_output_not_existed_field(self, connect, collection):
"""
target: test query output not existed field
......@@ -483,7 +484,7 @@ class TestQueryBase:
connect.query(collection, default_term_expr, output_fields=["int"])
# @pytest.mark.xfail(reason="#6074")
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_output_part_not_existed_field(self, connect, collection):
"""
target: test query output part not existed field
......@@ -496,7 +497,7 @@ class TestQueryBase:
connect.query(collection, default_term_expr, output_fields=[default_int_field_name, "int"])
@pytest.mark.parametrize("fields", ut.gen_invalid_strs())
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_invalid_output_fields(self, connect, collection, fields):
"""
target: test query with invalid output fields
......@@ -514,7 +515,7 @@ class TestQueryPartition:
test Query interface
query(collection_name, expr, output_fields=None, partition_names=None, timeout=None)
"""
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_partition(self, connect, collection):
"""
target: test query on partition
......@@ -531,7 +532,7 @@ class TestQueryPartition:
assert res[index][default_float_field_name] == entities[1]["values"][index]
ut.assert_equal_vector(res[index][ut.default_float_vec_field_name], entities[2]["values"][index])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_partition_without_loading(self, connect, collection):
"""
target: test query on partition without loading
......@@ -544,7 +545,7 @@ class TestQueryPartition:
with pytest.raises(Exception):
connect.query(collection, default_term_expr, partition_names=[ut.default_tag])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_default_partition(self, connect, collection):
"""
target: test query on default partition
......@@ -560,7 +561,7 @@ class TestQueryPartition:
assert res[index][default_float_field_name] == entities[1]["values"][index]
ut.assert_equal_vector(res[index][ut.default_float_vec_field_name], entities[2]["values"][index])
@pytest.mark.tags(ut.CaseLabel.L2)
@pytest.mark.tags(CaseLabel.L2)
def test_query_empty_partition(self, connect, collection):
"""
target: test query on empty partition
......@@ -572,7 +573,7 @@ class TestQueryPartition:
res = connect.query(collection, default_term_expr, partition_names=[ut.default_partition_name])
assert len(res) == 0
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_not_existed_partition(self, connect, collection):
"""
target: test query on a not existed partition
......@@ -584,7 +585,7 @@ class TestQueryPartition:
with pytest.raises(Exception):
connect.query(collection, default_term_expr, partition_names=[tag])
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_partition_repeatedly(self, connect, collection):
"""
target: test query repeatedly on partition
......@@ -599,7 +600,7 @@ class TestQueryPartition:
res_two = connect.query(collection, default_term_expr, partition_names=[ut.default_tag])
assert res_one == res_two
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_another_partition(self, connect, collection):
"""
target: test query another partition
......@@ -613,7 +614,7 @@ class TestQueryPartition:
res = connect.query(collection, term_expr, partition_names=[ut.default_tag])
assert len(res) == 0
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_multi_partitions_multi_results(self, connect, collection):
"""
target: test query on multi partitions and get multi results
......@@ -632,7 +633,7 @@ class TestQueryPartition:
assert len(res) == 1
assert res[0][default_int_field_name] == entities_2[0]["values"][0]
@pytest.mark.tags(ut.CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_multi_partitions_single_result(self, connect, collection):
"""
target: test query on multi partitions and get single result
......
......@@ -9,6 +9,7 @@ import numpy as np
from pymilvus import DataType
from utils.utils import *
from common.constants import *
from common.common_type import CaseLabel
uid = "test_search"
nq = 1
......@@ -141,7 +142,7 @@ class TestSearchBase:
def get_nq(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_flat(self, connect, collection, get_top_k, get_nq):
'''
target: test basic search function, all the search params is correct, change top-k value
......@@ -594,7 +595,7 @@ class TestSearchBase:
with pytest.raises(Exception) as e:
res = connect.search(collection_name, default_query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_distance_l2(self, connect, collection):
'''
target: search collection, and check the result: distance
......@@ -701,7 +702,7 @@ class TestSearchBase:
# TODO:
# assert abs(res[0]._distances[0] - max_distance) <= tmp_epsilon
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_distance_jaccard_flat_index(self, connect, binary_collection):
'''
target: search binary_collection, and check the result: distance
......@@ -979,7 +980,7 @@ class TestSearchDSL(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_no_vector_term_only(self, connect, collection):
'''
method: build query without vector only term
......@@ -993,7 +994,7 @@ class TestSearchDSL(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_no_vector_range_only(self, connect, collection):
'''
method: build query without vector only range
......@@ -1007,7 +1008,7 @@ class TestSearchDSL(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_vector_only(self, connect, collection):
entities, ids = init_data(connect, collection)
connect.load_collection(collection)
......@@ -1015,7 +1016,7 @@ class TestSearchDSL(object):
assert len(res) == nq
assert len(res[0]) == default_top_k
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_wrong_format(self, connect, collection):
'''
method: build query without must expr, with wrong expr name
......@@ -1029,7 +1030,7 @@ class TestSearchDSL(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_empty(self, connect, collection):
'''
method: search with empty query
......@@ -1158,7 +1159,7 @@ class TestSearchDSL(object):
assert len(res) == nq
assert len(res[0]) == 0
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_complex_dsl(self, connect, collection):
'''
method: query with complicated dsl
......@@ -1260,7 +1261,7 @@ class TestSearchDSL(object):
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_range_key_error(self, connect, collection):
'''
method: build query with range key error
......@@ -1344,7 +1345,7 @@ class TestSearchDSL(object):
assert len(res) == nq
assert len(res[0]) == default_top_k
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_range_one_field_not_existed(self, connect, collection):
'''
method: build query with two fields ranges, one of fields not existed
......@@ -1568,7 +1569,7 @@ class TestSearchDSLBools(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_should_only_term(self, connect, collection):
'''
method: build query without must, with should.term instead
......@@ -1579,7 +1580,7 @@ class TestSearchDSLBools(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_query_should_only_vector(self, connect, collection):
'''
method: build query without must, with should.vector instead
......@@ -1686,7 +1687,7 @@ class TestSearchInvalid(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, default_query, fields=fields)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_with_not_existed_field(self, connect, collection):
fields = [gen_unique_str("field_name")]
with pytest.raises(Exception) as e:
......@@ -1703,7 +1704,7 @@ class TestSearchInvalid(object):
def get_top_k(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_with_invalid_top_k(self, connect, collection, get_top_k):
'''
target: test search function, with the wrong top_k
......@@ -1790,7 +1791,7 @@ class TestSearchInvalid(object):
with pytest.raises(Exception) as e:
res = connect.search(collection, query)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_with_empty_vectors(self, connect, collection):
"""
target: test search function, with empty search vectors
......@@ -1835,7 +1836,7 @@ class TestSearchWithExpression(object):
def search_params(self):
return {"metric_type": "L2", "params": {"nprobe": 10}}
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_search_with_expression(self, connect, collection, index_param, search_params, limit, expression):
entities, ids = init_data(connect, collection)
assert len(ids) == default_nb
......
......@@ -6,6 +6,7 @@ import logging
from multiprocessing import Pool, Process
import pytest
from utils.utils import *
from common.common_type import CaseLabel
class TestMysql:
"""
......
......@@ -7,6 +7,7 @@ import json
from multiprocessing import Pool, Process
import pytest
from utils.utils import *
from common.common_type import CaseLabel
uid = "wal"
......
......@@ -4,6 +4,7 @@ import threading
from multiprocessing import Process
import concurrent.futures
from utils.utils import *
from common.common_type import CaseLabel
CONNECT_TIMEOUT = 12
......
......@@ -6,6 +6,7 @@ from multiprocessing import Pool, Process
import pytest
from utils.utils import *
from common.constants import *
from common.common_type import CaseLabel
DELETE_TIMEOUT = 60
default_single_query = {
......@@ -49,7 +50,7 @@ class TestFlushBase:
def get_vector_field(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_flush_collection_not_existed(self, connect, collection):
'''
target: test flush, params collection_name not existed
......@@ -65,7 +66,7 @@ class TestFlushBase:
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "describe collection failed: can't find collection: %s" % collection_new
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_flush_empty_collection(self, connect, collection):
'''
method: flush collection with no vectors
......@@ -100,7 +101,7 @@ class TestFlushBase:
res_count = connect.get_collection_stats(id_collection)
assert res_count["row_count"] == default_nb * 2
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_add_partitions_flush(self, connect, id_collection):
'''
method: add entities into partitions in collection, flush one
......@@ -117,7 +118,7 @@ class TestFlushBase:
res = connect.get_collection_stats(id_collection)
assert res["row_count"] == 2 * default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_add_collections_flush(self, connect, id_collection):
'''
method: add entities into collections, flush one
......@@ -169,7 +170,7 @@ class TestFlushBase:
assert res["row_count"] == nb_new
# TODO ci failed
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_add_flush_multiable_times(self, connect, collection):
'''
        method: add entities, flush several times
......@@ -218,7 +219,7 @@ class TestFlushBase:
def same_ids(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_add_flush_same_ids(self, connect, id_collection, same_ids):
'''
method: add entities, with same ids, count(same ids) < 15, > 15
......@@ -233,7 +234,7 @@ class TestFlushBase:
res = connect.get_collection_stats(id_collection)
assert res["row_count"] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_delete_flush_multiable_times(self, connect, collection):
'''
        method: delete entities, flush several times
......@@ -341,7 +342,7 @@ class TestFlushAsync:
status = future.result()
assert status is None
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_flush_async_long_drop_collection(self, connect, collection):
for i in range(5):
result = connect.insert(collection, default_entities)
......@@ -352,7 +353,7 @@ class TestFlushAsync:
res = connect.drop_collection(collection)
assert res is None
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_flush_async(self, connect, collection):
connect.insert(collection, default_entities)
logging.getLogger().info("before")
......@@ -384,7 +385,7 @@ class TestCollectionNameInvalid(object):
with pytest.raises(Exception) as e:
connect.flush(collection_name)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_flush_empty(self, connect, collection):
result = connect.insert(collection, default_entities)
assert len(result.primary_keys) == default_nb
......
......@@ -389,7 +389,7 @@ class TestIndexBase:
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index(self, connect, collection, get_simple_index):
"""
......@@ -404,7 +404,7 @@ class TestIndexBase:
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.skip(reason="Repeat with test_index_field_name_not_existed")
def test_create_index_on_field_not_existed(self, connect, collection, get_simple_index):
"""
......@@ -459,7 +459,7 @@ class TestIndexBase:
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_partition_flush(self, connect, collection, get_simple_index):
"""
......@@ -486,7 +486,7 @@ class TestIndexBase:
with pytest.raises(Exception) as e:
dis_connect.create_index(collection, field_name, get_simple_index)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_search_with_query_vectors(self, connect, collection, get_simple_index, get_nq):
"""
......@@ -534,7 +534,7 @@ class TestIndexBase:
for t in threads:
t.join()
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_index_collection_not_existed(self, connect):
"""
target: test create index interface when collection name not existed
......@@ -618,7 +618,7 @@ class TestIndexBase:
assert index == indexs[-1]
# assert not index # FLAT is the last index_type, drop all indexes in server
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_ip(self, connect, collection, get_simple_index):
"""
......@@ -634,7 +634,7 @@ class TestIndexBase:
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_no_vectors_ip(self, connect, collection, get_simple_index):
"""
......@@ -666,7 +666,7 @@ class TestIndexBase:
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_partition_flush_ip(self, connect, collection, get_simple_index):
"""
......@@ -684,7 +684,7 @@ class TestIndexBase:
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_search_with_query_vectors_ip(self, connect, collection, get_simple_index, get_nq):
"""
......@@ -748,7 +748,7 @@ class TestIndexBase:
with pytest.raises(Exception) as e:
connect.create_index(collection_name, field_name, default_index)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_no_vectors_insert_ip(self, connect, collection):
"""
......@@ -811,7 +811,7 @@ class TestIndexBase:
The following cases are used to test `drop_index` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_index(self, connect, collection, get_simple_index):
"""
target: test drop index interface
......@@ -848,7 +848,7 @@ class TestIndexBase:
with pytest.raises(Exception) as e:
dis_connect.drop_index(collection, field_name)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_index_collection_not_existed(self, connect):
"""
target: test drop index interface when collection name not existed
......@@ -860,7 +860,7 @@ class TestIndexBase:
with pytest.raises(Exception) as e:
connect.drop_index(collection_name, field_name)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_index_collection_not_create(self, connect, collection):
"""
target: test drop index interface when index not created
......@@ -942,7 +942,7 @@ class TestIndexBase:
connect.create_index(collection, field_name, get_simple_index)
connect.drop_index(collection, field_name)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_PQ_without_nbits(self, connect, collection):
PQ_index = {"index_type": "IVF_PQ", "params": {"nlist": 128, "m": 16}, "metric_type": "L2"}
result = connect.insert(collection, default_entities)
......@@ -1012,7 +1012,7 @@ class TestIndexBinary:
create_target_index(get_jaccard_index, binary_field_name)
assert binary_index == get_jaccard_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_partition(self, connect, binary_collection, get_jaccard_index):
"""
......@@ -1027,7 +1027,7 @@ class TestIndexBinary:
create_target_index(get_jaccard_index, binary_field_name)
assert binary_index == get_jaccard_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_search_with_query_vectors(self, connect, binary_collection, get_jaccard_index, get_nq):
"""
......@@ -1127,7 +1127,7 @@ class TestIndexBinary:
binary_index = connect.describe_index(binary_collection, "")
assert not binary_index
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_index_partition(self, connect, binary_collection, get_jaccard_index):
"""
target: test drop index interface
......@@ -1155,7 +1155,7 @@ class TestIndexInvalid(object):
def get_collection_name(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_index_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
with pytest.raises(Exception) as e:
......@@ -1227,7 +1227,7 @@ class TestIndexAsync:
# TODO:
logging.getLogger().info(res)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_drop(self, connect, collection):
"""
......@@ -1248,7 +1248,7 @@ class TestIndexAsync:
future = connect.create_index(collection_name, field_name, default_index, _async=True)
res = future.result()
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_callback(self, connect, collection, get_simple_index):
"""
......
......@@ -8,6 +8,7 @@ from time import sleep
from multiprocessing import Process
import sklearn.preprocessing
from utils.utils import *
from common.common_type import CaseLabel
index_file_size = 10
vectors = gen_vectors(10000, default_dim)
......
......@@ -7,6 +7,7 @@ from multiprocessing import Pool, Process
import pytest
from utils.utils import *
from common.constants import *
from common.common_type import CaseLabel
TIMEOUT = 120
......@@ -17,7 +18,7 @@ class TestCreateBase:
The following cases are used to test `create_partition` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_a(self, connect, collection):
'''
target: test create partition, check status returned
......@@ -55,7 +56,7 @@ class TestCreateBase:
with pytest.raises(Exception) as e:
connect.create_partition(collection, tag_tmp)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_repeat(self, connect, collection):
'''
target: test create partition, check status returned
......@@ -88,7 +89,7 @@ class TestCreateBase:
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "create partition failed: can't find collection: %s" % collection_name
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_name_name_None(self, connect, collection):
'''
target: test create partition, tag name set None, check status returned
......@@ -101,7 +102,7 @@ class TestCreateBase:
except Exception as e:
assert e.args[0] == "`partition_name` value None is illegal"
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_different_partition_names(self, connect, collection):
'''
target: test create partition twice with different names
......@@ -113,7 +114,7 @@ class TestCreateBase:
connect.create_partition(collection, tag_name)
assert compare_list_elements(connect.list_partitions(collection), [default_tag, tag_name, '_default'])
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_insert_default(self, connect, id_collection):
'''
target: test create partition, and insert vectors, check status returned
......@@ -125,7 +126,7 @@ class TestCreateBase:
result = connect.insert(id_collection, default_entities)
assert len(result.primary_keys) == len(ids)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_insert_with_tag(self, connect, id_collection):
'''
target: test create partition, and insert vectors, check status returned
......@@ -137,7 +138,7 @@ class TestCreateBase:
result = connect.insert(id_collection, default_entities, partition_name=default_tag)
assert len(result.primary_keys) == len(ids)
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_insert_with_tag_not_existed(self, connect, collection):
'''
target: test create partition, and insert vectors, check status returned
......@@ -155,7 +156,7 @@ class TestCreateBase:
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "partitionID of partitionName:%s can not be find" % tag_new
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_create_partition_insert_same_tags(self, connect, id_collection):
'''
target: test create partition, and insert vectors, check status returned
......@@ -202,7 +203,7 @@ class TestShowBase:
The following cases are used to test `list_partitions` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_list_partitions(self, connect, collection):
'''
target: test show partitions, check status and partitions returned
......@@ -212,7 +213,7 @@ class TestShowBase:
connect.create_partition(collection, default_tag)
assert compare_list_elements(connect.list_partitions(collection), [default_tag, '_default'])
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_list_partitions_no_partition(self, connect, collection):
'''
target: test show partitions with collection name, check status and partitions returned
......@@ -222,7 +223,7 @@ class TestShowBase:
res = connect.list_partitions(collection)
assert compare_list_elements(res, ['_default'])
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_show_multi_partitions(self, connect, collection):
'''
target: test show partitions, check status and partitions returned
......@@ -250,7 +251,7 @@ class TestHasBase:
def get_tag_name(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_has_partition_a(self, connect, collection):
'''
target: test has_partition, check status and result
......@@ -262,7 +263,7 @@ class TestHasBase:
logging.getLogger().info(res)
assert res
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_has_partition_multi_partitions(self, connect, collection):
'''
target: test has_partition, check status and result
......@@ -275,7 +276,7 @@ class TestHasBase:
res = connect.has_partition(collection, tag_name)
assert res
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_has_partition_name_not_existed(self, connect, collection):
'''
target: test has_partition, check status and result
......@@ -286,7 +287,7 @@ class TestHasBase:
logging.getLogger().info(res)
assert not res
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_has_partition_collection_not_existed(self, connect, collection):
'''
target: test has_partition, check status and result
......@@ -322,7 +323,7 @@ class TestDropBase:
The following cases are used to test `drop_partition` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_partition_a(self, connect, collection):
'''
target: test drop partition, check status and partition if existed
......@@ -336,7 +337,7 @@ class TestDropBase:
res2 = connect.list_partitions(collection)
assert default_tag not in res2
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_partition_name_not_existed(self, connect, collection):
'''
target: test drop partition, but tag not existed
......@@ -353,7 +354,7 @@ class TestDropBase:
message = getattr(e, 'message', "The exception does not contain the field of message.")
assert message == "DropPartition failed: partition %s does not exist" % new_tag
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_partition_name_not_existed_A(self, connect, collection):
'''
target: test drop partition, but collection not existed
......@@ -390,7 +391,7 @@ class TestDropBase:
tag_list = connect.list_partitions(collection)
assert default_tag not in tag_list
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_partition_create(self, connect, collection):
'''
target: test drop partition, and create again, check status
......@@ -460,7 +461,7 @@ class TestNameInvalid(object):
class TestNewCase(object):
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_default_partition_A(self, connect, collection):
'''
target: test drop partition of default, check status returned
......@@ -477,7 +478,7 @@ class TestNewCase(object):
list_partition = connect.list_partitions(collection)
assert '_default' in list_partition
@pytest.mark.tags(CaseLabel.tags_smoke)
@pytest.mark.tags(CaseLabel.L0)
def test_drop_default_partition_B(self, connect, collection):
'''
target: test drop partition of default, check status returned
......
......@@ -1023,9 +1023,3 @@ class MyThread(threading.Thread):
raise self.exc
class CaseLabel:
tags_smoke = "smoke"
L1 = "L1"
L2 = "L2"
L3 = "L3"
......@@ -17,7 +17,7 @@ set -x
MILVUS_HELM_RELEASE_NAME="${MILVUS_HELM_RELEASE_NAME:-milvus-testing}"
MILVUS_CLUSTER_ENABLED="${MILVUS_CLUSTER_ENABLED:-false}"
MILVUS_HELM_NAMESPACE="${MILVUS_HELM_NAMESPACE:-default}"
PARALLEL_NUM="${PARALLEL_NUM:-4}"
PARALLEL_NUM="${PARALLEL_NUM:-6}"
MILVUS_CLIENT="${MILVUS_CLIENT:-pymilvus}"
SOURCE="${BASH_SOURCE[0]}"
......@@ -67,8 +67,8 @@ pushd "${ROOT}/tests/docker"
else
if [[ "${MILVUS_CLIENT}" == "pymilvus" ]]; then
export MILVUS_PYTEST_WORKSPACE="/milvus/tests/python_client"
docker-compose run --rm pytest /bin/bash -c "pytest -n ${PARALLEL_NUM} --ip ${MILVUS_SERVICE_IP} --host ${MILVUS_SERVICE_IP}\
--port ${MILVUS_SERVICE_PORT} --html=\${CI_LOG_PATH}/report.html --self-contained-html ${@:-}"
docker-compose run --rm pytest /bin/bash -c "pytest -n ${PARALLEL_NUM} --host ${MILVUS_SERVICE_IP} --port ${MILVUS_SERVICE_PORT} \
--html=\${CI_LOG_PATH}/report.html --self-contained-html ${@:-}"
# elif [[ "${MILVUS_CLIENT}" == "pymilvus-orm" ]]; then
# export MILVUS_PYTEST_WORKSPACE="/milvus/tests20/python_client"
# docker-compose run --rm pytest /bin/bash -c "pytest -n ${PARALLEL_NUM} --host ${MILVUS_SERVICE_IP} --port ${MILVUS_SERVICE_PORT} \
......