未验证 提交 481630e7 编写于 作者: R Roman Donchenko 提交者: GitHub

Migrate tests/python from os.path to pathlib (#5426)

`pathlib` improves code readability and type safety. It is already used
in some of the tests; convert all remaining `os.path` usage to `pathlib`
equivalents.
上级 3f9ab7cf
......@@ -71,7 +71,7 @@ class TestCLI:
@pytest.fixture
def fxt_new_task(self):
files = generate_images(str(self.tmp_path), 5)
files = generate_images(self.tmp_path, 5)
task = self.client.tasks.create_from_data(
spec={
......@@ -79,7 +79,7 @@ class TestCLI:
"labels": [{"name": "car"}, {"name": "person"}],
},
resource_type=ResourceType.LOCAL,
resources=files,
resources=list(map(os.fspath, files)),
)
return task
......@@ -100,13 +100,13 @@ class TestCLI:
return self.stdout.getvalue()
def test_can_create_task_from_local_images(self):
files = generate_images(str(self.tmp_path), 5)
files = generate_images(self.tmp_path, 5)
stdout = self.run_cli(
"create",
"test_task",
ResourceType.LOCAL.name,
*files,
*map(os.fspath, files),
"--labels",
json.dumps([{"name": "car"}, {"name": "person"}]),
"--completion_verification_period",
......
......@@ -3,10 +3,9 @@
# SPDX-License-Identifier: MIT
import os
import os.path as osp
import unittest
from typing import Any, Union
from pathlib import Path
from typing import Any, List, Union
from shared.utils.helpers import generate_image_file
......@@ -22,12 +21,11 @@ def run_cli(test: Union[unittest.TestCase, Any], *args: str, expected_code: int
assert expected_code == main(args)
def generate_images(dst_dir: str, count: int):
def generate_images(dst_dir: Path, count: int) -> List[Path]:
filenames = []
os.makedirs(dst_dir, exist_ok=True)
dst_dir.mkdir(parents=True, exist_ok=True)
for i in range(count):
filename = osp.join(dst_dir, f"img_{i}.jpg")
with open(filename, "wb") as f:
f.write(generate_image_file().getvalue())
filename = dst_dir / f"img_{i}.jpg"
filename.write_bytes(generate_image_file().getvalue())
filenames.append(filename)
return filenames
......@@ -3,9 +3,8 @@
#
# SPDX-License-Identifier: MIT
import glob
import json
import os.path as osp
from pathlib import Path
import pytest
from deepdiff import DeepDiff
......@@ -15,10 +14,10 @@ from shared.utils import config
@pytest.mark.usefixtures("restore_db_per_class")
class TestGetResources:
@pytest.mark.parametrize("path", glob.glob(osp.join(config.ASSETS_DIR, "*.json")))
def test_check_objects_integrity(self, path):
@pytest.mark.parametrize("path", config.ASSETS_DIR.glob("*.json"))
def test_check_objects_integrity(self, path: Path):
with open(path) as f:
endpoint = osp.basename(path).rsplit(".")[0]
endpoint = path.stem
if endpoint == "annotations":
objects = json.load(f)
for jid, annotations in objects["job"].items():
......
......@@ -3,7 +3,6 @@
# SPDX-License-Identifier: MIT
import json
import os.path as osp
from http import HTTPStatus
from time import sleep
......@@ -24,7 +23,7 @@ from shared.utils.config import delete_method, get_method, patch_method, post_me
def target_url():
env_data = {}
with open(osp.join(CVAT_ROOT_DIR, "tests", "python", "webhook_receiver", ".env"), "r") as f:
with open(CVAT_ROOT_DIR / "tests/python/webhook_receiver/.env", "r") as f:
for line in f:
name, value = tuple(line.strip().split("="))
env_data[name] = value
......
......@@ -3,7 +3,7 @@
# SPDX-License-Identifier: MIT
import io
import os.path as osp
import os
from logging import Logger
from pathlib import Path
from typing import Tuple
......@@ -103,18 +103,18 @@ class TestJobUsecases:
pbar = make_pbar(file=pbar_out)
task_id = fxt_new_task.id
path = str(self.tmp_path / f"task_{task_id}-cvat.zip")
path = self.tmp_path / f"task_{task_id}-cvat.zip"
job_id = fxt_new_task.get_jobs()[0].id
job = self.client.jobs.retrieve(job_id)
job.export_dataset(
format_name="CVAT for images 1.1",
filename=path,
filename=os.fspath(path),
pbar=pbar,
include_images=include_images,
)
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert osp.isfile(path)
assert path.is_file()
assert self.stdout.getvalue() == ""
def test_can_download_preview(self, fxt_new_task: Task):
......@@ -139,7 +139,7 @@ class TestJobUsecases:
filename_pattern="frame-{frame_id}{frame_ext}",
)
assert osp.isfile(self.tmp_path / "frame-0.jpg")
assert (self.tmp_path / "frame-0.jpg").is_file()
assert self.stdout.getvalue() == ""
def test_can_upload_annotations(self, fxt_new_task: Task, fxt_coco_file: Path):
......
......@@ -4,7 +4,7 @@
import io
import json
import os.path as osp
import os
import zipfile
from logging import Logger
from pathlib import Path
......@@ -112,7 +112,7 @@ class TestTaskUsecases:
task_files = generate_image_files(7)
for i, f in enumerate(task_files):
fname = self.tmp_path / osp.basename(f.name)
fname = self.tmp_path / f.name
with fname.open("wb") as fd:
fd.write(f.getvalue())
task_files[i] = str(fname)
......@@ -252,17 +252,17 @@ class TestTaskUsecases:
pbar = make_pbar(file=pbar_out)
task_id = fxt_new_task.id
path = str(self.tmp_path / f"task_{task_id}-cvat.zip")
path = self.tmp_path / f"task_{task_id}-cvat.zip"
task = self.client.tasks.retrieve(task_id)
task.export_dataset(
format_name="CVAT for images 1.1",
filename=path,
filename=os.fspath(path),
pbar=pbar,
include_images=include_images,
)
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert osp.isfile(path)
assert path.is_file()
assert self.stdout.getvalue() == ""
def test_can_download_backup(self, fxt_new_task: Task):
......@@ -270,12 +270,12 @@ class TestTaskUsecases:
pbar = make_pbar(file=pbar_out)
task_id = fxt_new_task.id
path = str(self.tmp_path / f"task_{task_id}-backup.zip")
path = self.tmp_path / f"task_{task_id}-backup.zip"
task = self.client.tasks.retrieve(task_id)
task.download_backup(filename=path, pbar=pbar)
task.download_backup(filename=os.fspath(path), pbar=pbar)
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert osp.isfile(path)
assert path.is_file()
assert self.stdout.getvalue() == ""
def test_can_download_preview(self, fxt_new_task: Task):
......@@ -300,7 +300,7 @@ class TestTaskUsecases:
filename_pattern="frame-{frame_id}{frame_ext}",
)
assert osp.isfile(self.tmp_path / "frame-0.jpg")
assert (self.tmp_path / "frame-0.jpg").is_file()
assert self.stdout.getvalue() == ""
@pytest.mark.parametrize("quality", ("compressed", "original"))
......
......@@ -2,8 +2,8 @@
#
# SPDX-License-Identifier: MIT
import os.path as osp
import textwrap
from pathlib import Path
from typing import Tuple
from cvat_sdk.core.helpers import TqdmProgressReporter
......@@ -14,11 +14,11 @@ def make_pbar(file, **kwargs):
return TqdmProgressReporter(tqdm(file=file, mininterval=0, **kwargs))
def generate_coco_json(filename: str, img_info: Tuple[str, int, int]):
def generate_coco_json(filename: Path, img_info: Tuple[Path, int, int]):
image_filename, image_width, image_height = img_info
content = generate_coco_anno(
osp.basename(image_filename),
image_filename.name,
image_width=image_width,
image_height=image_height,
)
......
......@@ -3,14 +3,11 @@
# SPDX-License-Identifier: MIT
import json
import os.path as osp
import pytest
from shared.utils.config import ASSETS_DIR
CVAT_DB_DIR = osp.join(ASSETS_DIR, "cvat_db")
class Container:
def __init__(self, data, key="id"):
......@@ -39,73 +36,73 @@ class Container:
@pytest.fixture(scope="session")
def users():
with open(osp.join(ASSETS_DIR, "users.json")) as f:
with open(ASSETS_DIR / "users.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def organizations():
with open(osp.join(ASSETS_DIR, "organizations.json")) as f:
with open(ASSETS_DIR / "organizations.json") as f:
return Container(json.load(f))
@pytest.fixture(scope="session")
def memberships():
with open(osp.join(ASSETS_DIR, "memberships.json")) as f:
with open(ASSETS_DIR / "memberships.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def tasks():
with open(osp.join(ASSETS_DIR, "tasks.json")) as f:
with open(ASSETS_DIR / "tasks.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def projects():
with open(osp.join(ASSETS_DIR, "projects.json")) as f:
with open(ASSETS_DIR / "projects.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def jobs():
with open(osp.join(ASSETS_DIR, "jobs.json")) as f:
with open(ASSETS_DIR / "jobs.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def invitations():
with open(osp.join(ASSETS_DIR, "invitations.json")) as f:
with open(ASSETS_DIR / "invitations.json") as f:
return Container(json.load(f)["results"], key="key")
@pytest.fixture(scope="session")
def annotations():
with open(osp.join(ASSETS_DIR, "annotations.json")) as f:
with open(ASSETS_DIR / "annotations.json") as f:
return json.load(f)
@pytest.fixture(scope="session")
def cloud_storages():
with open(osp.join(ASSETS_DIR, "cloudstorages.json")) as f:
with open(ASSETS_DIR / "cloudstorages.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def issues():
with open(osp.join(ASSETS_DIR, "issues.json")) as f:
with open(ASSETS_DIR / "issues.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def comments():
with open(osp.join(ASSETS_DIR, "comments.json")) as f:
with open(ASSETS_DIR / "comments.json") as f:
return Container(json.load(f)["results"])
@pytest.fixture(scope="session")
def webhooks():
with open(osp.join(ASSETS_DIR, "webhooks.json")) as f:
with open(ASSETS_DIR / "webhooks.json") as f:
return Container(json.load(f)["results"])
......
......@@ -2,10 +2,9 @@
#
# SPDX-License-Identifier: MIT
import os
import os.path as osp
import re
from http import HTTPStatus
from pathlib import Path
from subprocess import PIPE, CalledProcessError, run
from time import sleep
......@@ -14,12 +13,12 @@ import requests
from shared.utils.config import ASSETS_DIR, get_api_url
CVAT_ROOT_DIR = __file__[: __file__.rfind(osp.join("tests", ""))]
CVAT_DB_DIR = osp.join(ASSETS_DIR, "cvat_db")
CVAT_ROOT_DIR = next(dir.parent for dir in Path(__file__).parents if dir.name == "tests")
CVAT_DB_DIR = ASSETS_DIR / "cvat_db"
PREFIX = "test"
CONTAINER_NAME_FILES = [
osp.join(CVAT_ROOT_DIR, dc_file)
CVAT_ROOT_DIR / dc_file
for dc_file in (
"components/analytics/docker-compose.analytics.tests.yml",
"docker-compose.tests.yml",
......@@ -27,7 +26,7 @@ CONTAINER_NAME_FILES = [
]
DC_FILES = [
osp.join(CVAT_ROOT_DIR, dc_file)
CVAT_ROOT_DIR / dc_file
for dc_file in (
"docker-compose.dev.yml",
"tests/docker-compose.file_share.yml",
......@@ -157,7 +156,7 @@ def running_containers():
def dump_db():
if "test_cvat_server_1" not in running_containers():
pytest.exit("CVAT is not running")
with open(osp.join(CVAT_DB_DIR, "data.json"), "w") as f:
with open(CVAT_DB_DIR / "data.json", "w") as f:
try:
run( # nosec
"docker exec test_cvat_server_1 \
......@@ -173,7 +172,9 @@ def dump_db():
def create_compose_files():
for filename in CONTAINER_NAME_FILES:
with open(filename.replace(".tests.yml", ".yml"), "r") as dcf, open(filename, "w") as ndcf:
with open(filename.with_name(filename.name.replace(".tests", "")), "r") as dcf, open(
filename, "w"
) as ndcf:
ndcf.writelines(
[line for line in dcf.readlines() if not re.match("^.+container_name.+$", line)]
)
......@@ -181,8 +182,7 @@ def create_compose_files():
def delete_compose_files():
for filename in CONTAINER_NAME_FILES:
if osp.exists(filename):
os.remove(filename)
filename.unlink(missing_ok=True)
def wait_for_server():
......@@ -195,7 +195,7 @@ def wait_for_server():
def docker_restore_data_volumes():
docker_cp(
osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"),
CVAT_DB_DIR / "cvat_data.tar.bz2",
f"{PREFIX}_cvat_server_1:/tmp/cvat_data.tar.bz2",
)
docker_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
......@@ -204,7 +204,7 @@ def docker_restore_data_volumes():
def kube_restore_data_volumes():
pod_name = _kube_get_server_pod_name()
kube_cp(
osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"),
CVAT_DB_DIR / "cvat_data.tar.bz2",
f"{pod_name}:/tmp/cvat_data.tar.bz2",
)
kube_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
......@@ -218,19 +218,24 @@ def start_services(rebuild=False):
)
_run(
# use compatibility mode to have fixed names for containers (with underscores)
# https://github.com/docker/compose#about-update-and-backward-compatibility
f"docker-compose -p {PREFIX} --compatibility "
+ "--env-file "
+ osp.join(CVAT_ROOT_DIR, "tests", "python", "webhook_receiver", ".env")
+ f" -f {' -f '.join(DC_FILES)} up -d "
+ "--build" * rebuild,
[
"docker-compose",
f"--project-name={PREFIX}",
# use compatibility mode to have fixed names for containers (with underscores)
# https://github.com/docker/compose#about-update-and-backward-compatibility
"--compatibility",
f"--env-file={CVAT_ROOT_DIR / 'tests/python/webhook_receiver/.env'}",
*(f"--file={f}" for f in DC_FILES),
"up",
"-d",
*["--build"] * rebuild,
],
capture_output=False,
)
docker_restore_data_volumes()
docker_cp(osp.join(CVAT_DB_DIR, "restore.sql"), f"{PREFIX}_cvat_db_1:/tmp/restore.sql")
docker_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{PREFIX}_cvat_server_1:/tmp/data.json")
docker_cp(CVAT_DB_DIR / "restore.sql", f"{PREFIX}_cvat_db_1:/tmp/restore.sql")
docker_cp(CVAT_DB_DIR / "data.json", f"{PREFIX}_cvat_server_1:/tmp/data.json")
@pytest.fixture(autouse=True, scope="session")
......@@ -260,18 +265,23 @@ def services(request):
delete_compose_files()
pytest.exit("All generated test files have been deleted", returncode=0)
if not all([osp.exists(f) for f in CONTAINER_NAME_FILES]) or rebuild:
if not all([f.exists() for f in CONTAINER_NAME_FILES]) or rebuild:
delete_compose_files()
create_compose_files()
if stop:
_run(
# use compatibility mode to have fixed names for containers (with underscores)
# https://github.com/docker/compose#about-update-and-backward-compatibility
f"docker-compose -p {PREFIX} --compatibility "
+ "--env-file "
+ osp.join(CVAT_ROOT_DIR, "tests", "python", "webhook_receiver", ".env")
+ f" -f {' -f '.join(DC_FILES)} down -v",
[
"docker-compose",
f"--project-name={PREFIX}",
# use compatibility mode to have fixed names for containers (with underscores)
# https://github.com/docker/compose#about-update-and-backward-compatibility
"--compatibility",
f"--env-file={CVAT_ROOT_DIR / 'tests/python/webhook_receiver/.env'}",
*(f"--file={f}" for f in DC_FILES),
"down",
"-v",
],
capture_output=False,
)
pytest.exit("All testing containers are stopped", returncode=0)
......@@ -296,8 +306,8 @@ def services(request):
kube_restore_data_volumes()
server_pod_name = _kube_get_server_pod_name()
db_pod_name = _kube_get_db_pod_name()
kube_cp(osp.join(CVAT_DB_DIR, "restore.sql"), f"{db_pod_name}:/tmp/restore.sql")
kube_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{server_pod_name}:/tmp/data.json")
kube_cp(CVAT_DB_DIR / "restore.sql", f"{db_pod_name}:/tmp/restore.sql")
kube_cp(CVAT_DB_DIR / "data.json", f"{server_pod_name}:/tmp/data.json")
wait_for_server()
......
......@@ -2,13 +2,13 @@
#
# SPDX-License-Identifier: MIT
import os.path as osp
from pathlib import Path
import requests
from cvat_sdk.api_client import ApiClient, Configuration
ROOT_DIR = __file__[: __file__.rfind(osp.join("utils", ""))]
ASSETS_DIR = osp.abspath(osp.join(ROOT_DIR, "assets"))
ROOT_DIR = next(dir.parent for dir in Path(__file__).parents if dir.name == "utils")
ASSETS_DIR = (ROOT_DIR / "assets").resolve()
# Suppress the warning from Bandit about hardcoded passwords
USER_PASS = "!Q@W#E$R" # nosec
BASE_URL = "http://localhost:8080"
......
......@@ -3,7 +3,6 @@
# SPDX-License-Identifier: MIT
import json
import os.path as osp
from http import HTTPStatus
from config import ASSETS_DIR, get_method
......@@ -24,7 +23,7 @@ if __name__ == "__main__":
"webhook",
]:
response = get_method("admin1", f"{obj}s", page_size="all")
with open(osp.join(ASSETS_DIR, f"{obj}s.json"), "w") as f:
with open(ASSETS_DIR / f"{obj}s.json", "w") as f:
json.dump(response.json(), f, indent=2, sort_keys=True)
if obj in ["job", "task"]:
......@@ -35,5 +34,5 @@ if __name__ == "__main__":
if response.status_code == HTTPStatus.OK:
annotations[obj][oid] = response.json()
with open(osp.join(ASSETS_DIR, f"annotations.json"), "w") as f:
with open(ASSETS_DIR / "annotations.json", "w") as f:
json.dump(annotations, f, indent=2, sort_keys=True)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册