diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7eaba936c..54d6684a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,14 +15,14 @@ jobs: strategy: matrix: - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] services: postgres: image: postgres env: POSTGRES_PASSWORD: postgres - POSTGRES_DB: fractal_client_test + POSTGRES_DB: pytest-fractal-client options: >- --health-cmd pg_isready --health-interval 10s @@ -50,8 +50,17 @@ jobs: - name: Test with pytest env: COVERAGE_FILE: coverage-data-${{ matrix.python-version }} + GHA_FRACTAL_SERVER_LOG: /tmp run: poetry run coverage run -m pytest + - name: Log server STDOUT if pytest failed + if: failure() + run: cat /tmp/server_out + + - name: Log server STDERR if pytest failed + if: failure() + run: cat /tmp/server_err + - name: Upload coverage data uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/pip_install.yml b/.github/workflows/pip_install.yml index b1aab0a51..c5ba6dfc8 100644 --- a/.github/workflows/pip_install.yml +++ b/.github/workflows/pip_install.yml @@ -15,7 +15,7 @@ jobs: strategy: matrix: - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] steps: diff --git a/CHANGELOG.md b/CHANGELOG.md index 41cd48a8a..9497b6b6e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,11 +2,20 @@ # 2.3.0 +> WARNING: Starting from this release, Python 3.9 is no longer supported. + + * Align with [`fractal-server` 2.7.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#270) (\#712). * Remove `--new-name` and `--new-version` options from `task edit` command (\#712). -* Rename `source` into `label` `task collect-custom` command (\#712). -* Do not rely on tasks' `source` or `owner` attributes (\#712). -* Add `--new-ssh-settings-json` to `fractal user edit` (\#715). +* Rename `source` to `label` in the `task collect-custom` command (\#712). +* Do not refer to obsolete task attributes `source` or `owner` (\#712, \#717). +* Add `--new-ssh-settings-json` option to `fractal user edit` (\#715). +* Add `--private` option to task-creating commands (\#717). +* Drop `task delete` command (\#717). +* Testing: + * Run all tests against a single `fractal-server` instance (\#717). + * Run tests in random module order, via `pytest-randomly` (\#717). + * Include Python 3.12 in the GitHub CI (\#717).
# 2.2.1 diff --git a/fractal_client/cmd/__init__.py b/fractal_client/cmd/__init__.py index 5a60cdcea..4960a8801 100644 --- a/fractal_client/cmd/__init__.py +++ b/fractal_client/cmd/__init__.py @@ -21,7 +21,6 @@ from ._project import get_project_list from ._project import patch_project from ._project import post_project -from ._task import delete_task from ._task import get_task_list from ._task import patch_task from ._task import post_task @@ -136,6 +135,7 @@ def task( "python_version", "package_extras", "pinned_dependency", + "private", ] function_kwargs = get_kwargs(parameters, kwargs) iface = task_collect_pip(client, batch=batch, **function_kwargs) @@ -147,6 +147,7 @@ def task( "version", "package_name", "package_root", + "private", ] function_kwargs = get_kwargs(parameters, kwargs) iface = task_collect_custom(client, batch=batch, **function_kwargs) @@ -165,6 +166,7 @@ def task( "args_schema_non_parallel", "args_schema_parallel", "args_schema_version", + "private", ] function_kwargs = get_kwargs(parameters, kwargs) iface = post_task(client, batch=batch, **function_kwargs) @@ -180,10 +182,6 @@ def task( ] function_kwargs = get_kwargs(parameters, kwargs) iface = patch_task(client, **function_kwargs) - elif subcmd == "delete": - parameters = ["id", "name", "version"] - function_kwargs = get_kwargs(parameters, kwargs) - iface = delete_task(client, **function_kwargs) else: raise NoCommandError(f"Command 'task {subcmd}' not found") return iface diff --git a/fractal_client/cmd/_aux_task_caching.py b/fractal_client/cmd/_aux_task_caching.py index b86034e5c..16dbc0575 100644 --- a/fractal_client/cmd/_aux_task_caching.py +++ b/fractal_client/cmd/_aux_task_caching.py @@ -114,13 +114,10 @@ def _format_task_list(task_list: _TaskList) -> str: Helper function to print a formatted task list with only a few task attributes, to be used in error messages. 
""" - header = " ID, Name, Version, Source" + header = " ID, Name, Version" formatted_list = "\n".join( [ - ( - f' {task["id"]}, "{task["name"]}", {task["version"]}, ' - f'{task["source"]}' - ) + f' {task["id"]}, "{task["name"]}", {task["version"]}' for task in task_list ] ) diff --git a/fractal_client/cmd/_task.py b/fractal_client/cmd/_task.py index 8f70c222a..31c6389ad 100644 --- a/fractal_client/cmd/_task.py +++ b/fractal_client/cmd/_task.py @@ -25,6 +25,7 @@ def task_collect_pip( python_version: Optional[str] = None, package_extras: Optional[str] = None, pinned_dependency: Optional[list[str]] = None, + private: bool = False, batch: bool = False, ) -> Interface: @@ -49,8 +50,10 @@ def task_collect_pip( for _name, _version in (p.split("=") for p in pinned_dependency) } + is_private = "?private=true" if private else "" + res = client.post( - f"{settings.BASE_URL}/task/collect/pip/", json=task_collect + f"{settings.BASE_URL}/task/collect/pip/{is_private}", json=task_collect ) state = check_response(res, expected_status_code=[200, 201]) @@ -70,6 +73,7 @@ def task_collect_custom( version: Optional[str] = None, package_name: Optional[str] = None, package_root: Optional[str] = None, + private: bool = False, batch: bool = False, ) -> Interface: @@ -94,9 +98,11 @@ def task_collect_custom( task_collect["package_name"] = package_name if package_root: task_collect["package_root"] = package_root + is_private = "?private=true" if private else "" res = client.post( - f"{settings.BASE_URL}/task/collect/custom/", json=task_collect + f"{settings.BASE_URL}/task/collect/custom/{is_private}", + json=task_collect, ) task_list = check_response( @@ -141,6 +147,7 @@ def post_task( args_schema_non_parallel: Optional[str] = None, args_schema_parallel: Optional[str] = None, args_schema_version: Optional[str] = None, + private: bool = False, ) -> Interface: task = dict(name=name) if command_non_parallel: @@ -163,8 +170,9 @@ def post_task( task["args_schema_non_parallel"] = json.load(f) if args_schema_version: task["args_schema_version"] = args_schema_version + is_private = "?private=true" if private else "" - res = client.post(f"{settings.BASE_URL}/task/", json=task) + res = client.post(f"{settings.BASE_URL}/task/{is_private}", json=task) new_task = check_response(res, expected_status_code=201) if batch: @@ -215,30 +223,3 @@ def patch_task( res = client.patch(f"{settings.BASE_URL}/task/{id}/", json=task_update) new_task = check_response(res, expected_status_code=200) return Interface(retcode=0, data=new_task) - - -def delete_task( - client: AuthClient, - *, - id: Optional[int] = None, - name: Optional[str] = None, - version: Optional[str] = None, -) -> Interface: - - if id: - if version: - logging.error( - "Too many arguments: cannot provide both `id` and `version`." - ) - sys.exit(1) - else: - try: - id = get_task_id_from_cache( - client=client, task_name=name, version=version - ) - except FractalCacheError as e: - print(e) - sys.exit(1) - res = client.delete(f"{settings.BASE_URL}/task/{id}/") - check_response(res, expected_status_code=204) - return Interface(retcode=0, data="") diff --git a/fractal_client/parser.py b/fractal_client/parser.py index e3a9cc48f..dd153092f 100644 --- a/fractal_client/parser.py +++ b/fractal_client/parser.py @@ -235,6 +235,12 @@ "pin several packages to specific versions." 
), ) +task_collect_parser.add_argument( + "--private", + default=False, + action="store_true", + help="Make task group private.", +) # task collect custom @@ -278,6 +284,12 @@ "it will be automatically inferred based on 'package_name'." ), ) +task_collect_custom_parser.add_argument( + "--private", + default=False, + action="store_true", + help="Make task group private.", +) # task check-collection task_check_collection_parser = task_subparsers.add_parser( @@ -341,6 +353,12 @@ "(e.g. `pydantic_v1`)." ), ) +task_new_parser.add_argument( + "--private", + default=False, + action="store_true", + help="Make task group private.", +) # task edit task_edit_parser = task_subparsers.add_parser( @@ -384,31 +402,6 @@ help=("Path to JSON file with new output types."), ) -# task delete -task_delete_parser = task_subparsers.add_parser( - "delete", - description="Delete task.", - argument_default=ap.SUPPRESS, - allow_abbrev=False, -) -task_delete_id_or_name_group = task_delete_parser.add_mutually_exclusive_group( - required=True -) -task_delete_id_or_name_group.add_argument( - "--id", help="ID of the task to delete.", type=int -) -task_delete_id_or_name_group.add_argument( - "--name", help="Name of the task to delete." -) -task_delete_parser.add_argument( - "--version", - help=( - "Version of the task to delete " - "(only accepted in combination with `--name`)." - ), -) - - # WORKFLOW GROUP workflow_parser = subparsers_main.add_parser( diff --git a/poetry.lock b/poetry.lock index c08238888..fcc711cbe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -711,10 +711,10 @@ typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "fractal-server" -version = "2.7.0a9" +version = "2.7.0a11" description = "Server component of the Fractal analytics platform" optional = false -python-versions = "^3.9" +python-versions = "^3.10" files = [] develop = false @@ -739,14 +739,13 @@ uvicorn-worker = "^0.2.0" [package.extras] gunicorn = ["gunicorn (>=21.2,<23.0)"] -postgres = ["asyncpg (>=0.29.0,<0.30.0)", "psycopg2 (>=2.9.5,<3.0.0)"] postgres-psycopg-binary = ["psycopg[binary] (>=3.1.0,<4.0.0)"] [package.source] type = "git" url = "https://github.com/fractal-analytics-platform/fractal-server.git" reference = "main" -resolved_reference = "64bba84eb8282d305654394683fa3c3f440f013b" +resolved_reference = "2cc4f9423a651ccfc3510111eaece99e3291e488" [[package]] name = "ghp-import" @@ -985,29 +984,6 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1099,9 +1075,6 @@ files = [ {file = 
"markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - [package.extras] docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] @@ -1202,7 +1175,6 @@ files = [ click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" markdown = ">=3.2.1" markupsafe = ">=2.0.1" @@ -1333,7 +1305,6 @@ files = [ [package.dependencies] click = ">=7.0" -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} Jinja2 = ">=2.11.1" Markdown = ">=3.3" MarkupSafe = ">=1.1" @@ -1342,7 +1313,6 @@ mkdocs-autorefs = ">=0.3.1" mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} platformdirs = ">=2.2.0" pymdown-extensions = ">=6.3" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} [package.extras] crystal = ["mkdocstrings-crystal (>=0.3.4)"] @@ -1797,6 +1767,20 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-randomly" +version = "3.15.0" +description = "Pytest plugin to randomly order tests and control random.seed." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_randomly-3.15.0-py3-none-any.whl", hash = "sha256:0516f4344b29f4e9cdae8bce31c4aeebf59d0b9ef05927c33354ff3859eeeca6"}, + {file = "pytest_randomly-3.15.0.tar.gz", hash = "sha256:b908529648667ba5e54723088edd6f82252f540cc340d748d1fa985539687047"}, +] + +[package.dependencies] +pytest = "*" + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2204,7 +2188,6 @@ files = [ [package.dependencies] anyio = ">=3.4.0,<5" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] @@ -2445,26 +2428,7 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -[[package]] -name = "zipp" -version = "3.20.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "9944308972f3aa1860f7f35c8f7155dccf2cc1d5564d0fdaf27b04f830c4e503" +python-versions = 
"^3.10" +content-hash = "2d813f525b4a6411fe7ee23f8915dcf3fa7e395b50a390e3631ff88f1fb6384c" diff --git a/pyproject.toml b/pyproject.toml index 2c2d3a41c..da74d9cc9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,11 +23,12 @@ exclude = [ "Changelog" = "https://github.com/fractal-analytics-platform/fractal-client/blob/main/CHANGELOG.md" [tool.poetry.dependencies] -python = "^3.9" +python = "^3.10" python-dotenv = "^1.0.0" httpx = "^0.27.0" PyJWT = "^2.8.0" packaging = "^23.1" +pytest-randomly = "^3.15.0" [tool.poetry.group.dev.dependencies] devtools = "^0.12.0" diff --git a/tests/conftest.py b/tests/conftest.py index e12f63d93..f2da4fd59 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,16 +4,11 @@ from pathlib import Path import pytest -from httpx import Client -# These three variables must be defined before the first import of config.py -environ["FRACTAL_SERVER"] = "http://127.0.0.1:10080" -environ["FRACTAL_USER"] = "test@fake-exact-lab.it" -environ["FRACTAL_PASSWORD"] = "password" - -environ["FRACTAL_USERNAME"] = "myusername" - +# This variable must be defined before the first import of config.py +environ["FRACTAL_SERVER"] = "http://127.0.0.1:8765" +from fractal_client.client import handle # noqa: E402 # set_start_method("fork") necessary to run tests on MacOS # https://github.com/pytest-dev/pytest-flask/issues/104#issuecomment-577908228 @@ -40,23 +35,6 @@ def testdata_path() -> Path: return Path(__file__).parent / "data" -@pytest.fixture -def client(): - with Client(timeout=10) as client: - yield client - - -@pytest.fixture -def client_superuser(): - from fractal_client.authclient import AuthClient - - with AuthClient( - username="admin@fractal.xy", - password="1234", - ) as client_superuser: - yield client_superuser - - def _clisplit(args: str): return shlex.split(f"fractal {args}") @@ -69,21 +47,25 @@ def _remove_session(): cache_file.unlink(missing_ok=True) -@pytest.fixture -def invoke(): - from fractal_client.client import handle +@pytest.fixture(scope="session") +def tester(): + return dict(email="client_tester@example.org", password="pytest") + +@pytest.fixture +def invoke(tester): def __invoke(args: str): _remove_session() - return handle(_clisplit(args)) + new_args = ( + f"--user {tester['email']} --password {tester['password']} {args}" + ) + return handle(_clisplit(new_args)) return __invoke @pytest.fixture def invoke_as_superuser(): - from fractal_client.client import handle - def __invoke(args: str): _remove_session() new_args = f"--user admin@fractal.xy --password 1234 {args}" @@ -92,6 +74,21 @@ def __invoke(args: str): return __invoke +@pytest.fixture +def invoke_as_custom_user(): + def __invoke(args: str, email: str, password: str): + _remove_session() + new_args = f"--user {email} --password {password} {args}" + return handle(_clisplit(new_args)) + + return __invoke + + +@pytest.fixture +def superuser(invoke_as_superuser): + return invoke_as_superuser("user whoami").data + + @pytest.fixture(scope="function") def override_settings(monkeypatch, tmp_path): import fractal_client.config @@ -127,4 +124,18 @@ def _override_settings( return _override_settings +@pytest.fixture(scope="session") +def new_name(): + class Counter(object): + ind: int = 0 + + def __next__(self): + self.ind = self.ind + 1 + return f"name{self.ind - 1}" + + names = Counter() + + return lambda: next(names) + + from .fixtures_testserver import * # noqa: 401 diff --git a/tests/data/import-export/wf3.json b/tests/data/import-export/wf3.json new file mode 100644 index 000000000..7fbb2a3f6 --- 
/dev/null +++ b/tests/data/import-export/wf3.json @@ -0,0 +1,20 @@ +{ + "name": "Generic WF", + "task_list": [ + { + "task": { + "name": "generic_task", + "pkg_name": "fractal-tasks-mock" + } + }, + { + "args_non_parallel": { + "raise_error": true + }, + "task": { + "name": "generic_task", + "pkg_name": "fractal-tasks-mock" + } + } + ] +} diff --git a/tests/data/import-export/workflow.json b/tests/data/import-export/workflow.json index 44ea014cd..9ea2c71bc 100644 --- a/tests/data/import-export/workflow.json +++ b/tests/data/import-export/workflow.json @@ -6,14 +6,15 @@ "message_x": "task_x", "arg_x": "value_x" }, + "meta_parallel": {"key1": "value1"}, "args_non_parallel": { "message_y": "task_y", "arg_y": "value_y" }, - "meta_parallel": {"key1": "value1"}, "meta_non_parallel": {"key2": "value2"}, "task": { - "source": "PKG_SOURCE:dummy2" + "name": "dummy", + "pkg_name": "dummy" } } ] diff --git a/tests/data/import-export/workflow_2.json b/tests/data/import-export/workflow_2.json index b43ae4991..db94903e0 100644 --- a/tests/data/import-export/workflow_2.json +++ b/tests/data/import-export/workflow_2.json @@ -13,7 +13,8 @@ "meta_parallel": {"key1": "value1"}, "meta_non_parallel": {"key2": "value2"}, "task": { - "source": "PKG_SOURCE:dummy2" + "name": "dummy2", + "pkg_name": "dummy2" } } ] diff --git a/tests/fixtures_testserver.py b/tests/fixtures_testserver.py index 1bb124190..39191ea77 100644 --- a/tests/fixtures_testserver.py +++ b/tests/fixtures_testserver.py @@ -1,303 +1,237 @@ import logging +import os +import shlex +import subprocess import time -from multiprocessing import Process -from os import environ +from pathlib import Path from typing import Optional -import httpx import pytest -import uvicorn -from sqlmodel import select +from httpx import ConnectError +from fractal_client.client import handle + +DB_NAME = "pytest-fractal-client" logger = logging.getLogger("fractal-client") logger.setLevel(logging.DEBUG) -PORT = 10080 +PORT = 8765 @pytest.fixture -def override_server_settings(tmp_path): - from fractal_server.config import Settings, get_settings - from fractal_server.syringe import Inject +def superuser(invoke_as_superuser): + return invoke_as_superuser("user whoami").data - settings = Settings() - settings.DB_ENGINE = "postgres-psycopg" - settings.POSTGRES_DB = "fractal_client_test" - settings.POSTGRES_USER = "postgres" - settings.POSTGRES_PASSWORD = "postgres" +@pytest.fixture(scope="session") +def tester(): + return dict(email="client_tester@example.org", password="pytest") - settings.FRACTAL_RUNNER_BACKEND = "local" - settings.JWT_SECRET_KEY = "secret_key" - base_folder = tmp_path - settings.FRACTAL_TASKS_DIR = base_folder / "FRACTAL_TASKS_DIR" - settings.FRACTAL_RUNNER_WORKING_BASE_DIR = ( - base_folder / "FRACTAL_RUNNER_WORKING_BASE_DIR" +def _run_command(cmd: str) -> str: + logging.warning(f"Now running {cmd=}") + res = subprocess.run( + shlex.split(cmd), + capture_output=True, + env=dict(PGPASSWORD="postgres", **os.environ), + encoding="utf-8", ) - settings.FRACTAL_LOGGING_LEVEL = logging.DEBUG - settings.FRACTAL_API_SUBMIT_RATE_LIMIT = 0 - - def _get_settings(): - return settings - - Inject.override(get_settings, _get_settings) - try: - yield - finally: - Inject.pop(get_settings) - - -@pytest.fixture(scope="function", autouse=True) -def testserver(override_server_settings): - - from fractal_server.app.db import DB - from fractal_server.app.models.security import SQLModel - from fractal_server.app.models.security import UserOAuth - from 
fractal_server.app.models.user_settings import UserSettings - from fractal_server.app.models.security import UserGroup - from fractal_server.app.models.linkusergroup import LinkUserGroup - from fractal_server.app.security import _create_first_group - - # INIT DB - engine_sync = DB.engine_sync() - logger.debug(engine_sync.url) - SQLModel.metadata.create_all(engine_sync) - - # Create default group and first superuser - # NOTE: we have to do it here, because we are not calling the `set_db` - # function from fractal-server. This would change with - # https://github.com/fractal-analytics-platform/fractal-client/issues/697 - # NOTE: `hashed_password` is the bcrypt hash of "1234", see - # https://github.com/fractal-analytics-platform/fractal-server/issues/1750 - _create_first_group() - with next(DB.get_sync_db()) as db: - user = UserOAuth( - email="admin@fractal.xy", - hashed_password=( - "$2b$12$K0C4t7XILgpcQx35V3QE3enOODQ1IH9pzW49nqjHbrx2uQTMVYsQC" - ), - username=environ["FRACTAL_USERNAME"], - is_superuser=True, - is_verified=True, - is_active=True, - ) - empty_user_settings = UserSettings() - user.settings = empty_user_settings - db.add(user) - db.commit() - - first_group = db.execute(select(UserGroup)).scalar() - first_user = db.execute(select(UserOAuth)).scalar() - - link = LinkUserGroup(group_id=first_group.id, user_id=first_user.id) - db.add(link) - db.commit() - - # Run testserver in a separate process - # cf. https://stackoverflow.com/a/57816608/283972 - def run_server(): - uvicorn.run( - "fractal_server.main:app", - port=PORT, - log_level="debug", - timeout_keep_alive=10, + if res.returncode != 0: + logging.error(f"{res.stdout=}") + logging.error(f"{res.stderr=}") + raise RuntimeError(res.stderr) + else: + return res.stdout + + +@pytest.fixture(scope="session", autouse=True) +def testserver(tester, tmpdir_factory, request): + + FRACTAL_TASK_DIR = str(tmpdir_factory.mktemp("TASKS")) + FRACTAL_RUNNER_WORKING_BASE_DIR = str(tmpdir_factory.mktemp("JOBS")) + + env_file = Path(".fractal_server.env") + with env_file.open("w") as f: + f.write( + "DB_ENGINE=postgres-psycopg\n" + "POSTGRES_HOST=localhost\n" + f"POSTGRES_DB={DB_NAME}\n" + "POSTGRES_USER=postgres\n" + "POSTGRES_PASSWORD=postgres\n" + "FRACTAL_RUNNER_BACKEND=local\n" + "JWT_SECRET_KEY=secret_key\n" + f"FRACTAL_TASKS_DIR={FRACTAL_TASK_DIR}\n" + "FRACTAL_RUNNER_WORKING_BASE_DIR=" + f"{FRACTAL_RUNNER_WORKING_BASE_DIR}\n" + "FRACTAL_LOGGING_LEVEL=0\n" ) - - proc = Process(target=run_server, args=(), daemon=True) - proc.start() + _run_command( + f"dropdb --username=postgres --host localhost --if-exists {DB_NAME}" + ) + _run_command(f"createdb --username=postgres --host localhost {DB_NAME}") + _run_command("poetry run fractalctl set-db") + + LOG_DIR = Path( + os.environ.get( + "GHA_FRACTAL_SERVER_LOG", + tmpdir_factory.mktemp("LOGS"), + ), + ) + path_out = LOG_DIR / "server_out" + path_err = LOG_DIR / "server_err" + f_out = path_out.open("w") + f_err = path_err.open("w") + + server_process = subprocess.Popen( + shlex.split(f"poetry run fractalctl start --port {PORT}"), + stdout=f_out, + stderr=f_err, + ) # Wait until the server is up - TIMEOUT = 8 - time_used = 0 + TIMEOUT = 8.0 + t_start = time.perf_counter() while True: try: - res = httpx.get(f"http://localhost:{PORT}/api/alive/") - assert res.status_code == 200 - break - except httpx.ConnectError: + res = handle(shlex.split("fractal version")) + if res.retcode == 0: + break + else: + raise ConnectError("fractal-server not ready") + except ConnectError: logger.debug("Fractal server not 
ready, wait one more second.") - time.sleep(1) - time_used += 1 - if time_used > TIMEOUT: + if time.perf_counter() - t_start > TIMEOUT: raise RuntimeError( f"Could not start up server within {TIMEOUT} seconds," " in `testserver` fixture." ) + time.sleep(0.1) - logger.debug(environ["FRACTAL_SERVER"]) - yield environ["FRACTAL_SERVER"] - - # Cleanup DB - engine_sync.dispose() - try: - DB._engine_async - raise - except AttributeError: - # we show here that we do not need to dispose of `engine_async`, - # because it is never used. - pass - SQLModel.metadata.drop_all(engine_sync) - logger.debug("Dropped all tables from the database.") - - proc.kill() + handle( + shlex.split( + ( + "fractal --user admin@fractal.xy --password 1234 " + f"user register {tester['email']} {tester['password']}" + ) + ) + ) + yield -@pytest.fixture -def db(testserver): - """ - NOTE: Only use this fixture within other fixtures!!! - """ - from fractal_server.app.db import get_sync_db + request.session.warn( + Warning( + f"\n\nTerminating Fractal Server (PID: {server_process.pid}).\n" + f"stdout -> {path_out}\n" + f"stderr -> {path_err}\n" + ) + ) - for db in get_sync_db(): - yield db + server_process.terminate() + server_process.kill() + _run_command(f"dropdb --username=postgres --host localhost {DB_NAME}") + env_file.unlink() + f_out.close() + f_err.close() @pytest.fixture -def task_factory(db): - from fractal_server.app.models.v2.task import TaskV2 - from fractal_server.app.models.v2.task import TaskGroupV2 - - def _task_factory(user_id: int, **task_args_override): - task_args = dict(name="test_task", type="parallel") - task_args.update(task_args_override) - t = TaskV2(**task_args) - - db.add( - TaskGroupV2( - user_id=user_id, - origin="other", - pkg_name=t.name, - task_list=[t], - ) - ) - - db.commit() - db.refresh(t) - return t +def task_factory(invoke): + def _task_factory( + name: str, + command_non_parallel: Optional[str] = None, + command_parallel: Optional[str] = None, + version: Optional[str] = None, + meta_non_parallel: Optional[str] = None, + meta_parallel: Optional[str] = None, + args_schema_non_parallel: Optional[str] = None, + args_schema_parallel: Optional[str] = None, + args_schema_version: Optional[str] = None, + ): + cmd = "task new" + if command_non_parallel is not None: + cmd += f" --command-non-parallel {command_non_parallel}" + if command_parallel is not None: + cmd += f" --command-parallel {command_parallel}" + if version is not None: + cmd += f" --version {version}" + if meta_non_parallel is not None: + cmd += f" --meta-non-parallel {meta_non_parallel}" + if meta_parallel is not None: + cmd += f" --meta-parallel {meta_parallel}" + if args_schema_non_parallel is not None: + cmd += f" --args-schema-non-parallel {args_schema_non_parallel}" + if args_schema_parallel is not None: + cmd += f" --args-schema-parallel {args_schema_parallel}" + if args_schema_version is not None: + cmd += f" --args-schema-version {args_schema_version}" + cmd += f" {name}" + + res = invoke(cmd) + return res.data return _task_factory @pytest.fixture -def project_factory(db): - from fractal_server.app.models.v2.project import ProjectV2 - - def _project_factory(user_id=None, **project_args_override): - project_args = dict(name="name") - project_args.update(project_args_override) - p = ProjectV2(**project_args) - if user_id: - from fractal_server.app.security import User - - user = db.get(User, user_id) - p.user_list.append(user) - db.add(p) - db.commit() - db.refresh(p) - return p +def project_factory(invoke): + def 
_project_factory(name: str): + res = invoke(f"project new {name}") + return res.data return _project_factory @pytest.fixture -def workflow_factory(db, project_factory): - from fractal_server.app.models.v2.workflow import WorkflowV2 - - def _workflow_factory(**wf_args_override): - wf_args = dict(name="name") - wf_args.update(wf_args_override) - wf = WorkflowV2(**wf_args) - db.add(wf) - db.commit() - db.refresh(wf) - return wf +def workflow_factory(invoke): + def _workflow_factory(name: str, project_id: int): + res = invoke(f"workflow new {name} {project_id}") + return res.data return _workflow_factory @pytest.fixture -def job_factory(db): - from fractal_server.app.models.v2.job import JobV2 - from fractal_server.utils import get_timestamp - - def _job_factory(**job_args_override): - job_args = dict( - project_id=1, - input_dataset_id=1, - output_dataset_id=2, - workflow_id=1, - worker_init="WORKER_INIT string", - first_task_index=9999, - last_task_index=9999, - workflow_dump={}, - dataset_dump=dict( - id=1, - name="ds-in", - zarr_dir="/abc", - project_id=1, - timestamp_created=str(get_timestamp()), - filters=dict(attributes=dict(a=1), types=dict(b=True)), - ), - project_dump=dict( - id=1, - name="proj", - timestamp_created=str(get_timestamp()), - ), - start_timestamp=get_timestamp(), - user_email="test@test.test", - ) - job_args.update(job_args_override) - j = JobV2(**job_args) - db.add(j) - db.commit() - db.refresh(j) - return j +def dataset_factory(invoke): + def _dataset_factory( + project_id: int, + name: str, + zarr_dir: str, + filters: Optional[str] = None, + ): + cmd = "project add-dataset" + if filters is not None: + cmd += f" --filters {filters}" + cmd += f" {project_id} {name} {zarr_dir}" + + res = invoke(cmd) + return res.data - return _job_factory + return _dataset_factory @pytest.fixture -def user_factory(testserver, db, client_superuser): - def __register_user( +def user_factory(invoke_as_superuser): + def __user_factory( email: str, password: str, + cache_dir: Optional[str] = None, slurm_user: Optional[str] = None, username: Optional[str] = None, + superuser: bool = False, ): - # Prepare payload - new_user = dict(email=email, password=password) - if slurm_user: - new_user["slurm_user"] = slurm_user - if username: - new_user["username"] = username - # Register user via API call - res = client_superuser.post( - f"http://localhost:{PORT}/auth/register/", - json=new_user, - ) - assert res.status_code == 201 - user_id = res.json()["id"] - # Make user verified via API call - res = client_superuser.patch( - f"http://localhost:{PORT}/auth/users/{user_id}/", - json=dict(is_verified=True), - ) - assert res.status_code == 200 - return res.json() - - return __register_user - - -@pytest.fixture -def register_user(user_factory): - - created_user = user_factory( - email=environ["FRACTAL_USER"], - password=environ["FRACTAL_PASSWORD"], - username=environ["FRACTAL_USERNAME"], - ) - - yield created_user + cmd = "user register" + if cache_dir is not None: + cmd += f" --cache-dir {cache_dir}" + if slurm_user is not None: + cmd += f" --slurm-user {slurm_user}" + if username is not None: + cmd += f" --username {username}" + if superuser is True: + cmd += " --superuser" + cmd += f" {email} {password}" + + res = invoke_as_superuser(cmd) + return res.data + + return __user_factory diff --git a/tests/test_auth.py b/tests/test_auth.py index 09f3eec59..ef8a2a861 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,34 +1,31 @@ -from os import environ - import pytest from devtools import debug 
from fractal_client.authclient import AuthenticationError from fractal_client.authclient import AuthToken +from fractal_client.authclient import Client -def test_auth_registered(client, register_user): +def test_auth_registered(tester): """ GIVEN an existing user WHEN fetching a token THEN authentication goes through """ auth = AuthToken( - client, - username=environ.get("FRACTAL_USER"), - password=environ.get("FRACTAL_PASSWORD"), + client=Client(), username=tester["email"], password=tester["password"] ) token = auth() assert token -def test_auth_fail(client): +def test_auth_fail(): """ GIVEN no user registered WHEN fetching a token THEN authentication error is raised """ with pytest.raises(AuthenticationError) as err: - auth = AuthToken(client, username="foo", password="bar") + auth = AuthToken(client=Client(), username="foo", password="bar") auth() debug(err.value) diff --git a/tests/test_client.py b/tests/test_client.py index da800389a..50b2389ec 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,5 +1,4 @@ import shlex -from os import environ import httpx import pytest @@ -9,9 +8,6 @@ from fractal_client.client import handle -DEFAULT_TEST_EMAIL = environ["FRACTAL_USER"] - - def test_debug(invoke): res = invoke("--debug version") assert res.retcode == 0 @@ -31,17 +27,17 @@ def test_server(): WHEN it gets called THEN it replies """ - res = httpx.get("http://localhost:10080/api/alive/") + res = httpx.get("http://localhost:8765/api/alive/") debug(res.json()) assert res.status_code == 200 -def test_register_user(register_user, invoke): +def test_register_user(tester, invoke): res = invoke("user whoami") user = res.data debug(user) assert res.retcode == 0 - assert user["email"] == DEFAULT_TEST_EMAIL + assert user["email"] == tester["email"] def test_user_override(user_factory, invoke): @@ -98,7 +94,7 @@ def test_missing_password(override_settings): assert "FRACTAL_PASSWORD variable not defined" in res.data -def test_connecterror(client, override_settings): +def test_connecterror(override_settings): override_settings( FRACTAL_USER="admin@fractal.xy", FRACTAL_PASSWORD="1234", diff --git a/tests/test_dataset.py b/tests/test_dataset.py index 5d65a00ea..1943a4e65 100644 --- a/tests/test_dataset.py +++ b/tests/test_dataset.py @@ -3,12 +3,12 @@ from devtools import debug -def test_create_dataset(register_user, invoke, tmp_path): +def test_create_dataset(invoke, tmp_path, new_name): """ Test some specific branches of the post_dataset function and parser. 
""" - res = invoke("project new prj0") + res = invoke(f"project new {new_name()}") project_id = res.data["id"] FILTERS = {"attributes": {"a": 1}, "types": {"b": True}} @@ -19,7 +19,7 @@ def test_create_dataset(register_user, invoke, tmp_path): res = invoke( ( - f"project add-dataset {project_id} MyDS /tmp " + f"project add-dataset {project_id} {new_name()} /tmp " f"--filters {file_filters}" ) ) @@ -28,19 +28,19 @@ def test_create_dataset(register_user, invoke, tmp_path): assert res.retcode == 0 assert res.data["filters"] == FILTERS - res = invoke(f"--batch project add-dataset {project_id} MyNewDS /tmp") + res = invoke(f"--batch project add-dataset {project_id} {new_name()} /tmp") debug(res.data) assert res.retcode == 0 -def test_edit_dataset(register_user, invoke, tmp_path): - res = invoke("project new prj0") +def test_edit_dataset(invoke, tmp_path, new_name): + res = invoke(f"project new {new_name()}") project_id = res.data["id"] - res = invoke(f"project add-dataset {project_id} test_name /tmp") + res = invoke(f"project add-dataset {project_id} {new_name()} /tmp") dataset_id = res.data["id"] - NAME = "this_new_name" + NAME = new_name() FILTERS = {"attributes": {"a": 1}, "types": {"b": True}} FILTERS_FILE = str(tmp_path / "meta.json") with open(FILTERS_FILE, "w") as f: @@ -60,12 +60,12 @@ def test_edit_dataset(register_user, invoke, tmp_path): assert res.retcode == 0 -def test_delete_dataset(register_user, invoke): +def test_delete_dataset(invoke, new_name): # Create a project with its default dataset - res = invoke("project new prj0") + res = invoke(f"project new {new_name()}") project_id = res.data["id"] - res = invoke(f"project add-dataset {project_id} test_name /tmp") + res = invoke(f"project add-dataset {project_id} {new_name()} /tmp") dataset_id = res.data["id"] # Delete dataset @@ -76,11 +76,11 @@ def test_delete_dataset(register_user, invoke): assert res.data["detail"] == "Dataset not found" -def test_show_dataset(register_user, invoke): +def test_show_dataset(invoke, new_name): # Create a project with its default dataset - res = invoke("project new prj0") + res = invoke(f"project new {new_name()}") project_id = res.data["id"] - res = invoke(f"project add-dataset {project_id} test_name /tmp") + res = invoke(f"project add-dataset {project_id} {new_name()} /tmp") dataset_id = res.data["id"] res = invoke(f"dataset show {project_id} {dataset_id}") diff --git a/tests/test_group.py b/tests/test_group.py index 721710da9..d5068455e 100644 --- a/tests/test_group.py +++ b/tests/test_group.py @@ -2,7 +2,7 @@ from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME -def test_group_commands_auth(register_user, invoke, caplog): +def test_group_commands_auth(invoke, caplog): """ Assert 'group' commands are not accessible to standard users """ @@ -19,34 +19,38 @@ def _assert_403(cmd): _assert_403(cmd="group update 1 --new-user-ids 1") -def test_group_commands(user_factory, invoke_as_superuser): +def test_group_commands( + user_factory, invoke_as_superuser, new_name, superuser +): # get default group id and superuser id res = invoke_as_superuser("group list --user-ids") assert res.retcode == 0 - assert len(res.data) == 1 # Only one group (default) - assert res.data[0]["name"] == FRACTAL_DEFAULT_GROUP_NAME - assert len(res.data[0]["user_ids"]) == 1 # Only one user (superuser) + initial_number_of_groups = len(res.data) + default_group = next( + group + for group in res.data + if group["name"] == FRACTAL_DEFAULT_GROUP_NAME + ) + initial_number_of_users = len(default_group["user_ids"]) - 
default_group_id = res.data[0]["id"] - superuser_id = res.data[0]["user_ids"][0] + default_group_id = default_group["id"] + superuser_id = superuser["id"] # create 3 standard users (by default in default group) - user1 = user_factory(email="user1@fractal.xy", password="psw1") + user1 = user_factory(email=f"{new_name()}@example.org", password="psw1") user1_id = user1["id"] assert user1["group_ids_names"] == [[default_group_id, "All"]] - user2 = user_factory(email="user2@fractal.xy", password="psw2") + user2 = user_factory(email=f"{new_name()}@example.org", password="psw2") user2_id = user2["id"] assert user2["group_ids_names"] == [[default_group_id, "All"]] - user3 = user_factory(email="user3@fractal.xy", password="psw3") + user3 = user_factory(email=f"{new_name()}@example.org", password="psw3") user3_id = user3["id"] assert user3["group_ids_names"] == [[default_group_id, "All"]] res = invoke_as_superuser("group list --user-ids") - assert len(res.data) == 1 - assert set(res.data[0]["user_ids"]) == set( - [superuser_id, user1_id, user2_id, user3_id] - ) + assert len(res.data) == initial_number_of_groups + assert len(res.data[0]["user_ids"]) == initial_number_of_users + 3 # Create 2 new empty groups (`group new`) @@ -54,15 +58,16 @@ def test_group_commands(user_factory, invoke_as_superuser): # missing 'name' invoke_as_superuser("group new") - res = invoke_as_superuser("group new foo --viewer-paths /a /b") + NEW_NAME = new_name() + res = invoke_as_superuser(f"group new {NEW_NAME} --viewer-paths /a /b") assert res.retcode == 0 - assert res.data["name"] == "foo" + assert res.data["name"] == NEW_NAME assert res.data["user_ids"] == [] group1_viewer_paths = res.data["viewer_paths"] assert group1_viewer_paths == ["/a", "/b"] group1_id = res.data["id"] - res = invoke_as_superuser("group new bar") + res = invoke_as_superuser(f"group new {new_name()}") group2_id = res.data["id"] group2_viewer_paths = res.data["viewer_paths"] assert group2_viewer_paths == [] @@ -117,14 +122,12 @@ def test_group_commands(user_factory, invoke_as_superuser): # Check groups are updated res = invoke_as_superuser("group list --user-ids") - assert len(res.data) == 3 + assert len(res.data) == initial_number_of_groups + 2 assert res.data[0]["id"] == default_group_id - assert set(res.data[0]["user_ids"]) == set( - [superuser_id, user1_id, user2_id, user3_id] - ) - assert res.data[1]["id"] == group1_id + assert len(res.data[0]["user_ids"]) == initial_number_of_users + 3 + assert res.data[-2]["id"] == group1_id assert set(res.data[1]["user_ids"]) == set([user1_id, user2_id]) - assert res.data[2]["id"] == group2_id + assert res.data[-1]["id"] == group2_id assert set(res.data[2]["user_ids"]) == set( [user3_id, user2_id, superuser_id] ) @@ -138,7 +141,7 @@ def test_group_commands(user_factory, invoke_as_superuser): res = invoke_as_superuser(f"group get {default_group_id}") assert res.retcode == 0 assert res.data["name"] == FRACTAL_DEFAULT_GROUP_NAME - assert len(res.data["user_ids"]) == 4 + assert len(res.data["user_ids"]) == initial_number_of_users + 3 # Test `list` without `--user-ids` @@ -149,9 +152,9 @@ def test_group_commands(user_factory, invoke_as_superuser): # Test `--batch` res = invoke_as_superuser("--batch group list") - assert res.data == f"{default_group_id} {group1_id} {group2_id}" + assert len(res.data.split(" ")) == initial_number_of_groups + 2 - res = invoke_as_superuser("--batch group new xyz") + res = invoke_as_superuser(f"--batch group new {new_name()}") assert isinstance(res.data, int) # Test update of 
viewer-paths @@ -172,7 +175,6 @@ def test_group_commands(user_factory, invoke_as_superuser): invoke_as_superuser( f"group update {group1_id} --new-user-ids {superuser_id}" ) - res = invoke_as_superuser("user whoami") - assert "viewer_paths" not in res.data + assert "viewer_paths" not in superuser res = invoke_as_superuser("user whoami --viewer-paths") assert set(res.data.get("viewer_paths")) == {"/a/b", "/c/d"} diff --git a/tests/test_job.py b/tests/test_job.py index 371d46725..820357339 100644 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -1,258 +1,21 @@ -import json import time from pathlib import Path from urllib.request import urlretrieve -import pytest # noqa F401 +import pytest from devtools import debug -from fractal_server.utils import get_timestamp TIMEOUT = 15.0 -LOG = "Here are some logs" - -@pytest.mark.parametrize("status", ["done", "failed"]) -def test_job_show( - register_user, - invoke, - tmp_path: Path, - status: str, - workflow_factory, - project_factory, - job_factory, -): - """ - GIVEN a job entry in the database - WHEN calling the `job show` command with multiple options - THEN the client response has the expected status and log attributes - """ - - # Create mock Workflow/ApplyWorkflow objects - res = invoke("project new prj0") - project_id = res.data["id"] - wf = workflow_factory(project_id=project_id) - debug(wf) - log = LOG - workflow_path = tmp_path / f"workflow_{wf.id}" - workflow_path.mkdir() - job = job_factory( - working_dir=workflow_path.as_posix(), - worfklow_id=wf.id, - status=status, - log=log, - workflow_dump={ - "name": "my workflow", - "id": 1, - "project_id": 1, - "timestamp_created": str(get_timestamp()), - }, - ) - debug(job) - - # Check `job show` output - cmd = f"job show {project_id} {job.id}" - debug(cmd) - res = invoke(cmd) - assert res.retcode == 0 - assert res.data["status"] == status - debug(res.data) - assert LOG in res.data["log"] - res.show() - - # Check `job show` output with --batch - cmd = f"--batch job show {project_id} {job.id}" - res = invoke(cmd) - assert res.retcode == 0 - assert res.data == status - - -def test_job_list( - register_user, +def test_job_submit( invoke, - tmp_path: Path, project_factory, - workflow_factory, - job_factory, -): - """ - GIVEN several job entries in the database - WHEN calling the `job list ` command - THEN the client response lists the jobs associated to the project - """ - - # Create mock Project/Workflow/ApplyWorkflow objects - res = invoke("project new prj0") - project_id = res.data["id"] - wf_1 = workflow_factory(project_id=project_id) - wf_2 = workflow_factory(project_id=project_id) - wd_1 = tmp_path / f"workflow_{wf_1.id}" - wd_2 = tmp_path / f"workflow_{wf_2.id}" - job1 = job_factory( - working_dir=wd_1.as_posix(), - worfklow_id=wf_1.id, - status="running", - workflow_dump={ - "name": "my workflow", - "id": 1, - "project_id": 1, - "timestamp_created": str(get_timestamp()), - }, - ) - job2 = job_factory( - working_dir=wd_2.as_posix(), - worfklow_id=wf_2.id, - status="done", - workflow_dump={ - "name": "my workflow", - "id": 1, - "project_id": 1, - "timestamp_created": str(get_timestamp()), - }, - ) - debug(job1) - debug(job2) - - # Check `job list` output with --batch option - cmd = f"--batch job list {project_id}" - debug(cmd) - res = invoke(cmd) - debug(res.data) - assert res.retcode == 0 - job_ids = [int(i) for i in res.data.split()] - assert job1.id in job_ids - assert job2.id in job_ids - - # Check `job list` output - cmd = f"job list {project_id}" - debug(cmd) - res = invoke(cmd) - 
assert res.retcode == 0 - debug(res.data) - # There is not much to assert here, apart from successful invocation of the - # command. We add a res.show() for when pytest is run with the -s flag - res.show() - - -def test_job_download_logs( - register_user, - invoke, - tmp_path: Path, - workflow_factory, - job_factory, -): - """ - Test the `job download-logs` command - """ - - # Create mock Workflow/ApplyWorkflow objects - res = invoke("project new prj0") - project_id = res.data["id"] - wf = workflow_factory(project_id=project_id) - wd = tmp_path / f"workflow_{wf.id}" - job = job_factory( - working_dir=wd.as_posix(), - worfklow_id=wf.id, - status="running", - ) - - # Write something in a logfile within the workflow directory - LOGFILE = "log.txt" - wd.mkdir() - with (wd / LOGFILE).open("w") as f: - f.write(LOG) - - # Check that download-logs fails if the output folder already exists - output_fail = tmp_path / "output_dir_for_logs_fail" - output_fail.mkdir() - cmd = ( - f"job download-logs {job.project_id} {job.id} " - f"--output {str(output_fail)}" - ) - debug(cmd) - res = invoke(cmd) - assert res.retcode == 1 - - # Check standard `job download-logs` output - output = tmp_path / "output_dir_for_logs" - cmd = ( - f"job download-logs {job.project_id} {job.id} " - f"--output {str(output)}" - ) - debug(cmd) - res = invoke(cmd) - assert res.retcode == 0 - debug(res.data) - - # Check that the logfile is there and has the right content - logfile = output / LOGFILE - assert logfile.exists() - with logfile.open("r") as f: - contents = f.read() - assert contents == LOG - - # Check failure for invalid ID - output = tmp_path / "output_dir_for_logs" - cmd = "job download-logs 9999 9999 --output /somewhere/useless" - debug(cmd) - with pytest.raises(SystemExit) as e: - res = invoke(cmd) - debug(e.value) - - -def test_job_stop( - register_user, - invoke, + dataset_factory, tmp_path: Path, - workflow_factory, - project_factory, - job_factory, - caplog, + testdata_path: Path, + new_name, ): - """ - GIVEN a job entry in the database - WHEN calling the `job stop` command - THEN the client response has the expected status - - NOTE 1: This is a test of the client command, not a test of the - corresponding fractal-server feature - - NOTE 2: We don't have a fractal-server instance with SLURM backend in the - fractal client CI, so this command can be tested for consistency but not - for functionality. 
- """ - - # Create mock Workflow/ApplyWorkflow objects - res = invoke("project new prj0") - project_id = res.data["id"] - wf = workflow_factory(project_id=project_id) - workflow_path = tmp_path / f"workflow_{wf.id}" - workflow_path.mkdir() - job = job_factory( - working_dir=workflow_path.as_posix(), - worfklow_id=wf.id, - ) - debug(job) - - # Call `job stop` (this will fail because FRACTAL_RUNNER_BACKEND="local" - cmd = f"job stop {project_id} {job.id}" - debug(cmd) - with pytest.raises(SystemExit): - res = invoke(cmd) - debug(caplog.text) - assert "Stopping a job execution is not implemented" in caplog.text - caplog.clear() - - -def test_job_submit( - register_user, invoke, testdata_path: Path, tmp_path: Path -): - """ - GIVEN a project and a nontrivial workflow - WHEN the client requests to apply the workflow to the project - THEN the workflow is scheduled and executed, and the artifacts created - """ - # Collect tasks PACKAGE_URL = ( "https://github.com/fractal-analytics-platform/fractal-server/" @@ -262,12 +25,18 @@ def test_job_submit( PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl" urlretrieve(PACKAGE_URL, PACKAGE_PATH) - WORKFLOW_NAME = "mywf" - res0 = invoke(f"task collect {PACKAGE_PATH}") - debug(res0) - venv_path = res0.data["data"]["venv_path"] - debug(venv_path) - state_id = res0.data["id"] + res = invoke(f"task collect {PACKAGE_PATH}") + assert res.retcode == 0 + state_id = res.data["id"] + + # Create a project + project = project_factory(name=new_name()) + project_id = project["id"] + zarr_dir = (tmp_path / "zarr_dir").as_posix() + dataset = dataset_factory( + name=new_name(), project_id=project_id, zarr_dir=zarr_dir + ) + dataset_id = dataset["id"] # Wait for task collection to end starting_time = time.perf_counter() @@ -276,129 +45,104 @@ def test_job_submit( if res1.data["data"]["status"] == "OK": debug(res1.data) break - time.sleep(1) + time.sleep(0.1) assert time.perf_counter() - starting_time < TIMEOUT - # Create a project - res = invoke("project new testproject") - assert res.retcode == 0 - prj = res.data - prj_id = prj["id"] - res = invoke(f"project add-dataset {prj_id} test_name {tmp_path}/ds") - dataset_id = res.data["id"] - - # Create workflow and add task twice - res = invoke(f"workflow new {WORKFLOW_NAME} {prj_id}") + wf_json = (testdata_path / "import-export/wf3.json").as_posix() + res = invoke( + f"workflow import --project-id {project_id} --json-file {wf_json}" + ) workflow = res.data workflow_id = workflow["id"] - args_file = str(tmp_path / "args_file.json") - with open(args_file, "w") as f: - json.dump({"image_dir": "/asdasd"}, f) debug(workflow) - assert res.retcode == 0 - for _ in [0, 1]: - TASK_ID = 1 - res = invoke( - ( - f"workflow add-task {prj_id} {workflow_id} " - f"--task-id {TASK_ID} " - f"--args-non-parallel {args_file}" - ) - ) - workflow_task = res.data - debug(workflow_task) - assert res.retcode == 0 - TASK_NAME = res.data["task"]["name"] - debug(TASK_NAME) - # Call `workflow apply` FIRST_TASK_INDEX = 0 LAST_TASK_INDEX = 0 - cmd = ( - f"job submit " - f"{prj_id} {workflow_id} {dataset_id} " + WORKER_INIT = "export MYVARIABLE=MYVALUE" + res = invoke( + f"job submit {project_id} {workflow_id} {dataset_id} " f"--start {FIRST_TASK_INDEX} --end {LAST_TASK_INDEX} " - f'--worker-init "export SOMEVARIABLE=1"' + f'--worker-init "{WORKER_INIT}"' ) - debug(cmd) - res = invoke(cmd) - job = res.data - debug(job) assert res.retcode == 0 - job_id = job["id"] - assert job["status"] == "submitted" - - # Avoid immediately calling `job show` right 
after `workflow apply` - time.sleep(1) + job1 = res.data + job1_id = job1["id"] + assert job1["status"] == "submitted" + assert job1["first_task_index"] == FIRST_TASK_INDEX + assert job1["last_task_index"] == LAST_TASK_INDEX + assert job1["worker_init"] == WORKER_INIT # Check that job completed successfully - cmd = f"job show {prj_id} {job_id}" + cmd = f"job show {project_id} {job1_id}" starting_time = time.perf_counter() debug(cmd) while True: res = invoke(cmd) - job = res.data - debug(job) + job1 = res.data + debug(job1) assert res.retcode == 0 - if job["status"] == "done": + if job1["status"] == "done": break - elif job["status"] == "failed": - raise RuntimeError(job) - time.sleep(1) + elif job1["status"] == "failed": + raise RuntimeError(job1) + time.sleep(0.1) assert time.perf_counter() - starting_time < TIMEOUT - - # Check that job has correct first_task_index and last_task_index - # attributes - assert job["first_task_index"] == FIRST_TASK_INDEX - assert job["last_task_index"] == LAST_TASK_INDEX + assert job1["log"] is not None # Prepare and run a workflow with a failing task - args_file = str(tmp_path / "args.json") - with open(args_file, "w") as f: - json.dump({"image_dir": "/asdasd"}, f) + FIRST_TASK_INDEX = 0 + LAST_TASK_INDEX = 1 res = invoke( - ( - f"workflow add-task {prj_id} {workflow_id} --task-id {TASK_ID}" - f" --args-non-parallel {args_file} " - ) + f"--batch " + f"job submit {project_id} {workflow_id} {dataset_id} " + f"--start {FIRST_TASK_INDEX} --end {LAST_TASK_INDEX} " + f'--worker-init "{WORKER_INIT}"' ) assert res.retcode == 0 - cmd = f"job submit " f"{prj_id} {workflow_id} {dataset_id}" - debug(cmd) - res = invoke(cmd) - assert res.retcode == 0 - job_id = res.data["id"] - - # Avoid immediately calling `job show` right after `workflow apply` - time.sleep(1) + job2_id = res.data # Verify that status is failed, and that there is a log - cmd = f"job show {prj_id} {job_id}" + cmd = f"--batch job show {project_id} {job2_id}" starting_time = time.perf_counter() while True: res = invoke(cmd) - job = res.data - debug(job) + status = res.data + debug(status) assert res.retcode == 0 - if job["status"] == "failed": + if status == "failed": break - time.sleep(1) + time.sleep(0.1) assert time.perf_counter() - starting_time < TIMEOUT - assert job["log"] is not None - # Prepare and submit a workflow with --batch - res = invoke(f"workflow new OneMoreWorkflow {prj_id}") - workflow_id = res.data["id"] - res = invoke( - ( - f"workflow add-task {prj_id} {workflow_id} --task-id {TASK_ID} " - f"--args-non-parallel {args_file}" - ) - ) + # Run job list with/without --batch + res = invoke(f"--batch job list {project_id}") assert res.retcode == 0 - cmd = f"--batch job submit " f"{prj_id} {workflow_id} {dataset_id}" - debug(cmd) + assert res.data == f"{job1_id} {job2_id}" + res = invoke(f"job list {project_id}") + assert res.retcode == 0 + assert set(job["id"] for job in res.data) == set([job1_id, job2_id]) + + # Download logs / success + log1_dir = tmp_path / "log1" + cmd = ( + f"job download-logs {project_id} {job1_id} " + f"--output {log1_dir.as_posix()}" + ) res = invoke(cmd) assert res.retcode == 0 - debug(res.data) - assert isinstance(res.data, int) + files = log1_dir.glob("*") + assert "workflow.log" in [f.name for f in files] + + # Download logs / fail because folder already exists + log1_dir = tmp_path / "log1" + cmd = ( + f"job download-logs {project_id} {job1_id} " + f"--output {log1_dir.as_posix()}" + ) + res = invoke(cmd) + assert res.retcode == 1 + + # Download logs / fail 
because of invalid job_id + cmd = f"job download-logs {project_id} 9999 --output /tmp/invalid/" + with pytest.raises(SystemExit): + invoke(cmd) diff --git a/tests/test_project.py b/tests/test_project.py index fb0d3efa5..c4beeba4e 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -2,17 +2,17 @@ from devtools import debug -def test_project_create(register_user, invoke): - PROJECT_NAME = "project_name" +def test_project_create(invoke, new_name): + PROJECT_NAME = new_name() res = invoke(f"project new {PROJECT_NAME}") debug(res) assert res.data["name"] == PROJECT_NAME -def test_project_delete(register_user, invoke): +def test_project_delete(invoke, new_name): # Create project - res = invoke("project new MyProj1") + res = invoke(f"project new {new_name()}") res.show() project_id_1 = res.data["id"] @@ -29,54 +29,57 @@ def test_project_delete(register_user, invoke): res = invoke(f"project show {project_id_1}") -def test_project_create_batch(register_user, invoke): - res = invoke("--batch project new MyProj1") +def test_project_create_batch(invoke, new_name): + res = invoke("project list") + initial_projects = res.data + + res = invoke(f"--batch project new {new_name()}") debug(res) debug(res.data) - project_id = int(res.data) - assert project_id == 1 + project_id = res.data + res = invoke("project list") + assert len(res.data) == len(initial_projects) + 1 + assert any(project["id"] == project_id for project in res.data) -def test_project_list(register_user, invoke): + +def test_project_list(invoke, new_name): res = invoke("project list") - debug(res) - assert len(res.data) == 0 + initial_projects = len(res.data) res.show() - res = invoke("--batch project new proj0") + res = invoke(f"--batch project new {new_name()}") project0_id = res.data - res = invoke(f"--batch project add-dataset {project0_id} NAME /tmp") - res = invoke("--batch project new proj1") + res = invoke( + f"--batch project add-dataset {project0_id} {new_name()} /tmp" + ) + res = invoke(f"--batch project new {new_name()}") res = invoke("project list") debug(res) res.show() - assert len(res.data) == 2 + assert len(res.data) == initial_projects + 2 -@pytest.mark.parametrize("new_name", ["new_name", None]) -def test_edit_project( - register_user, - invoke, - new_name, - tmp_path, -): - name = "name" +@pytest.mark.parametrize("patch_name", [True, False]) +def test_edit_project(invoke, new_name, patch_name: bool): + name = new_name() res = invoke(f"project new {name}") project = res.data project_id = project["id"] cmd = f"project edit {project_id}" - if new_name: - cmd += f" --new-name {new_name}" + if patch_name: + NEW_NAME = new_name() + cmd += f" --new-name {NEW_NAME}" res = invoke(cmd) debug(res) assert res.retcode == 0 new_project = res.data - if new_name: - assert new_project["name"] == new_name + if patch_name: + assert new_project["name"] == NEW_NAME else: assert new_project["name"] == name diff --git a/tests/test_task.py b/tests/test_task.py index abe0d134d..49d06f10e 100644 --- a/tests/test_task.py +++ b/tests/test_task.py @@ -1,6 +1,5 @@ import json from pathlib import Path -from urllib.request import urlretrieve import pytest from devtools import debug @@ -11,16 +10,14 @@ COLLECTION_TIMEOUT = 15.0 -PACKAGE_URL = ( - "https://github.com/fractal-analytics-platform/fractal-server/" - "raw/main/tests/v2/fractal_tasks_mock/dist/" - "fractal_tasks_mock-0.0.1-py3-none-any.whl" -) -PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl" -urlretrieve(PACKAGE_URL, PACKAGE_PATH) - -def test_task_new(register_user, 
diff --git a/tests/test_task.py b/tests/test_task.py
index abe0d134d..49d06f10e 100644
--- a/tests/test_task.py
+++ b/tests/test_task.py
@@ -1,6 +1,5 @@
 import json
 from pathlib import Path
-from urllib.request import urlretrieve

 import pytest
 from devtools import debug
@@ -11,16 +10,14 @@

 COLLECTION_TIMEOUT = 15.0

-PACKAGE_URL = (
-    "https://github.com/fractal-analytics-platform/fractal-server/"
-    "raw/main/tests/v2/fractal_tasks_mock/dist/"
-    "fractal_tasks_mock-0.0.1-py3-none-any.whl"
-)
-PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl"
-urlretrieve(PACKAGE_URL, PACKAGE_PATH)
-
-def test_task_new(register_user, invoke, tmp_path):
+def test_task_new(
+    invoke,
+    invoke_as_custom_user,
+    tmp_path,
+    new_name,
+    user_factory,
+):
     # create a new task with just positional required args
     args_path = str(tmp_path / "args.json")
@@ -33,39 +30,55 @@ def test_task_new(register_user, invoke, tmp_path):
     with open(meta_path, "w") as f:
         json.dump(meta, f)

+    TASK_NAME = new_name()
     res = invoke(
-        "task new _name --command-parallel _command "
+        f"task new {TASK_NAME} --command-parallel _command "
         f"--version _version --meta-parallel {meta_path} "
         f"--args-schema-parallel {args_path} "
-        f"--args-schema-version 1.0.0"
+        f"--args-schema-version 1.0.0 "
+        "--private"
     )
     debug(res.data)
     assert res.retcode == 0
-    assert res.data["name"] == "_name"
+    assert res.data["name"] == TASK_NAME
     assert res.data["command_parallel"] == "_command"
     assert res.data["version"] == "_version"
     assert res.data["meta_parallel"] == meta
     assert res.data["args_schema_version"] == "1.0.0"
     first_task_id = int(res.data["id"])

+    # Check that task is actually private
+    new_user_credentials = dict(
+        email=f"{new_name()}@example.org",
+        password="1234",
+    )
+    user_factory(**new_user_credentials)
+    with pytest.raises(SystemExit):
+        res = invoke_as_custom_user(
+            f"task show {first_task_id}",
+            **new_user_credentials,
+        )
+
     # create a new task with batch option
-    res = invoke("--batch task new _name2 --command-parallel _command2")
+    TASK_NAME_2 = new_name()
+    res = invoke(
+        f"--batch task new {TASK_NAME_2} --command-parallel _command2"
+    )
     res.show()
     assert res.retcode == 0
     assert res.data == str(first_task_id + 1)

-    # create a new task with same source as before. Note that in check_response
+    # create a new task with same name as before. Note that in check_response
     # we have sys.exit(1) when status code is not the expected one
     with pytest.raises(SystemExit) as e:
-        invoke("task new _name2 --command-parallel _command2")
+        invoke(f"task new {TASK_NAME_2} --command-parallel _command2")
     assert e.value.code == 1

     # create a new task passing a non-existent file
     res = invoke(
         (
-            "task new _name --command-parallel _command --meta-parallel "
-            "./foo.pdf"
+            f"task new {new_name()} --command-parallel _command "
+            "--meta-parallel ./foo.pdf"
         )
     )
     assert res.retcode == 1
@@ -76,7 +89,7 @@ def test_task_new(register_user, invoke, tmp_path):
         json.dump(metanp, f)
     res = invoke(
         (
-            f"task new _name_np --command-non-parallel _command_np "
+            f"task new {new_name()} --command-non-parallel _command_np "
             f"--meta-non-parallel {metanp_path} "
             f"--args-schema-non-parallel {args_path} "
         )
     )
@@ -86,9 +99,9 @@

 def test_task_edit(
     caplog,
-    register_user,
     invoke,
     tmp_path,
+    new_name,
 ):
     args_path = str(tmp_path / "args.json")
@@ -101,7 +114,7 @@ def test_task_edit(
     with open(meta_path, "w") as f:
         json.dump(meta, f)

-    NAME = "task-name"
+    NAME = new_name()
     task = invoke(
         f"task new {NAME} --command-parallel _command "
         f"--version _version --meta-parallel {meta_path} "
@@ -133,7 +146,7 @@ def test_task_edit(
     task_np = invoke(
         (
-            f"task new _name_np --command-non-parallel _command_np "
+            f"task new {new_name()} --command-non-parallel _command_np "
             f"--version 1.0.1 --meta-non-parallel {meta_path}"
         )
     )
@@ -217,58 +230,3 @@ def test_task_edit(
     res = invoke(f"task edit --name {NAME} --output-types {n_o_types_path}")
     assert res.data["output_types"] == new_output_types
     assert res.retcode == 0
-
-
-@pytest.mark.skip(
-    reason="DELETE-task is not currently available on fractal-server"
-)
-def test_task_delete(
-    register_user,
-    user_factory,
-    invoke,
-    tmp_path,
-):
-    """
-    Test task delete
-    """
-    NAME = "_name"
-    VERSION = "1.0.0"
-
-    meta_path = str(tmp_path / "meta.json")
-    meta = {"a": "b"}
-    with open(meta_path, "w") as f:
-        json.dump(meta, f)
-
-    task = invoke(
-        (
-            f"task new {NAME} --command-parallel _command "
-            f"--version {VERSION} --meta-parallel {meta_path}"
-        )
-    )
-
-    task.show()
-    assert task.retcode == 0
-    task_id = task.data["id"]
-
-    # Test access control
-    with pytest.raises(SystemExit):
-        EMAIL = "someone@example.org"
-        PASSWORD = "123123"
-        user_factory(email=EMAIL, password=PASSWORD)
-        res = invoke(f"-u {EMAIL} -p {PASSWORD} task delete --id {task_id}")
-    # Test fail "id and version"
-    with pytest.raises(SystemExit):
-        invoke(f"task delete --id {task_id} --version {VERSION}")
-    # Test fail "name and wrong version"
-    with pytest.raises(SystemExit):
-        invoke(f"task delete --name {NAME} --version INVALID_VERSION")
-
-    # Test success
-    res = invoke("task list")
-    task_list = res.data
-    assert len(task_list) == 1
-    res = invoke(f"task delete --name {NAME} --version {VERSION}")
-    assert res.retcode == 0
-    res = invoke("task list")
-    task_list = res.data
-    assert len(task_list) == 0
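The rewritten `test_task_new` above also uses `invoke_as_custom_user`, another fixture defined outside this diff. The removed `test_task_delete` passed credentials straight to the CLI via the global `-u`/`-p` options, so a plausible sketch is a thin wrapper around the plain `invoke` fixture:

import pytest


@pytest.fixture
def invoke_as_custom_user(invoke):
    # Assumed shape, mirroring the removed `-u`/`-p` usage above: prepend
    # explicit credentials to the command before dispatching it.
    def _invoke(cmd: str, email: str, password: str):
        return invoke(f"-u {email} -p {password} {cmd}")

    return _invoke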
diff --git a/tests/test_task_collection.py b/tests/test_task_collection.py
index eca787b7d..07b171256 100644
--- a/tests/test_task_collection.py
+++ b/tests/test_task_collection.py
@@ -8,18 +8,7 @@
 from devtools import debug

-COLLECTION_TIMEOUT = 15.0
-
-PACKAGE_URL = (
-    "https://github.com/fractal-analytics-platform/fractal-server/"
-    "raw/main/tests/v2/fractal_tasks_mock/dist/"
-    "fractal_tasks_mock-0.0.1-py3-none-any.whl"
-)
-PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl"
-urlretrieve(PACKAGE_URL, PACKAGE_PATH)
-
-
-def test_task_collection_command(register_user, invoke, caplog):
+def test_task_collection_command(invoke, caplog):
     """
     Test that all `task collect` options are correctly parsed and included
     in the payload for the API request.
@@ -74,7 +63,7 @@ def test_task_collection_invalid_pinned_dependency(invoke, caplog):
     assert "Invalid pin:" in log_lines[0]


-def test_task_collection(register_user, invoke, testdata_path):
+def test_task_collection(invoke_as_custom_user, user_factory, new_name):
     """
     GIVEN a pip installable package containing fractal-compatible tasks
     WHEN the collection subcommand is called
@@ -82,8 +71,25 @@
         * the collection is initiated in the background
         * the server returns immediately
     """
+    COLLECTION_TIMEOUT = 15.0
+
+    PACKAGE_URL = (
+        "https://github.com/fractal-analytics-platform/fractal-server/"
+        "raw/main/tests/v2/fractal_tasks_mock/dist/"
+        "fractal_tasks_mock-0.0.1-py3-none-any.whl"
+    )
+    PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl"
+    urlretrieve(PACKAGE_URL, PACKAGE_PATH)
+
+    new_user = dict(email=f"{new_name()}@example.org", password="1234")
+    user_factory(**new_user)
+
+    res = invoke_as_custom_user("task list", **new_user)
+    initial_task_list = len(res.data)

-    res0 = invoke(f"task collect {PACKAGE_PATH}")
+    res0 = invoke_as_custom_user(
+        f"task collect --private {PACKAGE_PATH}", **new_user
+    )
     debug(res0)
     res0.show()
@@ -92,64 +98,44 @@
     state_id = res0.data["id"]
     debug(state_id)

-    time.sleep(0.5)
-
     # Wait until collection is complete
     starting_time = time.perf_counter()
     while True:
-        res1 = invoke(f"task check-collection {state_id}")
+        res1 = invoke_as_custom_user(
+            f"task check-collection {state_id}", **new_user
+        )
         debug(res1.data)
         assert res1.retcode == 0
         res1.show()
-        time.sleep(1)
+        time.sleep(0.1)
         if res1.data["data"]["status"] == "OK":
             break
         assert time.perf_counter() - starting_time < COLLECTION_TIMEOUT

-    res2 = invoke(f"task check-collection {state_id}" " --include-logs")
+    res2 = invoke_as_custom_user(
+        f"task check-collection {state_id} --include-logs", **new_user
+    )
     debug(res2.data)
     assert res2.retcode == 0
     res2.show()
     assert res2.data["data"]["status"] == "OK"

-    res = invoke("task list")
-    assert len(res.data) == 14
-
-
-def test_repeated_task_collection(register_user, invoke, testdata_path):
-    """
-    GIVEN
-        * a pip installable package containing fractal-compatible tasks
-        * a successful collection subcommand was executed
-    WHEN the collection subcommand is called a second time
-    THEN
-        * TBD..
-    """
-
-    res0 = invoke(f"--batch task collect {PACKAGE_PATH}")
-    debug(res0)
-
-    state_id = res0.data[0]  # extract id from batch string
-    debug(res0.data)
-    assert res0.data.startswith("1 ")
-
-    time.sleep(0.5)
-
-    # Wait until collection is complete
-    starting_time = time.perf_counter()
-    while True:
-        res1 = invoke(f"task check-collection {state_id}")
-        time.sleep(1)
-        if res1.data["data"]["status"] == "OK":
-            break
-        assert time.perf_counter() - starting_time < COLLECTION_TIMEOUT
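The polling loop above (retry every 0.1 s, fail once `COLLECTION_TIMEOUT` is exceeded) is a recurring pattern in these tests. A small helper along these lines could express it once, with `check` standing in for any callable that returns the current collection status:

import time


def wait_for_status(check, expected="OK", timeout=15.0, interval=0.1):
    # Poll `check()` until it returns `expected`, or raise after `timeout`
    # seconds; `interval` controls how aggressively the server is polled.
    start = time.perf_counter()
    while True:
        status = check()
        if status == expected:
            return
        if time.perf_counter() - start > timeout:
            raise TimeoutError(f"status still {status!r} after {timeout}s")
        time.sleep(interval)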
- """ - - res0 = invoke(f"--batch task collect {PACKAGE_PATH}") - debug(res0) - - state_id = res0.data[0] # extract id from batch string - debug(res0.data) - assert res0.data.startswith("1 ") - - time.sleep(0.5) - - # Wait until collection is complete - starting_time = time.perf_counter() - while True: - res1 = invoke(f"task check-collection {state_id}") - time.sleep(1) - if res1.data["data"]["status"] == "OK": - break - assert time.perf_counter() - starting_time < COLLECTION_TIMEOUT + res = invoke_as_custom_user("task list", **new_user) + assert len(res.data) == initial_task_list + 14 # Second collection with pytest.raises(SystemExit): - res0 = invoke(f"task collect {PACKAGE_PATH}") + res0 = invoke_as_custom_user( + f"task collect {PACKAGE_PATH}", **new_user + ) + +def test_task_collection_custom( + user_factory, new_name, tmp_path, invoke_as_custom_user, caplog +): + new_user = dict(email=f"{new_name()}@example.org", password="1234") + user_factory(**new_user) -def test_task_collection_custom(register_user, tmp_path, invoke, caplog): python_interpreter = sys.executable package_name = "fractal-client" manifest = str(tmp_path / "manifest.json") @@ -166,42 +152,42 @@ def test_task_collection_custom(register_user, tmp_path, invoke, caplog): json.dump(manifest_dict, f) cmd = ( - f"task collect-custom --package-name {package_name} " - f"source {python_interpreter} {manifest}" + f"task collect-custom --private --package-name {package_name} " + f"label {python_interpreter} {manifest}" ) - res = invoke(cmd) + res = invoke_as_custom_user(cmd, **new_user) assert res.retcode == 0 assert isinstance(res.data, list) - # Second API call fails (tasks with the same sources already exist) + # Second API call fails (tasks with the same identity already exist) caplog.clear() with pytest.raises(SystemExit): - res = invoke(cmd) + res = invoke_as_custom_user(cmd, **new_user) # Manifest was redacted, when logging the payload assert '"manifest": "[value too long - redacted]"' in caplog.text # Missing manifest file cmd = ( f"task collect-custom --package-name {package_name} " - f"source {python_interpreter} /foo/bar" + f"label {python_interpreter} /foo/bar" ) - res = invoke(cmd) + res = invoke_as_custom_user(cmd, **new_user) assert res.retcode == 1 assert "file must be on the same machine" in res.data cmd = ( - "--batch task collect-custom --package-root /tmp --version 2 " - f"source2 {python_interpreter} {manifest}" + "--batch task collect-custom --private --package-root /tmp --version 2" + f" label2 {python_interpreter} {manifest}" ) - res = invoke(cmd) + res = invoke_as_custom_user(cmd, **new_user) assert res.retcode == 0 assert isinstance(res.data, str) # test that '--package-root' and '--package-name' are mutually exclusive cmd = ( - "task collect-custom " + "task collect-custom --private" f"--package-root /tmp --package-name {package_name} " - f"source3 {python_interpreter} {manifest}" + f"label3 {python_interpreter} {manifest}" ) with pytest.raises(SystemExit): - res = invoke(cmd) + res = invoke_as_custom_user(cmd, **new_user) diff --git a/tests/test_unit_task_cache.py b/tests/test_unit_task_cache.py index f39557f5e..121b9d070 100644 --- a/tests/test_unit_task_cache.py +++ b/tests/test_unit_task_cache.py @@ -36,14 +36,14 @@ def test_search_in_task_list(): """Test all possible cases for function `_search_in_task_list`""" TASK_LIST = [ - dict(name="dummy1", id=101, version="1.0.1", source="a"), - dict(name="dummy2", id=201, version=None, source="b"), - dict(name="dummy2", id=202, version="2.0.0", source="c"), 
- dict(name="dummy3", id=301, version="3.0.0", source="d"), - dict(name="dummy3", id=302, version="3.1.4", source="e"), - dict(name="dummy4", id=401, version="4.0.0", source="f"), - dict(name="dummy4", id=402, version="4.1.1", source="g"), - dict(name="dummy4", id=401, version="4.1.1", source="h"), + dict(name="dummy1", id=101, version="1.0.1"), + dict(name="dummy2", id=201, version=None), + dict(name="dummy2", id=202, version="2.0.0"), + dict(name="dummy3", id=301, version="3.0.0"), + dict(name="dummy3", id=302, version="3.1.4"), + dict(name="dummy4", id=401, version="4.0.0"), + dict(name="dummy4", id=402, version="4.1.1"), + dict(name="dummy4", id=401, version="4.1.1"), ] # TEST zero matching diff --git a/tests/test_user.py b/tests/test_user.py index ea4c022ff..b097085c7 100644 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -1,22 +1,23 @@ import json -from os import environ import pytest from devtools import debug -EMAIL_USER = "test@testmail.com" -PWD_USER = "testpassword" +PWD_USER = "1234" -def test_register_as_user(invoke, register_user, caplog): +def test_register_as_user(invoke, caplog): with pytest.raises(SystemExit): - invoke(f"user register {EMAIL_USER} {PWD_USER}") + invoke("user register aaa bbb") debug(caplog.text) assert "403" in caplog.text @pytest.mark.parametrize("is_superuser", [True, False]) -def test_register_as_superuser(invoke_as_superuser, is_superuser: bool): +def test_register_as_superuser( + invoke_as_superuser, is_superuser: bool, new_name +): + EMAIL_USER = f"{new_name()}@example.org" if is_superuser: res = invoke_as_superuser( f"user register {EMAIL_USER} {PWD_USER} --superuser" @@ -39,7 +40,8 @@ def test_register_as_superuser(invoke_as_superuser, is_superuser: bool): assert res.data["is_verified"] -def test_register_as_superuser_with_batch(invoke_as_superuser): +def test_register_as_superuser_with_batch(invoke_as_superuser, new_name): + EMAIL_USER = f"{new_name()}@example.org" # Register a user with the --batch flag res = invoke_as_superuser(f"--batch user register {EMAIL_USER} {PWD_USER}") user_id = res.data @@ -52,24 +54,25 @@ def test_register_as_superuser_with_batch(invoke_as_superuser): assert res.retcode == 0 -def test_list_as_user(invoke, register_user, caplog): +def test_list_as_user(invoke, caplog): with pytest.raises(SystemExit): invoke("user list") debug(caplog.text) assert "403" in caplog.text -def test_list_as_superuser(invoke_as_superuser, register_user): +def test_list_as_superuser(invoke_as_superuser, superuser, tester): res = invoke_as_superuser("user list") debug(res.data) assert res.retcode == 0 list_emails = [user["email"] for user in res.data] debug(list_emails) - assert "admin@fractal.xy" in list_emails - assert environ["FRACTAL_USER"] in list_emails + assert superuser["email"] in list_emails + assert tester["email"] in list_emails -def test_show_as_user(invoke, invoke_as_superuser, register_user, caplog): +def test_show_as_user(invoke, invoke_as_superuser, caplog, new_name): + EMAIL_USER = f"{new_name()}@example.org" # Register a new user res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}") user_id = res.data["id"] @@ -80,7 +83,8 @@ def test_show_as_user(invoke, invoke_as_superuser, register_user, caplog): assert "403" in caplog.text -def test_show_as_superuser(invoke_as_superuser): +def test_show_as_superuser(invoke_as_superuser, new_name): + EMAIL_USER = f"{new_name()}@example.org" # Register a new user res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}") user_id = res.data["id"] @@ -91,7 +95,8 @@ 
@@ -91,7 +95,8 @@ def test_show_as_superuser(invoke_as_superuser):
     assert res.data["email"] == EMAIL_USER


-def test_edit_as_user(invoke, invoke_as_superuser, register_user, caplog):
+def test_edit_as_user(invoke, invoke_as_superuser, caplog, new_name):
+    EMAIL_USER = f"{new_name()}@example.org"
     # Register a new user
     res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
     user_id = res.data["id"]
@@ -109,14 +114,19 @@ def test_edit_as_user(invoke, invoke_as_superuser, register_user, caplog):
 @pytest.mark.parametrize("new_is_verified", [True, False])
 @pytest.mark.parametrize("new_is_non_verified", [True, False])
 def test_edit_as_superuser(
-    invoke_as_superuser, new_is_superuser, new_is_verified, new_is_non_verified
+    invoke_as_superuser,
+    new_is_superuser,
+    new_is_verified,
+    new_is_non_verified,
+    new_name,
 ):
+    EMAIL_USER = f"{new_name()}@example.org"
     # Register a new user
     res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
     assert res.retcode == 0
     user_id = res.data["id"]
     # Call fractal user edit
-    NEW_EMAIL = "asd@asd.new"
+    NEW_EMAIL = f"{new_name()}@example.org"
     NEW_CACHE_DIR = "/tmp/xxx"
     NEW_SLURM_USER = "new_slurm"
     NEW_USERNAME = "new_username"
@@ -185,7 +195,8 @@ def test_edit_as_superuser(
         assert res.data["is_verified"]


-def test_edit_user_settings(invoke_as_superuser, tmp_path):
+def test_edit_user_settings(invoke_as_superuser, tmp_path, new_name):
+    EMAIL_USER = f"{new_name()}@example.org"

     EMPTY_USER_SETTINGS = {
         "ssh_host": None,
@@ -300,7 +311,8 @@ def test_edit_arguments(invoke_as_superuser):
 @pytest.mark.skip(
     reason="Delete-user endpoint was removed in fractal-server 1.4.0"
 )
-def test_delete_as_user(invoke, invoke_as_superuser, register_user, caplog):
+def test_delete_as_user(invoke, invoke_as_superuser, caplog, new_name):
+    EMAIL_USER = f"{new_name()}@example.org"
     # Register a new user
     res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
     user_id = res.data["id"]
@@ -314,7 +326,8 @@ def test_delete_as_user(invoke, invoke_as_superuser, register_user, caplog):
 @pytest.mark.skip(
     reason="Delete-user endpoint was removed in fractal-server 1.4.0"
 )
-def test_delete_as_superuser(invoke_as_superuser, caplog):
+def test_delete_as_superuser(invoke_as_superuser, caplog, new_name):
+    EMAIL_USER = f"{new_name()}@example.org"
     # Register a new user
     res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
     user_id = res.data["id"]
@@ -329,11 +342,11 @@ def test_delete_as_superuser(invoke_as_superuser, caplog):
     assert "Not Found" in caplog.text


-def test_whoami_as_user(invoke, register_user):
+def test_whoami_as_user(invoke, tester):
     res = invoke("user whoami")
     assert res.retcode == 0
     debug(res.data)
-    assert res.data["email"] == environ["FRACTAL_USER"]
+    assert res.data["email"] == tester["email"]
     assert not res.data["is_superuser"]

     user_id = res.data["id"]
@@ -344,9 +357,9 @@ def test_whoami_as_user(invoke, register_user):
     assert res.retcode == 0


-def test_whoami_as_superuser(invoke_as_superuser):
+def test_whoami_as_superuser(invoke_as_superuser, superuser):
     res = invoke_as_superuser("user whoami")
     assert res.retcode == 0
     debug(res.data)
-    assert res.data["email"] == "admin@fractal.xy"
+    assert res.data["email"] == superuser["email"]
     assert res.data["is_superuser"]
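The user tests now read the two reference accounts from `superuser` and `tester` fixtures instead of `FRACTAL_USER`-style environment variables. Assuming these fixtures only need to expose credentials, minimal sketches could be (the concrete values are guesses; only `admin@fractal.xy` is attested by the removed assertions):

import pytest


@pytest.fixture(scope="session")
def superuser():
    # "admin@fractal.xy" was the default superuser in the old assertions;
    # the password here is a placeholder.
    return dict(email="admin@fractal.xy", password="1234")


@pytest.fixture(scope="session")
def tester():
    # Hypothetical pre-registered non-superuser account.
    return dict(email="client_tester@example.org", password="1234")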
diff --git a/tests/test_workflow.py b/tests/test_workflow.py
index cdbf7ec8f..a0e7cf726 100644
--- a/tests/test_workflow.py
+++ b/tests/test_workflow.py
@@ -1,6 +1,5 @@
 import json
 import logging
-from pathlib import Path

 import pytest  # noqa F401
 from devtools import debug
@@ -8,14 +7,16 @@

 TIMEOUT = 15.0

-def test_workflow_new(register_user, invoke):
-    PROJECT_NAME = "project_name"
+def test_workflow_new(invoke, new_name):
+    PROJECT_NAME = new_name()
     res = invoke(f"project new {PROJECT_NAME}")
+    debug(res)
+    debug(res.data)
     proj = res.data
     assert proj["name"] == PROJECT_NAME
     project_id = proj["id"]

-    WORKFLOW_NAME = "mywf"
+    WORKFLOW_NAME = new_name()
     res = invoke(f"workflow new {WORKFLOW_NAME} {project_id}")
     wf = res.data
     debug(wf)
@@ -24,21 +25,21 @@ def test_workflow_new(register_user, invoke):
     assert wf["project_id"] == project_id

     # Include --batch
-    WORKFLOW_NAME = "mywf-2"
+    WORKFLOW_NAME = new_name()
     res = invoke(f"--batch workflow new {WORKFLOW_NAME} {project_id}")
     assert res.retcode == 0
     debug(res.data)
     assert isinstance(res.data, int)


-def test_workflow_delete(register_user, invoke):
+def test_workflow_delete(invoke, new_name):
     # Create project
-    res_pj = invoke("project new project_name")
+    res_pj = invoke(f"project new {new_name()}")
     assert res_pj.retcode == 0
     project_id = res_pj.data["id"]

     # Create workflow
-    res_wf = invoke(f"workflow new MyWorkflow {project_id}")
+    res_wf = invoke(f"workflow new {new_name()} {project_id}")
     workflow_id = res_wf.data["id"]
     assert res_wf.retcode == 0

@@ -59,14 +60,14 @@ def test_workflow_delete(register_user, invoke):
     assert len(res_list.data) == 0


-def test_workflow_edit(register_user, invoke):
+def test_workflow_edit(invoke, new_name):
     # Create a project
-    res_pj = invoke("project new project_name_1")
+    res_pj = invoke(f"project new {new_name()}")
     assert res_pj.retcode == 0
     project_id = res_pj.data["id"]

     # Add a workflow
-    res_wf = invoke(f"workflow new MyWorkflow {project_id}")
+    res_wf = invoke(f"workflow new {new_name()} {project_id}")
     workflow_id = res_wf.data["id"]
     assert res_wf.retcode == 0

@@ -76,7 +77,7 @@ def test_workflow_edit(register_user, invoke):
     res = invoke(cmd)

     # Edit workflow name
-    NAME = "new-workflow-name"
+    NAME = new_name()
     cmd = f"workflow edit {project_id} {workflow_id} --new-name {NAME}"
     debug(cmd)
     res_edit = invoke(cmd)
@@ -90,17 +91,17 @@ def test_workflow_edit(register_user, invoke):
     assert res.data["name"] == NAME


-def test_workflow_list(register_user, invoke):
-    PROJECT_NAME = "project_name"
+def test_workflow_list(invoke, new_name):
+    PROJECT_NAME = new_name()
     res_pj = invoke(f"project new {PROJECT_NAME}")
     project_id = res_pj.data["id"]
     debug(project_id)

-    res_wf = invoke(f"workflow new WF1 {project_id}")
+    res_wf = invoke(f"workflow new {new_name()} {project_id}")
     res_wf.show()
     assert res_wf.retcode == 0

-    res_wf = invoke(f"workflow new WF2 {project_id}")
+    res_wf = invoke(f"workflow new {new_name()} {project_id}")
     res_wf.show()
     assert res_wf.retcode == 0

@@ -111,21 +112,21 @@ def test_workflow_list(register_user, invoke):
     assert len(res_list.data) == 2
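From this point on, `workflow_factory` and `task_factory` return plain dicts (hence the `wf['id']` and `t['name']` lookups below) and tasks are described by their commands rather than by `user_id`/`type`. Assuming the factories simply go through the CLI like everything else in this suite, they could be sketched as:

import pytest


@pytest.fixture
def task_factory(invoke):
    # Assumed sketch: create a task via `task new` and return the JSON
    # response as a dict, matching the t["id"] / t["name"] lookups below.
    def _create(name, command_parallel=None, command_non_parallel=None):
        cmd = f"task new {name}"
        if command_parallel is not None:
            cmd += f" --command-parallel {command_parallel}"
        if command_non_parallel is not None:
            cmd += f" --command-non-parallel {command_non_parallel}"
        return invoke(cmd).data

    return _create


@pytest.fixture
def workflow_factory(invoke):
    # Assumed sketch: same idea for workflows.
    def _create(name, project_id):
        return invoke(f"workflow new {name} {project_id}").data

    return _create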


-def test_workflow_list_when_two_projects_exist(register_user, invoke):
-    res_pj1 = invoke("project new PRJ1")
-    res_pj2 = invoke("project new PRJ2")
+def test_workflow_list_when_two_projects_exist(invoke, new_name):
+    res_pj1 = invoke(f"project new {new_name()}")
+    res_pj2 = invoke(f"project new {new_name()}")
     project_id_1 = res_pj1.data["id"]
     project_id_2 = res_pj2.data["id"]

     NUM_WF_PROJECT_1 = 2
     NUM_WF_PROJECT_2 = 4

-    for wf in range(NUM_WF_PROJECT_1):
-        res_wf = invoke(f"workflow new WF{wf} {project_id_1}")
+    for _ in range(NUM_WF_PROJECT_1):
+        res_wf = invoke(f"workflow new {new_name()} {project_id_1}")
         assert res_wf.retcode == 0

-    for wf in range(NUM_WF_PROJECT_2):
-        res_wf = invoke(f"workflow new WF{wf} {project_id_2}")
+    for _ in range(NUM_WF_PROJECT_2):
+        res_wf = invoke(f"workflow new {new_name()} {project_id_2}")
         assert res_wf.retcode == 0

     res_list_1 = invoke(f"workflow list {project_id_1}")
@@ -140,10 +141,10 @@ def test_workflow_list_when_two_projects_exist(register_user, invoke):

 def test_workflow_add_task(
     caplog,
     invoke,
-    register_user,
     task_factory,
     workflow_factory,
-    tmp_path: Path,
+    tmp_path,
+    new_name,
 ):
     """
     GIVEN a workflow
@@ -154,10 +155,11 @@ def test_workflow_add_task(
         the WorkflowTask's are correctly registered in the db, and the
         returned object has the right properties
     """
-    res = invoke("project new MyProject")
+    res = invoke(f"project new {new_name()}")
     project_id = res.data["id"]
-    wf = workflow_factory(project_id=project_id)
-    t = task_factory(user_id=register_user["id"], type="parallel")
+
+    wf = workflow_factory(name=new_name(), project_id=project_id)
+    t = task_factory(name=new_name(), command_parallel="pwd")

     INPUT_FILTERS = {"attributes": {"a": 1}, "types": {"b": True}}
     ARGS_PARALLEL = {"image_dir": "/asdasd"}
@@ -185,7 +187,7 @@ def test_workflow_add_task(
     with meta_non_parallel_file.open("w") as f:
         json.dump(META_NON_PARALLEL, f)

-    cmd = f"workflow add-task {project_id} {wf.id}"
+    cmd = f"workflow add-task {project_id} {wf['id']}"
     # Test fail with no task_id nor task_name
     with pytest.raises(SystemExit):
         invoke(cmd)
@@ -193,7 +195,7 @@
     with pytest.raises(SystemExit):
         invoke(
             (
-                f"{cmd} --task-id {t.id} --task-name {t.name} "
+                f"{cmd} --task-id {t['id']} --task-name {t['name']} "
                 f"--args-parallel {args_parallel_file}"
             )
         )
@@ -201,7 +203,7 @@
     with pytest.raises(SystemExit):
         invoke(
             (
-                f"{cmd} --task-id {t.id} --task-version 1.2.3.4.5.6 "
+                f"{cmd} --task-id {t['id']} --task-version 1.2.3.4.5.6 "
                 f"--args-parallel {args_parallel_file}"
             )
         )
@@ -210,7 +212,7 @@
     )

     cmd_args = (
-        f"{cmd} --task-id {t.id} --input-filters {input_filters_file} "
+        f"{cmd} --task-id {t['id']} --input-filters {input_filters_file} "
         f"--args-parallel {args_parallel_file} "
     )
     debug(cmd_args)
@@ -227,8 +229,8 @@

     # Add a WorkflowTask by Task.name with the --batch option
     cmd_batch = (
-        f"--batch workflow add-task {project_id} {wf.id} "
-        f"--task-name {t.name} --args-parallel {args_parallel_file}"
+        f"--batch workflow add-task {project_id} {wf['id']} "
+        f"--task-name {t['name']} --args-parallel {args_parallel_file}"
     )
     debug(cmd_batch)
     res = invoke(cmd_batch)
@@ -238,7 +240,7 @@

     # Add a WorkflowTask with meta-parallel args
     cmd_meta = (
-        f"{cmd} --task-id {t.id} --input-filters {input_filters_file} "
+        f"{cmd} --task-id {t['id']} --input-filters {input_filters_file} "
         f"--args-parallel {args_parallel_file} "
         f"--meta-parallel {meta_parallel_file} "
     )
@@ -255,12 +257,12 @@

     # Add a WorkflowTask with meta-non-parallel args
     t_non_parallel = task_factory(
-        user_id=register_user["id"],
-        type="non_parallel",
+        name=new_name(),
+        command_non_parallel="pwd",
     )

     cmd_meta = (
-        f"{cmd} --task-id {t_non_parallel.id} "
+        f"{cmd} --task-id {t_non_parallel['id']} "
         f"--input-filters {input_filters_file} "
         f"--args-non-parallel {args_non_parallel_file} "
         f"--meta-non-parallel {meta_non_parallel_file}"
@@ -277,7 +279,7 @@
     assert workflow_task["args_non_parallel"] == ARGS_NON_PARALLEL

     # Check that the WorkflowTask's in Workflow.task_list have the correct IDs
-    cmd = f"workflow show {project_id} {wf.id}"
+    cmd = f"workflow show {project_id} {wf['id']}"
     res = invoke(cmd)
     assert res.retcode == 0
     workflow = res.data
@@ -293,10 +295,10 @@

 def test_workflow_add_task_by_name(
     invoke,
-    register_user,
     task_factory,
     workflow_factory,
-    tmp_path: Path,
+    tmp_path,
+    new_name,
 ):
     """
     GIVEN a workflow and a task
@@ -304,10 +306,10 @@
     THEN the WorkflowTask is added (for a valid name)
         or an error is raised (for invalid name)
     """
-    res = invoke("project new MyProject")
+    res = invoke(f"project new {new_name()}")
     project_id = res.data["id"]
-    wf = workflow_factory(project_id=project_id)
-    task = task_factory(user_id=register_user["id"], type="parallel")
+    wf = workflow_factory(name=new_name(), project_id=project_id)
+    task = task_factory(name=new_name(), command_parallel="parallel")
     debug(task)

     ARGS = {"image_dir": "/asdasd"}
@@ -317,20 +319,20 @@
         json.dump(ARGS, f)

     cmd = (
-        f"workflow add-task {project_id} {wf.id} --task-name {task.name} "
-        f"--args-parallel {args_file}"
+        f"workflow add-task {project_id} {wf['id']} --task-name {task['name']}"
+        f" --args-parallel {args_file}"
     )
     debug(cmd)
     res = invoke(cmd)
     assert res.retcode == 0
     debug(res.data)
-    assert res.data["task"]["id"] == task.id
+    assert res.data["task"]["id"] == task["id"]

     # Fail when adding task via a wrong name
     with pytest.raises(SystemExit):
         cmd = (
-            f"workflow add-task {project_id} {wf.id} --task-name INVALID_NAME "
-            f"--args-parallel {args_file}"
+            f"workflow add-task {project_id} {wf['id']} "
+            f"--task-name INVALID_NAME --args-parallel {args_file}"
         )
         debug(cmd)
         res = invoke(cmd)
@@ -339,10 +341,10 @@
 @pytest.mark.skip(reason="Definition of expected behavior is ongoing")
 def test_task_cache_with_non_unique_names(
     invoke,
-    register_user,
     task_factory,
     workflow_factory,
-    tmp_path: Path,
+    tmp_path,
+    new_name,
     caplog: pytest.LogCaptureFixture,
 ):
     """
@@ -353,7 +355,7 @@
         * Addressing tasks by name raises a FileNotFoundError
     """
-    res = invoke("project new MyProject")
+    res = invoke(f"project new {new_name()}")
     project_id = res.data["id"]

     ARGS = {"image_dir": "/asdasd"}
@@ -361,9 +363,9 @@
     with args_file.open("w") as f:
         json.dump(ARGS, f)
     # Create two tasks with the same name
-    task1 = task_factory(user_id=register_user["id"], type="parallel")
-    task2 = task_factory(user_id=register_user["id"], type="parallel")
-    assert task1.name == task2.name
+    task1 = task_factory(name=new_name(), command_parallel="parallel")
+    task2 = task_factory(name=task1["name"], command_parallel="parallel")
+    assert task1["name"] == task2["name"]

     # Verify that a warning is raised upon creating the cache file
     caplog.set_level(logging.WARNING)
@@ -374,10 +376,10 @@

     # Verify that adding tasks to a workflow by name (as opposed to "by id")
     # fails because of missing cache file
-    wf = workflow_factory(project_id=project_id)
+    wf = workflow_factory(name=new_name(), project_id=project_id)
     cmd = (
-        f"workflow add-task {project_id} {wf.id} --task-name {task1.name} "
-        f"--args-parallel {args_file}"
+        f"workflow add-task {project_id} {wf['id']} "
+        f"--task-name {task1['name']} --args-parallel {args_file}"
     )
     debug(cmd)
     with pytest.raises(FileNotFoundError):
@@ -386,16 +388,16 @@

 def test_workflow_rm_task(
     invoke,
-    register_user,
     task_factory,
     workflow_factory,
-    tmp_path: Path,
+    tmp_path,
+    new_name,
 ):
     # Create project, workflow and task
-    res = invoke("project new MyProject")
+    res = invoke(f"project new {new_name()}")
     project_id = res.data["id"]
-    wf = workflow_factory(project_id=project_id)
-    t = task_factory(user_id=register_user["id"], type="parallel")
+    wf = workflow_factory(name=new_name(), project_id=project_id)
+    t = task_factory(name=new_name(), command_parallel="parallel")

     ARGS = {"image_dir": "/asdasd"}

@@ -405,7 +407,7 @@

     # Add task to workflow, twice
     cmd = (
-        f"workflow add-task {project_id} {wf.id} --task-id {t.id} "
+        f"workflow add-task {project_id} {wf['id']} --task-id {t['id']} "
         f"--args-parallel {args_file}"
     )
     res = invoke(cmd)
@@ -415,7 +417,7 @@
     workflow_task_id_1 = res.data["id"]

     # Remove task 1 from workflow
-    cmd = f"workflow rm-task {project_id} {wf.id} {workflow_task_id_1}"
+    cmd = f"workflow rm-task {project_id} {wf['id']} {workflow_task_id_1}"
     debug(cmd)
     res = invoke(cmd)
     assert res.retcode == 0
@@ -423,11 +425,7 @@

 def test_workflow_edit_task(
-    invoke,
-    register_user,
-    task_factory,
-    workflow_factory,
-    tmp_path: Path,
+    invoke, task_factory, workflow_factory, tmp_path, new_name
 ):
     """
     GIVEN a workflow
@@ -437,10 +435,10 @@ def test_workflow_edit_task(
     gargs
     """
-    res = invoke("project new MyProject")
+    res = invoke(f"project new {new_name()}")
     project_id = res.data["id"]
-    wf = workflow_factory(project_id=project_id)
-    t = task_factory(user_id=register_user["id"], type="parallel")
+    wf = workflow_factory(name=new_name(), project_id=project_id)
+    t = task_factory(name=new_name(), command_parallel="parallel")

     INPUT_FILTERS = {"attributes": {"a": 1}, "types": {"b": True}}
     ARGS_PARALLEL = {"image_dir": "/asdasd"}
@@ -470,7 +468,7 @@

     # Create task, without overriding arguments
     cmd = (
-        f"workflow add-task {project_id} {wf.id} --task-id {t.id} "
+        f"workflow add-task {project_id} {wf['id']} --task-id {t['id']} "
         f"--args-parallel {args_parallel_file}"
     )
     res = invoke(cmd)
@@ -480,7 +478,7 @@
     debug(res.data)
     workflow_task_id = res.data["id"]
     cmd = (
-        f"workflow edit-task {project_id} {wf.id} {workflow_task_id} "
+        f"workflow edit-task {project_id} {wf['id']} {workflow_task_id} "
         f"--input-filters {input_filters_file}"
     )
     debug(cmd)
@@ -492,7 +490,7 @@
     debug(res.data)
     workflow_task_id = res.data["id"]
     cmd = (
-        f"workflow edit-task {project_id} {wf.id} {workflow_task_id} "
+        f"workflow edit-task {project_id} {wf['id']} {workflow_task_id} "
         f"--args-parallel {args_parallel_file} "
         f"--meta-parallel {meta_parallel_file}"
     )
@@ -504,12 +502,12 @@

     # Add a WorkflowTask with meta-non-parallel args
     t_non_parallel = task_factory(
-        user_id=register_user["id"], type="non_parallel"
+        name=new_name(), command_non_parallel="non_parallel"
     )

     cmd = (
-        f"workflow add-task {project_id} {wf.id} "
-        f"--task-id {t_non_parallel.id} "
+        f"workflow add-task {project_id} {wf['id']} "
+        f"--task-id {t_non_parallel['id']} "
         f"--args-non-parallel {args_non_parallel_file}"
     )
     res = invoke(cmd)
@@ -517,7 +515,7 @@
     workflow_task_id = res.data["id"]

     cmd = (
-        f"workflow edit-task {project_id} {wf.id} {workflow_task_id} "
+        f"workflow edit-task {project_id} {wf['id']} {workflow_task_id} "
         f"--input-filters {input_filters_file} "
         f"--args-non-parallel {args_non_parallel_file} "
         f"--meta-non-parallel {meta_non_parallel_file}"
@@ -534,31 +532,34 @@ def test_workflow_edit_task(

 def test_workflow_import(
-    register_user,
     invoke,
-    testdata_path: Path,
-    task_factory,
-    caplog,
+    testdata_path,
+    new_name,
 ):
+    res = invoke(
+        "task new --command-parallel pwd --command-non-parallel pwd dummy"
+    )
+    debug(res.data)
+    assert res.retcode == 0
+
+    res = invoke(
+        "task new --command-parallel pwd --command-non-parallel pwd dummy2"
+    )
+    debug(res.data)
+    assert res.retcode == 0
+
     # create project
-    PROJECT_NAME = "project_name"
+    PROJECT_NAME = new_name()
     res_pj = invoke(f"project new {PROJECT_NAME}")
     assert res_pj.retcode == 0
     project_id = res_pj.data["id"]

-    task_factory(
-        user_id=register_user["id"], name="task", source="PKG_SOURCE:dummy2"
-    )
-
     # Fail due to missing --json-file argument
     with pytest.raises(SystemExit):
         invoke(f"workflow import --project-id {project_id}")
-
     # import workflow into project
     filename = str(testdata_path / "import-export/workflow.json")
-    with open(filename, "r") as f:
-        debug(f.read())
     res = invoke(
         f"workflow import --project-id {project_id} --json-file {filename}"
     )
@@ -570,8 +571,9 @@ def test_workflow_import(
     # get the workflow from the server, and check that it is the same
     workflow_id = res.data["id"]
     res = invoke(f"workflow show {project_id} {workflow_id}")
+    debug(res.retcode, res.data)
     assert res.retcode == 0
-    imported_workflow["task_list"][-1]["warning"] = None
+    res.data["task_list"][-1]["warning"] = None
     assert res.data == imported_workflow

     # import workflow into project, with --batch
@@ -580,40 +582,39 @@ def test_workflow_import(
         f"--batch workflow import --project-id {project_id} "
         f"--json-file {filename}"
     )
+    debug(res.retcode, res.data)
     assert res.retcode == 0
-    assert res.data == "2 2"

     # import workflow into project, with --workflow-name
-    NEW_NAME = "new name for workflow"
     res = invoke(
         f"workflow import --project-id {project_id} --json-file {filename} "
-        f' --workflow-name "{NEW_NAME}"'
+        f" --workflow-name MyWorkflow-V2-xxx"
     )
+    debug(res.data)
     assert res.retcode == 0
-    assert res.data["name"] == NEW_NAME
+    assert res.data["name"] == "MyWorkflow-V2-xxx"


 def test_workflow_export(
-    register_user,
     invoke,
     workflow_factory,
-    tmp_path: Path,
+    tmp_path,
     task_factory,
-    caplog,
+    new_name,
 ):
-    res = invoke("project new testproject")
+    res = invoke(f"project new {new_name()}")
     assert res.retcode == 0
     project_id = res.data["id"]
-    NAME = "WorkFlow"
+    NAME = new_name()
     wf = workflow_factory(project_id=project_id, name=NAME)
-    prj_id = wf.project_id
-    wf_id = wf.id
+    prj_id = wf["project_id"]
+    wf_id = wf["id"]
     filename = str(tmp_path / "exported_wf.json")

-    task = task_factory(user_id=register_user["id"])
-    res = invoke(f"workflow add-task {prj_id} {wf_id} --task-id {task.id}")
+    task = task_factory(name=new_name(), command_parallel="pwd")
+    res = invoke(f"workflow add-task {prj_id} {wf_id} --task-id {task['id']}")
     assert res.retcode == 0

     res = invoke(f"workflow export {prj_id} {wf_id} --json-file {filename}")