diff --git a/README.md b/README.md index d30a84d..a18c7d4 100644 --- a/README.md +++ b/README.md @@ -115,6 +115,7 @@ If you want to test this package on examples from this repo: ```bash git clone git@github.com:artefactory/vertex-pipelines-deployer.git poetry install +poetry shell # if you want to activate the virtual environment cd example ``` diff --git a/deployer/_templates/deployer.env.jinja b/deployer/_templates/deployer.env.jinja index f7ea74a..13642fa 100644 --- a/deployer/_templates/deployer.env.jinja +++ b/deployer/_templates/deployer.env.jinja @@ -3,12 +3,17 @@ GCP_REGION= TAG=latest -# Google Artifact Registry -GAR_LOCATION= # Google Artifact Registry repo location +# GOOGLE ARTIFACT REGISTRY +# Google Artifact Registry repo location +GAR_LOCATION= + GAR_DOCKER_REPO_ID= GAR_PIPELINES_REPO_ID= GAR_VERTEX_BASE_IMAGE_NAME= -# Vertex AI -VERTEX_STAGING_BUCKET_NAME= # without gs:// -VERTEX_SERVICE_ACCOUNT= # full service account email +# VERTEX AI +# without gs:// +VERTEX_STAGING_BUCKET_NAME= + +# full service account email +VERTEX_SERVICE_ACCOUNT= diff --git a/deployer/_templates/deployment/Dockerfile.jinja b/deployer/_templates/deployment/Dockerfile.jinja index 36d9814..c77a797 100644 --- a/deployer/_templates/deployment/Dockerfile.jinja +++ b/deployer/_templates/deployment/Dockerfile.jinja @@ -16,9 +16,10 @@ ENV VERTEX_SERVICE_ACCOUNT=${VERTEX_SERVICE_ACCOUNT} WORKDIR /app -COPY deployer-requirements.txt . +COPY requirements.txt . +COPY requirements-vertex.txt . RUN python3 -m pip install --upgrade pip -RUN python3 -m pip install -r deployer-requirements.txt +RUN python3 -m pip install -r requirements-vertex.txt ENV PYTHONPATH "${PYTHONPATH}:." diff --git a/deployer/_templates/requirements-vertex.txt.jinja b/deployer/_templates/requirements-vertex.txt.jinja index 9be2991..7822748 100644 --- a/deployer/_templates/requirements-vertex.txt.jinja +++ b/deployer/_templates/requirements-vertex.txt.jinja @@ -5,4 +5,4 @@ # deploy kfp google-cloud-aiplatform -vertex-deployer={{ deployer_version }} +vertex-deployer=={{ deployer_version }} diff --git a/deployer/cli.py b/deployer/cli.py index 28463d4..442073b 100644 --- a/deployer/cli.py +++ b/deployer/cli.py @@ -227,6 +227,15 @@ def deploy( # noqa: C901 "Defaults to '{pipeline_name}-experiment'.", ), ] = None, + run_name: Annotated[ + Optional[str], + typer.Option( + "--run-name", + "-rn", + help="The pipeline's run name. Displayed in the UI." 
+ "Defaults to '{pipeline_name}-{tags}-%Y%m%d%H%M%S'.", + ), + ] = None, skip_validation: Annotated[ bool, typer.Option( @@ -276,6 +285,7 @@ def deploy( # noqa: C901 staging_bucket_name=vertex_settings.VERTEX_STAGING_BUCKET_NAME, service_account=vertex_settings.VERTEX_SERVICE_ACCOUNT, pipeline_name=pipeline_name, + run_name=run_name, pipeline_func=pipeline_func, gar_location=vertex_settings.GAR_LOCATION, gar_repo_id=vertex_settings.GAR_PIPELINES_REPO_ID, diff --git a/deployer/constants.py b/deployer/constants.py index a157901..fae789d 100644 --- a/deployer/constants.py +++ b/deployer/constants.py @@ -1,3 +1,4 @@ +import re from pathlib import Path TEMPLATES_PATH = Path(__file__).parent / "_templates" @@ -64,3 +65,5 @@ "you can add the following flags to the deploy command if not set in your config:\n" "--schedule --cron=cron_expression --scheduler-timezone=IANA_time_zone\n" ) + +VALID_RUN_NAME_PATTERN = re.compile("^[a-z][-a-z0-9]{0,127}$", re.IGNORECASE) diff --git a/deployer/init_deployer.py b/deployer/init_deployer.py index 96eaa02..3f2b79a 100644 --- a/deployer/init_deployer.py +++ b/deployer/init_deployer.py @@ -4,8 +4,8 @@ from jinja2 import Environment, FileSystemLoader, meta from rich.tree import Tree -from deployer import constants from deployer.__init__ import __version__ as deployer_version +from deployer.constants import INSTRUCTIONS, TEMPLATES_DEFAULT_STRUCTURE, TEMPLATES_PATH from deployer.settings import ( DeployerSettings, find_pyproject_toml, @@ -72,7 +72,7 @@ def _create_file_from_template(path: Path, template_path: Path, **kwargs): ) else: path.write_text(content) - except (FileNotFoundError, KeyError, jinja2.TemplateError) as e: + except (KeyError, jinja2.TemplateError, jinja2.TemplateNotFound) as e: raise TemplateFileCreationError( f"An error occurred while creating the file from template: {e}" ) from e @@ -85,9 +85,9 @@ def _generate_templates_mapping( ): """Generate the mapping of a list of templates to create and their variables.""" templates_mapping = {} - env = Environment(loader=FileSystemLoader(str(constants.TEMPLATES_PATH)), autoescape=True) + env = Environment(loader=FileSystemLoader(str(TEMPLATES_PATH)), autoescape=True) for template, template_path in templates_dict.items(): - template_name = str(template_path.relative_to(constants.TEMPLATES_PATH)) + template_name = str(template_path.relative_to(TEMPLATES_PATH)) template_source = env.loader.get_source(env, template_name)[0] parsed_content = env.parse(template_source) variables = meta.find_undeclared_variables(parsed_content) @@ -110,12 +110,10 @@ def build_default_folder_structure(deployer_settings: DeployerSettings): """Create the default folder structure for the Vertex Pipelines project.""" vertex_folder_path = deployer_settings.vertex_folder_path dockerfile_path = vertex_folder_path / str( - constants.TEMPLATES_DEFAULT_STRUCTURE["dockerfile"].relative_to(constants.TEMPLATES_PATH) + TEMPLATES_DEFAULT_STRUCTURE["dockerfile"].relative_to(TEMPLATES_PATH) ).replace(".jinja", "") cloud_build_path = vertex_folder_path / str( - constants.TEMPLATES_DEFAULT_STRUCTURE["cloudbuild_local"].relative_to( - constants.TEMPLATES_PATH - ) + TEMPLATES_DEFAULT_STRUCTURE["cloudbuild_local"].relative_to(TEMPLATES_PATH) ).replace(".jinja", "") # Create the folder structure @@ -130,7 +128,7 @@ def build_default_folder_structure(deployer_settings: DeployerSettings): } templates_mapping = _generate_templates_mapping( - constants.TEMPLATES_DEFAULT_STRUCTURE, mapping_variables, vertex_folder_path + TEMPLATES_DEFAULT_STRUCTURE, 
mapping_variables, vertex_folder_path ) # Create the files @@ -177,6 +175,4 @@ def show_commands(deployer_settings: DeployerSettings): vertex_folder_path = deployer_settings.vertex_folder_path build_base_image_path = vertex_folder_path / "deployment" / "build_base_image.sh" - console.print( - constants.INSTRUCTIONS.format(build_base_image_path=build_base_image_path), style="blue" - ) + console.print(INSTRUCTIONS.format(build_base_image_path=build_base_image_path), style="blue") diff --git a/deployer/pipeline_deployer.py b/deployer/pipeline_deployer.py index 7b591d6..eb1285c 100644 --- a/deployer/pipeline_deployer.py +++ b/deployer/pipeline_deployer.py @@ -1,6 +1,7 @@ from __future__ import annotations import os +from datetime import datetime from pathlib import Path from typing import Callable, List, Optional @@ -11,6 +12,7 @@ from loguru import logger from requests import HTTPError +from deployer import constants from deployer.utils.exceptions import ( MissingGoogleArtifactRegistryHostError, TagNotFoundError, @@ -24,6 +26,7 @@ def __init__( self, pipeline_name: str, pipeline_func: Callable, + run_name: Optional[str] = None, project_id: Optional[str] = None, region: Optional[str] = None, staging_bucket_name: Optional[str] = None, @@ -39,6 +42,7 @@ def __init__( self.service_account = service_account self.pipeline_name = pipeline_name + self.run_name = run_name self.pipeline_func = pipeline_func self.gar_location = gar_location @@ -106,6 +110,26 @@ def _check_experiment_name(self, experiment_name: Optional[str] = None) -> str: return experiment_name + def _check_run_name(self, tag: Optional[str] = None) -> None: + """Each run name (job_id) must be unique. + We thus always add a timestamp to ensure uniqueness. + """ + now_str = datetime.now().strftime("%Y%m%d-%H%M%S") + if self.run_name is None: + self.run_name = f"{self.pipeline_name}" + if tag: + self.run_name += f"-{tag}" + + self.run_name = self.run_name.replace("_", "-") + self.run_name += f"-{now_str}" + + if not constants.VALID_RUN_NAME_PATTERN.match(self.run_name): + raise ValueError( + f"Run name {self.run_name} does not match the pattern" + f" {constants.VALID_RUN_NAME_PATTERN.pattern}" + ) + logger.debug(f"run_name is: {self.run_name}") + def _create_pipeline_job( self, template_path: str, @@ -139,6 +163,7 @@ def _create_pipeline_job( """ # noqa: E501 job = aiplatform.PipelineJob( display_name=self.pipeline_name, + job_id=self.run_name, template_path=template_path, pipeline_root=self.staging_bucket_uri, location=self.region, @@ -210,7 +235,7 @@ def run( tag (str, optional): Tag of the pipeline template. Defaults to None. """ # noqa: E501 experiment_name = self._check_experiment_name(experiment_name) - + self._check_run_name(tag=tag) template_path = self._get_template_path(tag) logger.debug( @@ -238,7 +263,7 @@ def run( f"Encountered an error while linking your job {job.job_id}" f" with experiment {experiment_name}." " This is likely due to a bug in the AI Platform Pipelines client." - " You job should be running anyway. Try to link it manually." + " Your job should be running anyway. Try to link it manually." 
                 )
             else:
                 raise e
diff --git a/deployer/settings.py b/deployer/settings.py
index 8f77515..f0227f3 100644
--- a/deployer/settings.py
+++ b/deployer/settings.py
@@ -30,6 +30,7 @@ class _DeployerDeploySettings(CustomBaseModel):
     config_name: Optional[str] = None
     enable_caching: Optional[bool] = None
     experiment_name: Optional[str] = None
+    run_name: Optional[str] = None
     skip_validation: bool = True
diff --git a/docs/CLI_REFERENCE.md b/docs/CLI_REFERENCE.md
index 6db310a..e59a43d 100644
--- a/docs/CLI_REFERENCE.md
+++ b/docs/CLI_REFERENCE.md
@@ -123,6 +123,7 @@ $ vertex-deployer deploy [OPTIONS] PIPELINE_NAMES...
 * `-cn, --config-name TEXT`: Name of the json/py file with parameter values and input artifacts to use when running the pipeline. It must be in the pipeline config dir. e.g. `config_dev.json` for `./vertex/configs/{pipeline-name}/config_dev.json`.
 * `-ec, --enable-caching / -nec, --no-cache`: Whether to turn on caching for the run.If this is not set, defaults to the compile time settings, which are True for alltasks by default, while users may specify different caching options for individualtasks. If this is set, the setting applies to all tasks in the pipeline.Overrides the compile time settings. Defaults to None.
 * `-en, --experiment-name TEXT`: The name of the experiment to run the pipeline in.Defaults to '{pipeline_name}-experiment'.
+* `-rn, --run-name TEXT`: The pipeline's run name. Displayed in the UI. Defaults to '{pipeline_name}-{tags}-%Y%m%d-%H%M%S'.
 * `-y, --skip-validation / -n, --no-skip`: Whether to continue without user validation of the settings. [default: skip-validation]
 * `--help`: Show this message and exit.
diff --git a/example/README.md b/example/README.md
index 67a5268..1628f17 100644
--- a/example/README.md
+++ b/example/README.md
@@ -16,3 +16,67 @@ git commit -m "first commit"
 git remote add origin "your_repo_url"
 git push -u origin master
 ```
+
+# Running the example
+
+In this section, we detail how to run basic commands in the example folder.
+
+* Before you start, set this environment variable so that the pipelines can be found: `export PYTHONPATH=.`
+
+* You must also set the required environment variables in the [example.env](example.env) file.
+
+## Check pipeline validity
+
+The following command checks that your pipeline is valid (notably, that it can be compiled and that its config files are correctly defined).
+
+```bash
+vertex-deployer check dummy_pipeline
+```
+
+## Build the custom image
+
+To build and upload the custom image to Artifact Registry, use the following make command:
+
+```bash
+export $(cat example.env | xargs)
+make build-base-image
+```
+
+## Deploy the dummy pipeline via Cloud Build
+
+For the `vertex-deployer deploy` command to work within Cloud Build (and not just locally), you need to grant additional IAM roles to the service account used by Cloud Build jobs.
+
+By default, this is the following service account:
+* `[PROJECT_NUMBER]@cloudbuild.gserviceaccount.com`
+
+```bash
+export CLOUDBUILD_SERVICE_ACCOUNT=[PROJECT_NUMBER]@cloudbuild.gserviceaccount.com
+
+gcloud projects add-iam-policy-binding ${PROJECT_ID} \
+    --member="serviceAccount:${CLOUDBUILD_SERVICE_ACCOUNT}" \
+    --role="roles/aiplatform.user"
+
+gcloud projects add-iam-policy-binding ${PROJECT_ID} \
+    --member="serviceAccount:${CLOUDBUILD_SERVICE_ACCOUNT}" \
+    --role="roles/iam.serviceAccountUser"
+```
+
+Once this is done, you can launch the make command.
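+
+Before launching it, you can optionally verify that the roles were granted. A quick sketch (assuming `PROJECT_ID` is set in your shell):
+
+```bash
+gcloud projects get-iam-policy ${PROJECT_ID} \
+    --flatten="bindings[].members" \
+    --filter="bindings.members:${CLOUDBUILD_SERVICE_ACCOUNT}" \
+    --format="value(bindings.role)"
+```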
+
+If you do not modify the [cloudbuild_cd.yaml](vertex/deployment/cloudbuild_cd.yaml) file, it will:
+- rebuild the base image
+- deploy a scheduled Vertex AI pipeline
+
+```bash
+export $(cat example.env | xargs)
+make deploy-pipeline
+```
diff --git a/example/example.env b/example/example.env
old mode 100644
new mode 100755
index b08cda3..c97e506
--- a/example/example.env
+++ b/example/example.env
@@ -3,12 +3,13 @@ GCP_REGION=europe-west1
 
 TAG=latest
 
-# Google Artifact Registry
-GAR_LOCATION=europe-west1 # Google Artifact Registry repo location
-GAR_DOCKER_REPO_ID=demo_docker_repo
-GAR_PIPELINES_REPO_ID=demo_pipelines_repo
+# Google Artifact Registry - GAR
+GAR_LOCATION=europe-west1
+GAR_DOCKER_REPO_ID=demo-docker-repo
+GAR_PIPELINES_REPO_ID=demo-pipelines-repo
 GAR_VERTEX_BASE_IMAGE_NAME=demo_base_image
 
 # Vertex AI
-VERTEX_STAGING_BUCKET_NAME=YOUR_VERTEX_STAGING_BUCKET_NAME # without gs://
-VERTEX_SERVICE_ACCOUNT=YOUR_VERTEX_SERVICE_ACCOUNT # full service account email
+VERTEX_STAGING_BUCKET_NAME=demo-vertex-staging-bucket
+VERTEX_SERVICE_ACCOUNT_NAME=demo-vertex-ai-sa
+VERTEX_SERVICE_ACCOUNT=demo-vertex-ai-sa@PROJECT_ID.iam.gserviceaccount.com
diff --git a/example/vertex/deployment/cloudbuild_cd.yaml b/example/vertex/deployment/cloudbuild_cd.yaml
index 5688751..7a2744a 100644
--- a/example/vertex/deployment/cloudbuild_cd.yaml
+++ b/example/vertex/deployment/cloudbuild_cd.yaml
@@ -18,10 +18,20 @@ steps:
   # schedule pipeline: compile, upload, schedule
   - name: '${_GAR_IMAGE_PATH}'
     entrypoint: 'bash'
-    args: [
-      '-c',
-      'vertex-deployer -log DEBUG deploy dummy_pipeline --compile --upload --run -ec --tags ${_TAG} --schedule --delete-last-schedule --cron *-*-19-*-* --config-name config_test.json'
-    ]
+    args:
+      - '-c'
+      - |
+        vertex-deployer -log DEBUG deploy dummy_pipeline \
+          --compile \
+          --upload \
+          --run \
+          --enable-caching \
+          --config-name config_test.json \
+          --tags ${_TAG} \
+          --schedule --delete-last-schedule --cron '*-*-19-*-*'
+    dir: '.'
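+    # NOTE: the --cron flag takes the cron expression with dashes instead of
+    # spaces, so '*-*-19-*-*' stands for '* * 19 * *' (the 19th of every month).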
id: schedule-dummy-pipeline waitFor: ['build-base-image'] diff --git a/mkdocs.yml b/mkdocs.yml index 30e0ed8..46d1dcc 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -49,7 +49,7 @@ nav: - Basic Usage: usage.md - Advanced User Guide: - Vertex DevOps: advanced_user_guide.md - - Undestand settings and configurations: configuration.md + - Understand settings and configurations: configuration.md - CLI Reference: CLI_REFERENCE.md - Contributing: contributing.md - Changelog: changelog.md diff --git a/tests/conftest.py b/tests/conftest.py index 718f804..881092c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from pathlib import Path + import kfp.dsl import pytest from kfp.dsl import Artifact, Input @@ -20,3 +22,8 @@ def dummy_pipeline(name: str, artifact: Input[Artifact]) -> None: raise Exception("This is an exception.") except Exception as e: exception_traceback = e.__traceback__ + + +@pytest.fixture(scope="session") +def templates_path_fixture(): + return Path("tests/unit_tests/input_files") diff --git a/tests/integration_tests/test_init.py b/tests/integration_tests/test_init.py new file mode 100644 index 0000000..4aac57f --- /dev/null +++ b/tests/integration_tests/test_init.py @@ -0,0 +1,100 @@ +from pathlib import Path + +import toml +from typer.testing import CliRunner + +from deployer.cli import app + +runner = CliRunner() + + +def test_init_command_with_defaults(tmp_path): + with runner.isolated_filesystem(temp_dir=tmp_path): + # Given + result = runner.invoke(app, ["init", "--default"]) + + # Then + assert result.exit_code == 0 + assert "Default initialization done" in result.stdout + assert Path("vertex").is_dir() + assert (Path("vertex") / "pipelines" / "dummy_pipeline.py").is_file() + assert (Path("vertex") / "configs" / "dummy_pipeline" / "test.py").is_file() + assert (Path("vertex") / "configs" / "dummy_pipeline" / "dev.py").is_file() + assert (Path("vertex") / "configs" / "dummy_pipeline" / "prod.py").is_file() + assert (Path("vertex") / "deployment" / "cloudbuild_local.yaml").is_file() + assert (Path("vertex") / "deployment" / "Dockerfile").is_file() + assert (Path("vertex") / "deployment" / "build_base_image.sh").is_file() + assert (Path("vertex") / "lib").is_dir() + assert (Path("vertex") / "components").is_dir() + assert (Path("vertex") / "components" / "dummy_component.py").is_file() + assert Path("pyproject.toml").is_file() + assert not Path("pyproject.toml").read_text() + assert Path("deployer.env").is_file() + assert Path("requirements-vertex.txt").is_file() + + +def test_init_command_with_user_input(tmp_path): + with runner.isolated_filesystem(temp_dir=tmp_path): + # Given + # Provide user inputs for the interactive prompts + user_inputs = "\n".join( + [ + "n", + "y", + "custom_value", + "", + "y", + "", + "n", + "y", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "y", + "json", + "", + "y", + "y", + "pipe", + "n", + ] + ) + + result = runner.invoke(app, ["init"], input=user_inputs) + # Then + assert result.exit_code == 0 + # Pay attention to the assertions if the user inputs are changed + assert Path("pyproject.toml").is_file() + parsed_toml = toml.loads(Path("pyproject.toml").read_text()) + for section in parsed_toml.values(): + for key, value in section.items(): + assert value not in [None, "None"], f"Found None for key: {key}" + assert "env_file" not in parsed_toml["tool"]["vertex_deployer"]["deploy"] + assert "cron" not in parsed_toml["tool"]["vertex_deployer"]["deploy"] + assert "check" not in 
parsed_toml["tool"]["vertex_deployer"] + assert parsed_toml["tool"]["vertex_deployer"]["vertex_folder_path"] == "custom_value" + assert parsed_toml["tool"]["vertex_deployer"]["deploy"]["upload"] is True + assert parsed_toml["tool"]["vertex_deployer"]["deploy"]["compile"] is False + assert parsed_toml["tool"]["vertex_deployer"]["create"]["config_type"] == "json" + assert Path("custom_value").is_dir() + assert (Path("custom_value") / "pipelines" / "pipe.py").is_file() + assert (Path("custom_value") / "configs" / "pipe" / "test.json").is_file() + assert (Path("custom_value") / "configs" / "pipe" / "dev.json").is_file() + assert (Path("custom_value") / "configs" / "pipe" / "prod.json").is_file() + assert (Path("custom_value") / "deployment" / "cloudbuild_local.yaml").is_file() + assert (Path("custom_value") / "deployment" / "Dockerfile").is_file() + assert (Path("custom_value") / "deployment" / "build_base_image.sh").is_file() + assert (Path("custom_value") / "lib").is_dir() + assert (Path("custom_value") / "components").is_dir() + assert (Path("custom_value") / "components" / "dummy_component.py").is_file() diff --git a/tests/unit_tests/input_files/folder/template2.yaml.jinja b/tests/unit_tests/input_files/folder/template2.yaml.jinja new file mode 100644 index 0000000..e1ca74a --- /dev/null +++ b/tests/unit_tests/input_files/folder/template2.yaml.jinja @@ -0,0 +1 @@ +This is the cloudbuild.yaml template. diff --git a/tests/unit_tests/input_files/template1.txt.jinja b/tests/unit_tests/input_files/template1.txt.jinja new file mode 100644 index 0000000..e65490f --- /dev/null +++ b/tests/unit_tests/input_files/template1.txt.jinja @@ -0,0 +1 @@ +My name is {{ name }} and I am {{ age }} years old. diff --git a/tests/unit_tests/input_files/template3.env.jinja b/tests/unit_tests/input_files/template3.env.jinja new file mode 100644 index 0000000..92dff47 --- /dev/null +++ b/tests/unit_tests/input_files/template3.env.jinja @@ -0,0 +1 @@ +This is the .env template file. diff --git a/tests/unit_tests/test_init_deployer.py b/tests/unit_tests/test_init_deployer.py new file mode 100644 index 0000000..ab87815 --- /dev/null +++ b/tests/unit_tests/test_init_deployer.py @@ -0,0 +1,67 @@ +from unittest.mock import patch + +import pytest + +from deployer.init_deployer import _create_file_from_template, _generate_templates_mapping +from deployer.utils.exceptions import TemplateFileCreationError + + +class TestCreateFileFromTemplate: + def test_create_file_from_template_valid_inputs(self, templates_path_fixture, tmp_path): + # Arrange + path = tmp_path / "test_file.txt" + template_path = templates_path_fixture / "template1.txt.jinja" + kwargs = {"name": "John", "age": 30} + + # Act + _create_file_from_template(path, template_path, **kwargs) + + # Assert + assert path.exists() + assert path.read_text() == "My name is John and I am 30 years old." 
+ + def test_create_file_template_not_exist(self, templates_path_fixture, tmp_path): + # Arrange + path = tmp_path / "test_file.txt" + template_path = templates_path_fixture / "nonexistent_template.txt" + kwargs = {"name": "John", "age": 30} + + # Act and Assert + with pytest.raises(TemplateFileCreationError): + _create_file_from_template(path, template_path, **kwargs) + + +class TestGenerateTemplatesMapping: + def test_generate_templates_mapping_with_and_without_variables( + self, templates_path_fixture, tmp_path + ): + # Arrange + templates_dict = { + "template1": templates_path_fixture / "template1.txt.jinja", + "template2": templates_path_fixture / "folder/template2.yaml.jinja", + "template3": templates_path_fixture / "template3.env.jinja", + } + output_folder_path = tmp_path / "output" + mapping_variables = { + "name": "John", + "age": 30, + } + + with patch("deployer.init_deployer.TEMPLATES_PATH", templates_path_fixture): + # Act + result = _generate_templates_mapping( + templates_dict, mapping_variables, output_folder_path + ) + + # Assert + assert len(result) == len(templates_dict) + for _, template_path in templates_dict.items(): + output_path = output_folder_path / str( + template_path.relative_to(templates_path_fixture) + ).replace(".jinja", "") + assert output_path in result + assert result[output_path][0].resolve() == template_path.resolve() + expected_variables = ( + mapping_variables if template_path == templates_dict["template1"] else {} + ) + assert result[output_path][1] == expected_variables diff --git a/tests/unit_tests/test_utils.py b/tests/unit_tests/test_utils.py index 76d64ac..ab9caae 100644 --- a/tests/unit_tests/test_utils.py +++ b/tests/unit_tests/test_utils.py @@ -77,15 +77,26 @@ def test_pathlib_input(self): internal_path = Path(__file__) external_path = Path("tests/conftest.py") + # Dynamically find the line number where the exception is raised in conftest.py + line_number = None + with open(external_path, "r") as file: + for i, line in enumerate(file, 1): + if "raise Exception" in line: + line_number = i + break + assert line_number is not None, "Exception line not found in conftest.py" + # When internal_output = filter_lines_from(self.traceback, internal_path) external_output = filter_lines_from(exception_traceback, external_path) + + # Then assert internal_output == ( f' File "{internal_path}", line 71, in TestFilterLinesFrom\n' ' raise Exception("This is an exception.")\n' ) assert external_output == ( - f' File "{external_path.resolve()}", line 20, in \n' + f' File "{external_path.resolve()}", line {line_number}, in \n' ' raise Exception("This is an exception.")\n' ) @@ -94,16 +105,25 @@ def test_string_input(self): internal_path = str(Path(__file__)) external_path = "tests/conftest.py" + # Dynamically find the line number where the exception is raised + line_number = None + with open(external_path, "r") as file: + for i, line in enumerate(file, 1): + if "raise Exception" in line: + line_number = i + break + # When internal_output = filter_lines_from(self.traceback, internal_path) external_output = filter_lines_from(exception_traceback, external_path) - print(internal_output) + + # Then assert internal_output == ( f' File "{internal_path}", line 71, in TestFilterLinesFrom\n' ' raise Exception("This is an exception.")\n' ) assert external_output == ( - f' File "{Path(external_path).resolve()}", line 20, in \n' + f' File "{Path(external_path).resolve()}", line {line_number}, in \n' ' raise Exception("This is an exception.")\n' )