From 8cc2c0caf137889115c9d84c1c9895dae2a808c9 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Thu, 21 Nov 2024 08:36:59 +0000 Subject: [PATCH 01/38] feat: DBTP-1434 - CDN cache policy (#642) --- dbt_platform_helper/utils/validation.py | 30 ++++- .../copilot-bootstrap/default.conf.template | 4 + .../fixtures/addons_files/alb_addons.yml | 35 ++++++ .../addons_files/alb_addons_bad_data.yml | 109 +++++++++++++++++- .../platform_helper/utils/test_validation.py | 15 ++- 5 files changed, 189 insertions(+), 4 deletions(-) diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 454142879..0d9df76ca 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -352,6 +352,32 @@ def iam_role_arn_regex(key): }, } +CACHE_POLICY_DEFINITION = { + "min_ttl": int, + "max_ttl": int, + "default_ttl": int, + "cookies_config": Or("none", "whitelist", "allExcept", "all"), + "header": Or("none", "whitelist"), + "query_string_behavior": Or("none", "whitelist", "allExcept", "all"), + Optional("cookie_list"): list, + Optional("headers_list"): list, + Optional("cache_policy_query_strings"): list, +} + +PATHS_DEFINITION = { + Optional("default"): { + "cache": str, + "request": str, + }, + Optional("additional"): list[ + { + "path": str, + "cache": str, + "request": str, + } + ], +} + ALB_DEFINITION = { "type": "alb", Optional("environments"): { @@ -379,6 +405,9 @@ def iam_role_arn_regex(key): Optional("viewer_certificate_minimum_protocol_version"): str, Optional("viewer_certificate_ssl_support_method"): str, Optional("viewer_protocol_policy"): str, + Optional("cache_policy"): dict({str: CACHE_POLICY_DEFINITION}), + Optional("origin_request_policy"): dict({str: {}}), + Optional("paths"): dict({str: PATHS_DEFINITION}), }, None, ) @@ -521,7 +550,6 @@ def validate_platform_config(config): def _validate_extension_supported_versions( config, extension_type, version_key, get_supported_versions_fn ): - extensions 
= config.get("extensions", {}) if not extensions: return diff --git a/images/copilot-bootstrap/default.conf.template b/images/copilot-bootstrap/default.conf.template index ca15620c6..724040476 100644 --- a/images/copilot-bootstrap/default.conf.template +++ b/images/copilot-bootstrap/default.conf.template @@ -7,6 +7,10 @@ server { index index.html index.htm; } + location /secondary-service { + alias /usr/share/nginx/html; + } + #error_page 404 /404.html; # redirect server error pages to the static page /50x.html diff --git a/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml b/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml index cf6f0ac2c..d33aa6df1 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml @@ -2,6 +2,41 @@ my-alb: type: alb environments: dev: + cache_policy: + "test-policy": + min_ttl: 3600 + max_ttl: 31536000 + default_ttl: 14400 + cookies_config: "whitelist" + cookie_list: [ "x-csrf-token" ] + header: "whitelist" + headers_list: [ "CloudFront-Viewer-Country" ] + query_string_behavior: "whitelist" + cache_policy_query_strings: [ "q", "market" ] + "test-policy-2": + min_ttl: 3600 + max_ttl: 31536000 + default_ttl: 14400 + cookies_config: "all" + cookie_list: [ ] + header: "none" + headers_list: [ ] + query_string_behavior: "all" + cache_policy_query_strings: [ ] + origin_request_policy: + "test-origin-request": { } + paths: + dev.application.uktrade.digital: + default: + cache: "test-policy" + request: "test-origin-request" + additional: + - path: "/static" + cache: "test-policy" + request: "test-origin-request" + - path: "/images" + cache: "test-policy-2" + request: "test-origin-request" additional_address_list: ["internal.api"] allowed_methods: ["GET", "POST", "OPTIONS"] cached_methods: ["GET", "HEAD"] diff --git a/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml 
b/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml index bb612c6bf..8ed2ab8df 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml @@ -71,7 +71,7 @@ my-alb-default-waf-should-be-a-string: type: alb environments: dev: - default_waf: [] # Should be a string + default_waf: [ ] # Should be a string my-alb-enable-logging-should-be-a-bool: @@ -134,4 +134,109 @@ my-alb-view-protocol-policy-should-be-a-string: type: alb environments: dev: - viewer_protocol_policy: [] # Should be a string + viewer_protocol_policy: [ ] # Should be a string + +my-alb-cache-policy-min-ttl-should-be-a-int: + type: alb + environments: + dev: + cache_policy: + "test-policy": + min_ttl: "four hundred" + +my-alb-cache-policy-max-ttl-should-be-a-int: + type: alb + environments: + dev: + cache_policy: + "test-policy": + max_ttl: "three hundred" + +my-alb-cache-policy-default-ttl-should-be-a-int: + type: alb + environments: + dev: + cache_policy: + "test-policy": + default_ttl: "two hundred" + +my-alb-cache-policy-cookies-config-should-be-a-string: + type: alb + environments: + dev: + cache_policy: + "test-policy": + cookies_config: 44321 + +my-alb-cache-policy-cookies-list-should-be-a-list: + type: alb + environments: + dev: + cache_policy: + "test-policy": + cookie_list: 123 + +my-alb-cache-policy-header-should-be-a-string: + type: alb + environments: + dev: + cache_policy: + "test-policy": + header: 999 + +my-alb-cache-policy-headers-list-should-be-a-list: + type: alb + environments: + dev: + cache_policy: + "test-policy": + headers_list: true + +my-alb-cache-policy-query-string-behavior-should-be-a-string: + type: alb + environments: + dev: + cache_policy: + "test-policy": + query_string_behavior: [ ] + +my-alb-cache-policy-cache-policy-query-strings-should-be-a-list: + type: alb + environments: + dev: + cache_policy: + "test-policy": + 
cache_policy_query_strings: 765 + +my-alb-origin-request-policy-should-be-a-dict: + type: alb + environments: + dev: + origin_request_policy: + "test-origin-request": [ ] + +my-alb-paths-default-cache-should-be-a-string: + type: alb + environments: + dev: + paths: + dev.application.uktrade.digital: + default: + cache: 12345 + +my-alb-paths-default-request-should-be-a-string: + type: alb + environments: + dev: + paths: + dev.application.uktrade.digital: + default: + request: { } + +my-alb-paths-additional-should-be-a-list: + type: alb + environments: + dev: + paths: + dev.application.uktrade.digital: + additional: false diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index c99dd1b41..1ecfa62c5 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -10,6 +10,7 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE from dbt_platform_helper.utils.validation import S3_BUCKET_NAME_ERROR_TEMPLATE +from dbt_platform_helper.utils.validation import _validate_extension_supported_versions from dbt_platform_helper.utils.validation import config_file_check from dbt_platform_helper.utils.validation import float_between_with_halfstep from dbt_platform_helper.utils.validation import int_between @@ -20,7 +21,6 @@ from dbt_platform_helper.utils.validation import validate_platform_config from dbt_platform_helper.utils.validation import validate_s3_bucket_name from dbt_platform_helper.utils.validation import validate_string -from dbt_platform_helper.utils.validation import _validate_extension_supported_versions from tests.platform_helper.conftest import FIXTURES_DIR from tests.platform_helper.conftest import UTILS_FIXTURES_DIR @@ -227,6 +227,19 @@ def test_validate_addons_success(addons_file): "my-alb-viewer-certificate-minimum-protocol-version-should-be-a-string": 
r"environments.*dev.*should be instance of 'str'", "my-alb-viewer-certificate-ssl-support-method-should-be-a-string": r"environments.*dev.*should be instance of 'str'", "my-alb-view-protocol-policy-should-be-a-string": r"environments.*dev.*should be instance of 'str'", + "my-alb-cache-policy-min-ttl-should-be-a-int": r"environments.*dev.*should be instance of 'int'", + "my-alb-cache-policy-max-ttl-should-be-a-int": r"environments.*dev.*should be instance of 'int'", + "my-alb-cache-policy-default-ttl-should-be-a-int": r"environments.*dev.*should be instance of 'int'", + "my-alb-cache-policy-cookies-config-should-be-a-string": r"environments.*dev.*did not validate", + "my-alb-cache-policy-cookies-list-should-be-a-list": r"environments.*dev.*should be instance of 'list'", + "my-alb-cache-policy-header-should-be-a-string": r"environments.*dev.*did not validate", + "my-alb-cache-policy-headers-list-should-be-a-list": r"environments.*dev.*should be instance of 'list'", + "my-alb-cache-policy-query-string-behavior-should-be-a-string": r"environments.*dev.*did not validate", + "my-alb-cache-policy-cache-policy-query-strings-should-be-a-list": r"environments.*dev.*should be instance of 'list'", + "my-alb-origin-request-policy-should-be-a-dict": r"environments.*dev.*should be instance of 'dict'", + "my-alb-paths-default-cache-should-be-a-string": r"environments.*dev.*should be instance of 'str'", + "my-alb-paths-default-request-should-be-a-string": r"environments.*dev.*should be instance of 'str'", + "my-alb-paths-additional-should-be-a-list": r"environments.*dev.*raised TypeError", }, ), ], From c8d85096e564fb83ec904df0c645cb79f9d927b8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 10:54:23 +0000 Subject: [PATCH 02/38] chore(main): release 12.1.0 (#637) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 13 +++++++++++++ 
pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dfad02fac..5af52788d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [12.1.0](https://github.com/uktrade/platform-tools/compare/12.0.2...12.1.0) (2024-11-21) + + +### Features + +* DBTP-1380 Get Opensearch/Redis versions from AWS API - Platform-tools changes/Caching of AWS API calls ([#624](https://github.com/uktrade/platform-tools/issues/624)) ([72d0dd7](https://github.com/uktrade/platform-tools/commit/72d0dd70396a4632e5cb5b1f6c80b2df772a89ad)) +* DBTP-1434 - CDN cache policy ([#642](https://github.com/uktrade/platform-tools/issues/642)) ([8cc2c0c](https://github.com/uktrade/platform-tools/commit/8cc2c0caf137889115c9d84c1c9895dae2a808c9)) + + +### Reverts + +* DBTP-1520 refactor conduit command ([#647](https://github.com/uktrade/platform-tools/issues/647)) ([7b56c5e](https://github.com/uktrade/platform-tools/commit/7b56c5e1a4324fbfb2585877dd38c4857c1544cc)) + ## [12.0.2](https://github.com/uktrade/platform-tools/compare/12.0.1...12.0.2) (2024-11-13) diff --git a/pyproject.toml b/pyproject.toml index 9bec6379d..e54c7d560 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.0.2" +version = "12.1.0" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index 0c09e4517..ca3a51140 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.0.2" + ".": "12.1.0" } From b71d0254c26f0b8cb008ca6648919630874488cc Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Thu, 21 Nov 2024 13:11:30 +0000 Subject: [PATCH 03/38] refactor: DBTP-1520 Conduit command refactor (replay) (#649) Co-authored-by: Anthoni Gleeson --- README.md | 17 +- dbt_platform_helper/COMMANDS.md | 9 +- dbt_platform_helper/commands/conduit.py | 437 +------ dbt_platform_helper/domain/conduit.py | 171 +++ dbt_platform_helper/providers/__init__.py | 0 dbt_platform_helper/providers/aws.py | 43 + .../providers/cloudformation.py | 105 ++ dbt_platform_helper/providers/copilot.py | 268 +++++ tests/platform_helper/conftest.py | 7 + tests/platform_helper/domain/test_conduit.py | 292 +++++ .../fixtures/test_cloudformation_template.yml | 3 + tests/platform_helper/providers/test_aws.py | 97 ++ .../providers/test_cloudformation.py | 155 +++ .../platform_helper/providers/test_copilot.py | 731 ++++++++++++ tests/platform_helper/test_command_conduit.py | 311 ++--- tests/platform_helper/test_conduit_helpers.py | 1038 ----------------- 16 files changed, 1991 insertions(+), 1693 deletions(-) create mode 100644 dbt_platform_helper/domain/conduit.py create mode 100644 dbt_platform_helper/providers/__init__.py create mode 100644 dbt_platform_helper/providers/aws.py create mode 100644 dbt_platform_helper/providers/cloudformation.py create mode 100644 dbt_platform_helper/providers/copilot.py create mode 100644 tests/platform_helper/domain/test_conduit.py create mode 100644 tests/platform_helper/providers/test_aws.py create mode 100644 tests/platform_helper/providers/test_cloudformation.py create mode 100644 tests/platform_helper/providers/test_copilot.py delete mode 
100644 tests/platform_helper/test_conduit_helpers.py diff --git a/README.md b/README.md index 65547e986..4b6f1455e 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,9 @@ We use [Codecov](https://app.codecov.io/github/uktrade/platform-tools) to monito You may want to test any CLI changes locally. -Run `poetry build` to build your package resulting in a package file (e.g. `dbt_platform_tools-0.1.40.tar.gz`) in a `dist` folder. You may need to bump up the package version before doing so. +##### Option 1 - Build and install `platform-helper` from your local source code + +Run `poetry build` to build your package resulting in a package file (e.g. `dbt_platform_tools-0.1.40.tar.gz`) in a `dist` folder. You may need to bump up the package version before doing so. To bump the version go to `pyproject.toml/version` Copy the package file(s) to the directory where you would like to test your changes, and make sure you are in a virtual environment. Run `platform-helper --version` to check the installed package version (e.g. `0.1.39`). @@ -81,6 +83,19 @@ Run `pip install ` and confirm the installation has worked by running `pla > [!IMPORTANT] > When testing is complete, do not forget to revert the `dbt-platform-helper` installation back to what it was; e.g. `pip install dbt-platform-helper==0.1.39`. +##### Option 2 - Run the python files directly. + +This assumes that the virtual python environment where you are running them from already has the dependencies installed and the directory is at the same level as your platform-tools directory. + +Example usage: + +``` +# From -deploy + +../platform-tools/platform_helper.py +``` + + #### End to end testing Because this codebase is only fully exercised in conjunction with several others, we have [platform-end-to-end-tests](https://github.com/uktrade/platform-end-to-end-tests), which orchestrates the testing of them working together. 
diff --git a/dbt_platform_helper/COMMANDS.md b/dbt_platform_helper/COMMANDS.md index f6370d576..fd2330c87 100644 --- a/dbt_platform_helper/COMMANDS.md +++ b/dbt_platform_helper/COMMANDS.md @@ -256,7 +256,8 @@ platform-helper codebase deploy --app --env --codeba [↩ Parent](#platform-helper) - Create a conduit connection to an addon. + Opens a shell for a given addon_name create a conduit connection to + interact with postgres, opensearch or redis. ## Usage @@ -272,11 +273,11 @@ platform-helper conduit ## Options - `--app ` - - AWS application name + - Application name - `--env ` - - AWS environment name + - Environment name - `--access ` _Defaults to read._ - - Allow write or admin access to database addons + - Allow read, write or admin access to the database addons. - `--help ` _Defaults to False._ - Show this message and exit. diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index 26673c53e..fb933e608 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -1,453 +1,70 @@ -import json -import random -import string -import subprocess -import time - import click -from botocore.exceptions import ClientError -from cfn_tools import dump_yaml -from cfn_tools import load_yaml -from dbt_platform_helper.utils.application import Application +from dbt_platform_helper.domain.conduit import Conduit +from dbt_platform_helper.providers.aws import SecretNotFoundError +from dbt_platform_helper.providers.copilot import CONDUIT_ADDON_TYPES +from dbt_platform_helper.providers.copilot import AddonNotFoundError +from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError +from dbt_platform_helper.providers.copilot import InvalidAddonTypeError +from dbt_platform_helper.providers.copilot import NoClusterError +from dbt_platform_helper.providers.copilot import ParameterNotFoundError from dbt_platform_helper.utils.application import load_application -from dbt_platform_helper.utils.aws 
import ( - get_postgres_connection_data_updated_with_master_secret, -) from dbt_platform_helper.utils.click import ClickDocOptCommand -from dbt_platform_helper.utils.messages import abort_with_error from dbt_platform_helper.utils.versioning import ( check_platform_helper_version_needs_update, ) - -class ConduitError(Exception): - pass - - -class InvalidAddonTypeConduitError(ConduitError): - def __init__(self, addon_type): - self.addon_type = addon_type - - -class NoClusterConduitError(ConduitError): - pass - - -class SecretNotFoundConduitError(ConduitError): - pass - - -class CreateTaskTimeoutConduitError(ConduitError): - pass - - -class ParameterNotFoundConduitError(ConduitError): - pass - - -class AddonNotFoundConduitError(ConduitError): - pass - - -CONDUIT_DOCKER_IMAGE_LOCATION = "public.ecr.aws/uktrade/tunnel" -CONDUIT_ADDON_TYPES = [ - "opensearch", - "postgres", - "redis", -] CONDUIT_ACCESS_OPTIONS = ["read", "write", "admin"] -def normalise_secret_name(addon_name: str) -> str: - return addon_name.replace("-", "_").upper() - - -def get_addon_type(app: Application, env: str, addon_name: str) -> str: - session = app.environments[env].session - ssm_client = session.client("ssm") - addon_type = None - - try: - addon_config = json.loads( - ssm_client.get_parameter( - Name=f"/copilot/applications/{app.name}/environments/{env}/addons" - )["Parameter"]["Value"] - ) - except ssm_client.exceptions.ParameterNotFound: - raise ParameterNotFoundConduitError - - if addon_name not in addon_config.keys(): - raise AddonNotFoundConduitError - - for name, config in addon_config.items(): - if name == addon_name: - addon_type = config["type"] - - if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: - raise InvalidAddonTypeConduitError(addon_type) - - if "postgres" in addon_type: - addon_type = "postgres" - - return addon_type - - -def get_parameter_name( - app: Application, env: str, addon_type: str, addon_name: str, access: str -) -> str: - if addon_type == "postgres": - 
return f"/copilot/{app.name}/{env}/conduits/{normalise_secret_name(addon_name)}_{access.upper()}" - elif addon_type == "redis" or addon_type == "opensearch": - return f"/copilot/{app.name}/{env}/conduits/{normalise_secret_name(addon_name)}_ENDPOINT" - else: - return f"/copilot/{app.name}/{env}/conduits/{normalise_secret_name(addon_name)}" - - -def get_or_create_task_name( - app: Application, env: str, addon_name: str, parameter_name: str -) -> str: - ssm = app.environments[env].session.client("ssm") - - try: - return ssm.get_parameter(Name=parameter_name)["Parameter"]["Value"] - except ssm.exceptions.ParameterNotFound: - random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) - return f"conduit-{app.name}-{env}-{addon_name}-{random_id}" - - -def get_cluster_arn(app: Application, env: str) -> str: - ecs_client = app.environments[env].session.client("ecs") - - for cluster_arn in ecs_client.list_clusters()["clusterArns"]: - tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) - tags = tags_response["tags"] - - app_key_found = False - env_key_found = False - cluster_key_found = False - - for tag in tags: - if tag["key"] == "copilot-application" and tag["value"] == app.name: - app_key_found = True - if tag["key"] == "copilot-environment" and tag["value"] == env: - env_key_found = True - if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": - cluster_key_found = True - - if app_key_found and env_key_found and cluster_key_found: - return cluster_arn - - raise NoClusterConduitError - - -def get_connection_secret_arn(app: Application, env: str, secret_name: str) -> str: - secrets_manager = app.environments[env].session.client("secretsmanager") - ssm = app.environments[env].session.client("ssm") - - try: - return ssm.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"] - except ssm.exceptions.ParameterNotFound: - pass - - try: - return 
secrets_manager.describe_secret(SecretId=secret_name)["ARN"] - except secrets_manager.exceptions.ResourceNotFoundException: - pass - - raise SecretNotFoundConduitError(secret_name) - - -def create_postgres_admin_task( - app: Application, env: str, secret_name: str, task_name: str, addon_type: str, addon_name: str -): - session = app.environments[env].session - read_only_secret_name = secret_name + "_READ_ONLY_USER" - master_secret_name = ( - f"/copilot/{app.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" - ) - master_secret_arn = session.client("ssm").get_parameter( - Name=master_secret_name, WithDecryption=True - )["Parameter"]["Value"] - connection_string = json.dumps( - get_postgres_connection_data_updated_with_master_secret( - session, read_only_secret_name, master_secret_arn - ) - ) - - subprocess.call( - f"copilot task run --app {app.name} --env {env} " - f"--task-group-name {task_name} " - f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " - f"--env-vars CONNECTION_SECRET='{connection_string}' " - "--platform-os linux " - "--platform-arch arm64", - shell=True, - ) - - -def create_addon_client_task( - app: Application, - env: str, - addon_type: str, - addon_name: str, - task_name: str, - access: str, -): - secret_name = f"/copilot/{app.name}/{env}/secrets/{normalise_secret_name(addon_name)}" - session = app.environments[env].session - - if addon_type == "postgres": - if access == "read": - secret_name += "_READ_ONLY_USER" - elif access == "write": - secret_name += "_APPLICATION_USER" - elif access == "admin": - create_postgres_admin_task(app, env, secret_name, task_name, addon_type, addon_name) - return - elif addon_type == "redis" or addon_type == "opensearch": - secret_name += "_ENDPOINT" - - role_name = f"{addon_name}-{app.name}-{env}-conduitEcsTask" - - try: - session.client("iam").get_role(RoleName=role_name) - execution_role = f"--execution-role {role_name} " - except ClientError as ex: - execution_role = "" - # We cannot 
check for botocore.errorfactory.NoSuchEntityException as botocore generates that class on the fly as part of errorfactory. - # factory. Checking the error code is the recommended way of handling these exceptions. - if ex.response.get("Error", {}).get("Code", None) != "NoSuchEntity": - abort_with_error( - f"cannot obtain Role {role_name}: {ex.response.get('Error', {}).get('Message', '')}" - ) - - subprocess.call( - f"copilot task run --app {app.name} --env {env} " - f"--task-group-name {task_name} " - f"{execution_role}" - f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " - f"--secrets CONNECTION_SECRET={get_connection_secret_arn(app, env, secret_name)} " - "--platform-os linux " - "--platform-arch arm64", - shell=True, - ) - - -def addon_client_is_running(app: Application, env: str, cluster_arn: str, task_name: str) -> bool: - ecs_client = app.environments[env].session.client("ecs") - - tasks = ecs_client.list_tasks( - cluster=cluster_arn, - desiredStatus="RUNNING", - family=f"copilot-{task_name}", - ) - - if not tasks["taskArns"]: - return False - - described_tasks = ecs_client.describe_tasks(cluster=cluster_arn, tasks=tasks["taskArns"]) - - # The ExecuteCommandAgent often takes longer to start running than the task and without the - # agent it's not possible to exec into a task. 
- for task in described_tasks["tasks"]: - for container in task["containers"]: - for agent in container["managedAgents"]: - if agent["name"] == "ExecuteCommandAgent" and agent["lastStatus"] == "RUNNING": - return True - - return False - - -def connect_to_addon_client_task(app: Application, env: str, cluster_arn: str, task_name: str): - tries = 0 - running = False - - while tries < 15 and not running: - tries += 1 - - if addon_client_is_running(app, env, cluster_arn, task_name): - running = True - subprocess.call( - "copilot task exec " - f"--app {app.name} --env {env} " - f"--name {task_name} " - f"--command bash", - shell=True, - ) - - time.sleep(1) - - if not running: - raise CreateTaskTimeoutConduitError - - -def add_stack_delete_policy_to_task_role(app: Application, env: str, task_name: str): - session = app.environments[env].session - cloudformation_client = session.client("cloudformation") - iam_client = session.client("iam") - - conduit_stack_name = f"task-{task_name}" - conduit_stack_resources = cloudformation_client.list_stack_resources( - StackName=conduit_stack_name - )["StackResourceSummaries"] - - for resource in conduit_stack_resources: - if resource["LogicalResourceId"] == "DefaultTaskRole": - task_role_name = resource["PhysicalResourceId"] - iam_client.put_role_policy( - RoleName=task_role_name, - PolicyName="DeleteCloudFormationStack", - PolicyDocument=json.dumps( - { - "Version": "2012-10-17", - "Statement": [ - { - "Action": ["cloudformation:DeleteStack"], - "Effect": "Allow", - "Resource": f"arn:aws:cloudformation:*:*:stack/{conduit_stack_name}/*", - }, - ], - }, - ), - ) - - -def update_conduit_stack_resources( - app: Application, - env: str, - addon_type: str, - addon_name: str, - task_name: str, - parameter_name: str, - access: str, -): - session = app.environments[env].session - cloudformation_client = session.client("cloudformation") - - conduit_stack_name = f"task-{task_name}" - template = 
cloudformation_client.get_template(StackName=conduit_stack_name) - template_yml = load_yaml(template["TemplateBody"]) - template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" - template_yml["Resources"]["TaskNameParameter"] = load_yaml( - f""" - Type: AWS::SSM::Parameter - Properties: - Name: {parameter_name} - Type: String - Value: {task_name} - """ - ) - - iam_client = session.client("iam") - log_filter_role_arn = iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")["Role"]["Arn"] - - ssm_client = session.client("ssm") - destination_log_group_arns = json.loads( - ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"]["Value"] - ) - - destination_arn = destination_log_group_arns["dev"] - if env.lower() in ("prod", "production"): - destination_arn = destination_log_group_arns["prod"] - - template_yml["Resources"]["SubscriptionFilter"] = load_yaml( - f""" - Type: AWS::Logs::SubscriptionFilter - DeletionPolicy: Retain - Properties: - RoleArn: {log_filter_role_arn} - LogGroupName: /copilot/{task_name} - FilterName: /copilot/conduit/{app.name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} - FilterPattern: '' - DestinationArn: {destination_arn} - """ - ) - - params = [] - if "Parameters" in template_yml: - for param in template_yml["Parameters"]: - params.append({"ParameterKey": param, "UsePreviousValue": True}) - - cloudformation_client.update_stack( - StackName=conduit_stack_name, - TemplateBody=dump_yaml(template_yml), - Parameters=params, - Capabilities=["CAPABILITY_IAM"], - ) - - -def start_conduit( - application: Application, - env: str, - addon_type: str, - addon_name: str, - access: str = "read", -): - cluster_arn = get_cluster_arn(application, env) - parameter_name = get_parameter_name(application, env, addon_type, addon_name, access) - task_name = get_or_create_task_name(application, env, addon_name, parameter_name) - - if not addon_client_is_running(application, env, cluster_arn, task_name): 
- create_addon_client_task(application, env, addon_type, addon_name, task_name, access) - add_stack_delete_policy_to_task_role(application, env, task_name) - update_conduit_stack_resources( - application, env, addon_type, addon_name, task_name, parameter_name, access - ) - - connect_to_addon_client_task(application, env, cluster_arn, task_name) - - @click.command(cls=ClickDocOptCommand) @click.argument("addon_name", type=str, required=True) -@click.option("--app", help="AWS application name", required=True) -@click.option("--env", help="AWS environment name", required=True) +@click.option("--app", help="Application name", required=True) +@click.option("--env", help="Environment name", required=True) @click.option( "--access", default="read", type=click.Choice(CONDUIT_ACCESS_OPTIONS), - help="Allow write or admin access to database addons", + help="Allow read, write or admin access to the database addons.", ) def conduit(addon_name: str, app: str, env: str, access: str): - """Create a conduit connection to an addon.""" + """Opens a shell for a given addon_name create a conduit connection to + interact with postgres, opensearch or redis.""" check_platform_helper_version_needs_update() application = load_application(app) try: - addon_type = get_addon_type(application, env, addon_name) - except ParameterNotFoundConduitError: + Conduit(application).start(env, addon_name, access) + except NoClusterError: + click.secho(f"""No ECS cluster found for "{app}" in "{env}" environment.""", fg="red") + exit(1) + except SecretNotFoundError as err: click.secho( - f"""No parameter called "/copilot/applications/{app}/environments/{env}/addons". 
Try deploying the "{app}" "{env}" environment.""", + f"""No secret called "{err}" for "{app}" in "{env}" environment.""", fg="red", ) exit(1) - except AddonNotFoundConduitError: + except CreateTaskTimeoutError: click.secho( - f"""Addon "{addon_name}" does not exist.""", + f"""Client ({addon_name}) ECS task has failed to start for "{app}" in "{env}" environment.""", fg="red", ) exit(1) - except InvalidAddonTypeConduitError as err: + except ParameterNotFoundError: click.secho( - f"""Addon type "{err.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""", + f"""No parameter called "/copilot/applications/{app}/environments/{env}/addons". Try deploying the "{app}" "{env}" environment.""", fg="red", ) exit(1) - - try: - start_conduit(application, env, addon_type, addon_name, access) - except NoClusterConduitError: - click.secho(f"""No ECS cluster found for "{app}" in "{env}" environment.""", fg="red") - exit(1) - except SecretNotFoundConduitError as err: + except AddonNotFoundError: click.secho( - f"""No secret called "{err}" for "{app}" in "{env}" environment.""", + f"""Addon "{addon_name}" does not exist.""", fg="red", ) exit(1) - except CreateTaskTimeoutConduitError: + except InvalidAddonTypeError as err: click.secho( - f"""Client ({addon_name}) ECS task has failed to start for "{app}" in "{env}" environment.""", + f"""Addon type "{err.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""", fg="red", ) exit(1) diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py new file mode 100644 index 000000000..a7954210d --- /dev/null +++ b/dbt_platform_helper/domain/conduit.py @@ -0,0 +1,171 @@ +import subprocess +from collections.abc import Callable + +import click + +from dbt_platform_helper.providers.cloudformation import ( + add_stack_delete_policy_to_task_role, +) +from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources +from 
class Conduit:
    def __init__(
        self,
        application: Application,
        echo_fn: Callable[[str], str] = click.secho,
        subprocess_fn=subprocess,
        addon_client_is_running_fn=addon_client_is_running,
        connect_to_addon_client_task_fn=connect_to_addon_client_task,
        create_addon_client_task_fn=create_addon_client_task,
        create_postgres_admin_task_fn=create_postgres_admin_task,
        get_addon_type_fn=get_addon_type,
        get_cluster_arn_fn=get_cluster_arn,
        get_parameter_name_fn=get_parameter_name,
        get_or_create_task_name_fn=get_or_create_task_name,
        add_stack_delete_policy_to_task_role_fn=add_stack_delete_policy_to_task_role,
        update_conduit_stack_resources_fn=update_conduit_stack_resources,
        wait_for_cloudformation_to_reach_status_fn=wait_for_cloudformation_to_reach_status,
    ):
        """
        Initialise a conduit domain which can be used to spin up a conduit
        instance to connect to a service.

        All collaborators are injectable to keep the domain testable without AWS.

        Args:
            application (Application): the deployed application's data.
            echo_fn: function used to emit progress messages to the user.
            subprocess_fn: injected subprocess module/function used to execute shell commands.
            addon_client_is_running_fn: checks whether a conduit task for the addon is already running.
            connect_to_addon_client_task_fn: connects the user's shell to the conduit task.
            create_addon_client_task_fn: creates the conduit ECS task to connect to.
            create_postgres_admin_task_fn: creates the conduit task with admin access to postgres.
            get_addon_type_fn: resolves the addon type from the addon name.
            get_cluster_arn_fn: resolves the ECS cluster ARN for the application/environment.
            get_parameter_name_fn: builds the SSM parameter name for the conduit task.
            get_or_create_task_name_fn: returns an existing conduit task name or generates a new one.
            add_stack_delete_policy_to_task_role_fn: grants the task role permission to delete its own stack.
            update_conduit_stack_resources_fn: adds the conduit resources to the CloudFormation stack.
            wait_for_cloudformation_to_reach_status_fn: CloudFormation waiter.
        """
        # NOTE: in the previous revision this docstring sat AFTER the
        # assignments as a dead string expression; it is now a real docstring.
        self.application = application
        self.subprocess_fn = subprocess_fn
        self.echo_fn = echo_fn
        self.addon_client_is_running_fn = addon_client_is_running_fn
        self.connect_to_addon_client_task_fn = connect_to_addon_client_task_fn
        self.create_addon_client_task_fn = create_addon_client_task_fn
        self.create_postgres_admin_task_fn = create_postgres_admin_task_fn
        # Backward-compatible alias: earlier revisions exposed this attribute
        # without the "_fn" suffix.
        self.create_postgres_admin_task = create_postgres_admin_task_fn
        self.get_addon_type_fn = get_addon_type_fn
        self.get_cluster_arn_fn = get_cluster_arn_fn
        self.get_parameter_name_fn = get_parameter_name_fn
        self.get_or_create_task_name_fn = get_or_create_task_name_fn
        self.add_stack_delete_policy_to_task_role_fn = add_stack_delete_policy_to_task_role_fn
        self.update_conduit_stack_resources_fn = update_conduit_stack_resources_fn
        self.wait_for_cloudformation_to_reach_status_fn = wait_for_cloudformation_to_reach_status_fn

    def start(self, env: str, addon_name: str, access: str = "read"):
        """Create (if needed), register and connect to a conduit task for the addon."""
        clients = self._initialise_clients(env)
        addon_type, cluster_arn, parameter_name, task_name = self._get_addon_details(
            env, addon_name, access
        )

        if not self.addon_client_is_running_fn(clients["ecs"], cluster_arn, task_name):
            self.echo_fn("Creating conduit task")
            self.create_addon_client_task_fn(
                clients["iam"],
                clients["ssm"],
                clients["secrets_manager"],
                self.subprocess_fn,
                self.application,
                env,
                addon_type,
                addon_name,
                task_name,
                access,
            )

            self.echo_fn("Updating conduit task")
            self._update_stack_resources(
                clients["cloudformation"],
                clients["iam"],
                clients["ssm"],
                self.application.name,
                env,
                addon_type,
                addon_name,
                task_name,
                parameter_name,
                access,
            )

        self.echo_fn("Connecting to conduit task")
        self.connect_to_addon_client_task_fn(
            clients["ecs"], self.subprocess_fn, self.application.name, env, cluster_arn, task_name
        )

    def _initialise_clients(self, env):
        # One boto3 client per service, all bound to the environment's session.
        return {
            "ecs": self.application.environments[env].session.client("ecs"),
            "iam": self.application.environments[env].session.client("iam"),
            "ssm": self.application.environments[env].session.client("ssm"),
            "cloudformation": self.application.environments[env].session.client("cloudformation"),
            "secrets_manager": self.application.environments[env].session.client("secretsmanager"),
        }

    def _get_addon_details(self, env, addon_name, access):
        """Resolve the addon type, cluster ARN, parameter name and task name."""
        ssm_client = self.application.environments[env].session.client("ssm")
        ecs_client = self.application.environments[env].session.client("ecs")

        addon_type = self.get_addon_type_fn(ssm_client, self.application.name, env, addon_name)
        cluster_arn = self.get_cluster_arn_fn(ecs_client, self.application.name, env)
        parameter_name = self.get_parameter_name_fn(
            self.application.name, env, addon_type, addon_name, access
        )
        task_name = self.get_or_create_task_name_fn(
            ssm_client, self.application.name, env, addon_name, parameter_name
        )

        return addon_type, cluster_arn, parameter_name, task_name

    def _update_stack_resources(
        self,
        cloudformation_client,
        iam_client,
        ssm_client,
        app_name,
        env,
        addon_type,
        addon_name,
        task_name,
        parameter_name,
        access,
    ):
        """Register the conduit task in CloudFormation and wait for the update."""
        self.add_stack_delete_policy_to_task_role_fn(cloudformation_client, iam_client, task_name)
        stack_name = self.update_conduit_stack_resources_fn(
            cloudformation_client,
            iam_client,
            ssm_client,
            app_name,
            env,
            addon_type,
            addon_name,
            task_name,
            parameter_name,
            access,
        )
        self.echo_fn("Waiting for conduit task update to complete...")
        self.wait_for_cloudformation_to_reach_status_fn(
            cloudformation_client, "stack_update_complete", stack_name
        )
class AWSError(Exception):
    """Base class for errors raised by the AWS provider layer."""


class SecretNotFoundError(AWSError):
    """Raised when a secret exists in neither SSM nor Secrets Manager."""


# TODO Attempt to extract specific conduit business logic and leave the AWS
# specific functionality in the provider layer.
def get_postgres_connection_data_updated_with_master_secret(
    ssm_client, secrets_manager_client, parameter_name, secret_arn
):
    """Return the connection data held in `parameter_name`, with username and
    password replaced (URL-quoted) by the master credentials in `secret_arn`."""
    # FIX: the module previously relied on `import urllib` exposing
    # `urllib.parse`, which only works if another library has already imported
    # the submodule. Import it explicitly here.
    from urllib.parse import quote

    response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True)
    parameter_data = json.loads(response["Parameter"]["Value"])

    secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn)
    secret_value = json.loads(secret_response["SecretString"])

    parameter_data["username"] = quote(secret_value["username"])
    parameter_data["password"] = quote(secret_value["password"])

    return parameter_data


def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str:
    """Return the ARN for `secret_name`, checking SSM first then Secrets Manager.

    Raises:
        SecretNotFoundError: when the name exists in neither service.
    """
    try:
        return ssm_client.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"]
    except ssm_client.exceptions.ParameterNotFound:
        pass

    try:
        return secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"]
    except secrets_manager_client.exceptions.ResourceNotFoundException:
        pass

    raise SecretNotFoundError(secret_name)


def add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name: str):
    """Allow the conduit task's role to delete its own CloudFormation stack."""
    stack_name = f"task-{task_name}"
    stack_resources = cloudformation_client.list_stack_resources(StackName=stack_name)[
        "StackResourceSummaries"
    ]

    for resource in stack_resources:
        if resource["LogicalResourceId"] == "DefaultTaskRole":
            task_role_name = resource["PhysicalResourceId"]
            iam_client.put_role_policy(
                RoleName=task_role_name,
                PolicyName="DeleteCloudFormationStack",
                PolicyDocument=json.dumps(
                    {
                        "Version": "2012-10-17",
                        "Statement": [
                            {
                                "Action": ["cloudformation:DeleteStack"],
                                "Effect": "Allow",
                                "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*",
                            },
                        ],
                    },
                ),
            )


def update_conduit_stack_resources(
    cloudformation_client,
    iam_client,
    ssm_client,
    application_name: str,
    env: str,
    addon_type: str,
    addon_name: str,
    task_name: str,
    parameter_name: str,
    access: str,
):
    """Add the conduit task name parameter and a log subscription filter to the
    conduit task's CloudFormation stack, then trigger a stack update.

    Returns the stack name so callers can wait on the update.
    """
    conduit_stack_name = f"task-{task_name}"
    template = cloudformation_client.get_template(StackName=conduit_stack_name)
    template_yml = load_yaml(template["TemplateBody"])
    # Retain the log group so task logs survive stack deletion.
    template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain"
    template_yml["Resources"]["TaskNameParameter"] = load_yaml(
        f"""
        Type: AWS::SSM::Parameter
        Properties:
          Name: {parameter_name}
          Type: String
          Value: {task_name}
        """
    )

    log_filter_role_arn = iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")["Role"]["Arn"]

    destination_log_group_arns = json.loads(
        ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"]["Value"]
    )

    # Route production environments to the production log destination.
    destination_arn = destination_log_group_arns["dev"]
    if env.lower() in ("prod", "production"):
        destination_arn = destination_log_group_arns["prod"]

    template_yml["Resources"]["SubscriptionFilter"] = load_yaml(
        f"""
        Type: AWS::Logs::SubscriptionFilter
        DeletionPolicy: Retain
        Properties:
          RoleArn: {log_filter_role_arn}
          LogGroupName: /copilot/{task_name}
          FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access}
          FilterPattern: ''
          DestinationArn: {destination_arn}
        """
    )

    params = []
    if "Parameters" in template_yml:
        for param in template_yml["Parameters"]:
            # Reuse existing parameter values so the update is a pure resource change.
            # TODO testing missed in codecov, update test to assert on method call below with params including ExistingParameter from cloudformation template.
            params.append({"ParameterKey": param, "UsePreviousValue": True})

    cloudformation_client.update_stack(
        StackName=conduit_stack_name,
        TemplateBody=dump_yaml(template_yml),
        Parameters=params,
        Capabilities=["CAPABILITY_IAM"],
    )

    return conduit_stack_name
# TODO: consider handling rollback states here, e.g. botocore raises
# WaiterError when the stack reaches UPDATE_ROLLBACK_COMPLETE.
def wait_for_cloudformation_to_reach_status(cloudformation_client, stack_status, stack_name):
    """Block until `stack_name` reaches the boto3 waiter state `stack_status`.

    Polls every 5 seconds, up to 20 attempts.
    """
    waiter = cloudformation_client.get_waiter(stack_status)
    waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 20})


# TODO move to constants
CONDUIT_DOCKER_IMAGE_LOCATION = "public.ecr.aws/uktrade/tunnel"
CONDUIT_ADDON_TYPES = [
    "opensearch",
    "postgres",
    "redis",
]


class NoClusterError(AWSError):
    """No ECS cluster matched the application/environment tags."""


class CreateTaskTimeoutError(AWSError):
    """The conduit client task did not start within the allowed attempts."""


class ParameterNotFoundError(AWSError):
    """The addons SSM parameter for the environment does not exist."""


class AddonNotFoundError(AWSError):
    """The requested addon is not present in the addons configuration."""


class AddonTypeMissingFromConfigError(AWSError):
    """The addon entry exists but has no "type" key."""


class InvalidAddonTypeError(AWSError):
    """The addon's type is not one supported by conduit."""

    def __init__(self, addon_type):
        self.addon_type = addon_type


def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str:
    """Look up `addon_name` in the environment's addons SSM parameter and
    return its (normalised) type.

    Raises:
        ParameterNotFoundError: the addons parameter is missing.
        AddonNotFoundError: the addon is not in the configuration.
        AddonTypeMissingFromConfigError: the addon entry has no type.
        InvalidAddonTypeError: the type is not supported by conduit.
    """
    try:
        raw_config = ssm_client.get_parameter(
            Name=f"/copilot/applications/{application_name}/environments/{env}/addons"
        )["Parameter"]["Value"]
        addon_config = json.loads(raw_config)
    except ssm_client.exceptions.ParameterNotFound:
        raise ParameterNotFoundError

    if addon_name not in addon_config:
        raise AddonNotFoundError

    config = addon_config[addon_name]
    if not config.get("type"):
        raise AddonTypeMissingFromConfigError()
    addon_type = config["type"]

    if addon_type not in CONDUIT_ADDON_TYPES:
        raise InvalidAddonTypeError(addon_type)

    # Collapse postgres variants to the single "postgres" conduit image.
    if "postgres" in addon_type:
        addon_type = "postgres"

    return addon_type
ssm_client.exceptions.ParameterNotFound: + raise ParameterNotFoundError + + if addon_name not in addon_config.keys(): + raise AddonNotFoundError + + for name, config in addon_config.items(): + if name == addon_name: + if not config.get("type"): + raise AddonTypeMissingFromConfigError() + addon_type = config["type"] + + if addon_type not in CONDUIT_ADDON_TYPES: + raise InvalidAddonTypeError(addon_type) + + if "postgres" in addon_type: + addon_type = "postgres" + + return addon_type + + +# TODO Refactor this to support passing a list of tags to check against, allowing for a more generic implementation +def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: + + for cluster_arn in ecs_client.list_clusters()["clusterArns"]: + tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) + tags = tags_response["tags"] + + app_key_found = False + env_key_found = False + cluster_key_found = False + + for tag in tags: + if tag["key"] == "copilot-application" and tag["value"] == application_name: + app_key_found = True + if tag["key"] == "copilot-environment" and tag["value"] == env: + env_key_found = True + if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": + cluster_key_found = True + + if app_key_found and env_key_found and cluster_key_found: + return cluster_arn + + raise NoClusterError + + +def get_parameter_name( + application_name: str, env: str, addon_type: str, addon_name: str, access: str +) -> str: + if addon_type == "postgres": + return f"/copilot/{application_name}/{env}/conduits/{normalise_secret_name(addon_name)}_{access.upper()}" + elif addon_type == "redis" or addon_type == "opensearch": + return f"/copilot/{application_name}/{env}/conduits/{normalise_secret_name(addon_name)}_ENDPOINT" + else: + return f"/copilot/{application_name}/{env}/conduits/{normalise_secret_name(addon_name)}" + + +# TODO ECS??? 
+def get_or_create_task_name( + ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str +) -> str: + try: + return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] + except ssm_client.exceptions.ParameterNotFound: + random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) + return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" + + +# TODO ECS method +def addon_client_is_running(ecs_client, cluster_arn: str, task_name: str): + tasks = ecs_client.list_tasks( + cluster=cluster_arn, + desiredStatus="RUNNING", + family=f"copilot-{task_name}", + ) + + if not tasks["taskArns"]: + return False + + return True + + +def create_addon_client_task( + iam_client, + ssm_client, + secrets_manager_client, + subprocess, + application: Application, + env: str, + addon_type: str, + addon_name: str, + task_name: str, + access: str, +): + secret_name = f"/copilot/{application.name}/{env}/secrets/{normalise_secret_name(addon_name)}" + + if addon_type == "postgres": + if access == "read": + secret_name += "_READ_ONLY_USER" + elif access == "write": + secret_name += "_APPLICATION_USER" + elif access == "admin": + create_postgres_admin_task( + ssm_client, + secrets_manager_client, + subprocess, + application, + addon_name, + addon_type, + env, + secret_name, + task_name, + ) + return + elif addon_type == "redis" or addon_type == "opensearch": + secret_name += "_ENDPOINT" + + role_name = f"{addon_name}-{application.name}-{env}-conduitEcsTask" + + try: + iam_client.get_role(RoleName=role_name) + execution_role = f"--execution-role {role_name} " + except ClientError as ex: + execution_role = "" + # We cannot check for botocore.errorfactory.NoSuchEntityException as botocore generates that class on the fly as part of errorfactory. + # factory. Checking the error code is the recommended way of handling these exceptions. 
+ if ex.response.get("Error", {}).get("Code", None) != "NoSuchEntity": + # TODO this should raise an exception and caught at the command layer + abort_with_error( + f"cannot obtain Role {role_name}: {ex.response.get('Error', {}).get('Message', '')}" + ) + + subprocess.call( + f"copilot task run --app {application.name} --env {env} " + f"--task-group-name {task_name} " + f"{execution_role}" + f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " + f"--secrets CONNECTION_SECRET={get_connection_secret_arn(ssm_client,secrets_manager_client, secret_name)} " + "--platform-os linux " + "--platform-arch arm64", + shell=True, + ) + + +def normalise_secret_name(addon_name: str) -> str: + return addon_name.replace("-", "_").upper() + + +def create_postgres_admin_task( + ssm_client, + secrets_manager_client, + subprocess, + app: Application, + addon_name: str, + addon_type: str, + env: str, + secret_name: str, + task_name: str, +): + read_only_secret_name = secret_name + "_READ_ONLY_USER" + master_secret_name = ( + f"/copilot/{app.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" + ) + master_secret_arn = ssm_client.get_parameter(Name=master_secret_name, WithDecryption=True)[ + "Parameter" + ]["Value"] + connection_string = json.dumps( + get_postgres_connection_data_updated_with_master_secret( + ssm_client, secrets_manager_client, read_only_secret_name, master_secret_arn + ) + ) + + subprocess.call( + f"copilot task run --app {app.name} --env {env} " + f"--task-group-name {task_name} " + f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " + f"--env-vars CONNECTION_SECRET='{connection_string}' " + "--platform-os linux " + "--platform-arch arm64", + shell=True, + ) + + +def connect_to_addon_client_task( + ecs_client, subprocess, application_name, env, cluster_arn, task_name +): + running = False + tries = 0 + while tries < 15 and not running: + tries += 1 + if addon_client_is_running(ecs_client, cluster_arn, task_name): + # TODO user 
ecs.describe_task to check if exec agent is running before call subprocess + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs/client/describe_tasks.html + try: + subprocess.call( + "copilot task exec " + f"--app {application_name} --env {env} " + f"--name {task_name} " + f"--command bash", + shell=True, + ) + running = True + except ecs_client.exceptions.InvalidParameterException: + # Unable to connect, execute command agent probably isn’t running yet + click.echo("Unable to connect, execute command agent probably isn’t running yet") + + time.sleep(1) + + if not running: + raise CreateTaskTimeoutError diff --git a/tests/platform_helper/conftest.py b/tests/platform_helper/conftest.py index 65324229f..783fb7dc1 100644 --- a/tests/platform_helper/conftest.py +++ b/tests/platform_helper/conftest.py @@ -265,6 +265,12 @@ def validate_version(): @pytest.fixture(scope="function") def mock_stack(): def _create_stack(addon_name): + params = [ + { + "ParameterKey": "ExistingParameter", + "ParameterValue": "does-not-matter", + } + ] with mock_aws(): with open(FIXTURES_DIR / "test_cloudformation_template.yml") as f: template = yaml.safe_load(f) @@ -272,6 +278,7 @@ def _create_stack(addon_name): cf.create_stack( StackName=f"task-{mock_task_name(addon_name)}", TemplateBody=yaml.dump(template), + Parameters=params, ) return _create_stack diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py new file mode 100644 index 000000000..fc60a5170 --- /dev/null +++ b/tests/platform_helper/domain/test_conduit.py @@ -0,0 +1,292 @@ +from unittest.mock import Mock +from unittest.mock import call + +import pytest + +from dbt_platform_helper.domain.conduit import Conduit +from dbt_platform_helper.providers.aws import SecretNotFoundError +from dbt_platform_helper.providers.copilot import AddonNotFoundError +from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError +from 
app_name = "failed_app"
addon_type = "postgres"
env = "development"
cluster_name = "arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1"
task_name = "task_name"
# FIX: addon_name was previously assigned twice; the first ("important-db")
# was dead code immediately overwritten by this value.
addon_name = "custom-name-rds-postgres"


class ConduitMocks:
    """Bundle of mock collaborators for constructing a Conduit under test.

    Any collaborator can be overridden via keyword argument; params() returns
    the full kwargs dict expected by Conduit.__init__.
    """

    def __init__(self, app_name="test-application", addon_type="postgres", *args, **kwargs):
        session = Mock()
        sessions = {"000000000": session}
        dummy_application = Application(app_name)
        dummy_application.environments = {env: Environment(env, "000000000", sessions)}
        self.application = dummy_application

        self.addon_client_is_running_fn = kwargs.get(
            "addon_client_is_running_fn", Mock(return_value=False)
        )
        self.connect_to_addon_client_task_fn = kwargs.get("connect_to_addon_client_task_fn", Mock())
        self.create_addon_client_task_fn = kwargs.get("create_addon_client_task_fn", Mock())
        self.create_postgres_admin_task_fn = kwargs.get("create_postgres_admin_task_fn", Mock())
        self.get_addon_type_fn = kwargs.get("get_addon_type_fn", Mock(return_value=addon_type))
        self.get_cluster_arn_fn = kwargs.get(
            "get_cluster_arn_fn",
            Mock(return_value="arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1"),
        )
        self.get_or_create_task_name_fn = kwargs.get(
            "get_or_create_task_name_fn", Mock(return_value="task_name")
        )
        self.add_stack_delete_policy_to_task_role_fn = kwargs.get(
            "add_stack_delete_policy_to_task_role_fn", Mock()
        )
        self.update_conduit_stack_resources_fn = kwargs.get(
            "update_conduit_stack_resources_fn", Mock(return_value=f"task-{task_name}")
        )
        self.wait_for_cloudformation_to_reach_status_fn = kwargs.get(
            "wait_for_cloudformation_to_reach_status_fn", Mock()
        )

        self.subprocess = kwargs.get("subprocess", Mock(return_value="task_name"))
        self.echo_fn = kwargs.get("echo_fn", Mock())
        # FIX: the kwarg key used to be "get_parameter_name", inconsistent with
        # every other "*_fn" key. Accept the conventional key first and keep
        # the old key as a backward-compatible fallback.
        self.get_parameter_name_fn = kwargs.get(
            "get_parameter_name_fn",
            kwargs.get("get_parameter_name", Mock(return_value="parameter_name")),
        )

    def params(self):
        """Return the keyword arguments for Conduit.__init__."""
        return {
            "application": self.application,
            "subprocess_fn": self.subprocess,
            "echo_fn": self.echo_fn,
            "addon_client_is_running_fn": self.addon_client_is_running_fn,
            "connect_to_addon_client_task_fn": self.connect_to_addon_client_task_fn,
            "create_addon_client_task_fn": self.create_addon_client_task_fn,
            "create_postgres_admin_task_fn": self.create_postgres_admin_task_fn,
            "get_addon_type_fn": self.get_addon_type_fn,
            "get_cluster_arn_fn": self.get_cluster_arn_fn,
            "get_or_create_task_name_fn": self.get_or_create_task_name_fn,
            "add_stack_delete_policy_to_task_role_fn": self.add_stack_delete_policy_to_task_role_fn,
            "update_conduit_stack_resources_fn": self.update_conduit_stack_resources_fn,
            "wait_for_cloudformation_to_reach_status_fn": self.wait_for_cloudformation_to_reach_status_fn,
            "get_parameter_name_fn": self.get_parameter_name_fn,
        }


@pytest.mark.parametrize(
    "app_name, addon_type, addon_name, access",
    [
        ("app_1", "postgres", "custom-name-postgres", "read"),
        ("app_2", "postgres", "custom-name-rds-postgres", "read"),
        ("app_1", "redis", "custom-name-redis", "read"),
        ("app_1", "opensearch", "custom-name-opensearch", "read"),
    ],
)
def test_conduit(app_name, addon_type, addon_name, access):
    """Happy path: no task running, so start() creates, updates and connects."""
    conduit_mocks = ConduitMocks(app_name, addon_type)
    conduit = Conduit(**conduit_mocks.params())
    ecs_client = conduit.application.environments[env].session.client("ecs")
    ssm_client = conduit.application.environments[env].session.client("ssm")
    cloudformation_client = conduit.application.environments[env].session.client("cloudformation")
    iam_client = conduit.application.environments[env].session.client("iam")
    secretsmanager_client = conduit.application.environments[env].session.client("secretsmanager")

    conduit.start(env, addon_name, access)

    conduit.addon_client_is_running_fn.assert_called_once_with(ecs_client, cluster_name, task_name)
    conduit.connect_to_addon_client_task_fn.assert_called_once_with(
        ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name
    )
    conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name)
    conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env)
    conduit.get_or_create_task_name_fn.assert_called_once_with(
        ssm_client, app_name, env, addon_name, "parameter_name"
    )

    conduit.add_stack_delete_policy_to_task_role_fn.assert_called_once_with(
        cloudformation_client, iam_client, task_name
    )
    conduit.update_conduit_stack_resources_fn.assert_called_once_with(
        cloudformation_client,
        iam_client,
        ssm_client,
        app_name,
        env,
        addon_type,
        addon_name,
        task_name,
        "parameter_name",
        access,
    )
    conduit.wait_for_cloudformation_to_reach_status_fn.assert_called_once_with(
        cloudformation_client, "stack_update_complete", f"task-{task_name}"
    )
    conduit.create_addon_client_task_fn.assert_called_once_with(
        iam_client,
        ssm_client,
        secretsmanager_client,
        conduit.subprocess_fn,
        conduit.application,
        env,
        addon_type,
        addon_name,
        task_name,
        access,
    )

    conduit_mocks.echo_fn.assert_has_calls(
        [
            call("Creating conduit task"),
            call("Updating conduit task"),
            call("Waiting for conduit task update to complete..."),
            call("Connecting to conduit task"),
        ]
    )


def test_conduit_client_already_running():
    """When a conduit task is already running, start() only connects."""
    conduit_mocks = ConduitMocks(
        app_name, addon_type, addon_client_is_running_fn=Mock(return_value=True)
    )
    conduit = Conduit(**conduit_mocks.params())
    ecs_client = conduit.application.environments[env].session.client("ecs")
    ssm_client = conduit.application.environments[env].session.client("ssm")

    conduit.start(env, addon_name, "read")

    conduit.addon_client_is_running_fn.assert_called_once_with(ecs_client, cluster_name, task_name)
    conduit.connect_to_addon_client_task_fn.assert_called_once_with(
        ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name
    )
    conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name)
    conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env)
    conduit.get_or_create_task_name_fn.assert_called_once_with(
        ssm_client, app_name, env, addon_name, "parameter_name"
    )
    conduit.add_stack_delete_policy_to_task_role_fn.assert_not_called()
    conduit.update_conduit_stack_resources_fn.assert_not_called()
    conduit.create_addon_client_task_fn.assert_not_called()

    conduit_mocks.echo_fn.assert_called_once_with("Connecting to conduit task")
def test_conduit_domain_when_no_cluster_exists():
    """start() propagates NoClusterError after resolving the addon type."""
    conduit_mocks = ConduitMocks(
        app_name, addon_type, get_cluster_arn_fn=Mock(side_effect=NoClusterError())
    )
    conduit = Conduit(**conduit_mocks.params())
    ecs_client = conduit.application.environments[env].session.client("ecs")
    ssm_client = conduit.application.environments[env].session.client("ssm")

    with pytest.raises(NoClusterError):
        conduit.start(env, addon_name)

    # FIX: these assertions were previously indented inside the pytest.raises
    # block after the raising call, so they never executed.
    conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name)
    conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env)


def test_conduit_domain_when_no_connection_secret_exists():
    """start() propagates SecretNotFoundError raised while creating the task."""
    conduit_mocks = ConduitMocks(
        app_name,
        addon_type,
        addon_client_is_running_fn=Mock(return_value=False),
        create_addon_client_task_fn=Mock(side_effect=SecretNotFoundError()),
    )

    conduit = Conduit(**conduit_mocks.params())
    ecs_client = conduit.application.environments[env].session.client("ecs")
    ssm_client = conduit.application.environments[env].session.client("ssm")

    with pytest.raises(SecretNotFoundError):
        conduit.start(env, addon_name)

    # FIX: previously dead (inside the raises block); also removed a duplicated
    # get_cluster_arn_fn assertion.
    conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name)
    conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env)
    conduit.get_or_create_task_name_fn.assert_called_once_with(
        ssm_client, app_name, env, addon_name, "parameter_name"
    )


def test_conduit_domain_when_client_task_fails_to_start():
    """start() propagates CreateTaskTimeoutError from the connect step."""
    conduit_mocks = ConduitMocks(
        app_name,
        addon_type,
        connect_to_addon_client_task_fn=Mock(side_effect=CreateTaskTimeoutError()),
    )
    conduit = Conduit(**conduit_mocks.params())
    ecs_client = conduit.application.environments[env].session.client("ecs")
    ssm_client = conduit.application.environments[env].session.client("ssm")

    with pytest.raises(CreateTaskTimeoutError):
        conduit.start(env, addon_name)

    # FIX: previously dead assertions. Since no task was running, the create
    # and stack-update steps DO run before connect raises, so the old
    # assert_not_called expectations were wrong and are corrected here.
    conduit.addon_client_is_running_fn.assert_called_once_with(ecs_client, cluster_name, task_name)
    conduit.connect_to_addon_client_task_fn.assert_called_once_with(
        ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name
    )
    conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name)
    conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env)
    conduit.get_or_create_task_name_fn.assert_called_once_with(
        ssm_client, app_name, env, addon_name, "parameter_name"
    )
    conduit.create_addon_client_task_fn.assert_called_once()
    conduit.add_stack_delete_policy_to_task_role_fn.assert_called_once()
    conduit.update_conduit_stack_resources_fn.assert_called_once()


def test_conduit_domain_when_addon_type_is_invalid():
    """start() propagates InvalidAddonTypeError before any task activity."""
    addon_name = "invalid_addon"
    addon_type = "invalid_addon_type"
    conduit_mocks = ConduitMocks(
        app_name,
        addon_type,
        get_addon_type_fn=Mock(side_effect=InvalidAddonTypeError(addon_type=addon_type)),
    )

    conduit = Conduit(**conduit_mocks.params())

    with pytest.raises(InvalidAddonTypeError):
        conduit.start(env, addon_name)

    # FIX: get_addon_type_fn raises before the running-task check, so the old
    # (dead) assert_called_once_with expectation was wrong.
    conduit.addon_client_is_running_fn.assert_not_called()


def test_conduit_domain_when_addon_does_not_exist():
    """start() propagates AddonNotFoundError before any task activity."""
    addon_name = "addon_doesnt_exist"
    conduit_mocks = ConduitMocks(
        app_name, addon_type, get_addon_type_fn=Mock(side_effect=AddonNotFoundError())
    )

    conduit = Conduit(**conduit_mocks.params())

    with pytest.raises(AddonNotFoundError):
        conduit.start(env, addon_name)

    conduit.addon_client_is_running_fn.assert_not_called()


def test_conduit_domain_when_no_addon_config_parameter_exists():
    """start() propagates ParameterNotFoundError before any task activity."""
    addon_name = "parameter_doesnt_exist"
    conduit_mocks = ConduitMocks(
        app_name, addon_type, get_addon_type_fn=Mock(side_effect=ParameterNotFoundError())
    )

    conduit = Conduit(**conduit_mocks.params())

    with pytest.raises(ParameterNotFoundError):
        conduit.start(env, addon_name)

    conduit.addon_client_is_running_fn.assert_not_called()
dbt_platform_helper.providers.aws import SecretNotFoundError +from dbt_platform_helper.providers.aws import get_connection_secret_arn +from dbt_platform_helper.providers.aws import ( + get_postgres_connection_data_updated_with_master_secret, +) + +env = "development" + + +@mock_aws +def test_update_postgres_parameter_with_master_secret(): + session = boto3.session.Session() + parameter_name = "test-parameter" + ssm_client = session.client("ssm") + secretsmanager_client = session.client("secretsmanager") + ssm_client.put_parameter( + Name=parameter_name, + Value='{"username": "read-only-user", "password": ">G12345", "host": "test.com", "port": 5432}', + Type="String", + ) + secret_arn = session.client("secretsmanager").create_secret( + Name="master-secret", SecretString='{"username": "postgres", "password": ">G6789"}' + )["ARN"] + + updated_parameter_value = get_postgres_connection_data_updated_with_master_secret( + ssm_client, secretsmanager_client, parameter_name, secret_arn + ) + + assert updated_parameter_value == { + "username": "postgres", + "password": "%3EG6789", + "host": "test.com", + "port": 5432, + } + + +@mock_aws +def test_get_connection_secret_arn_from_secrets_manager(mock_application): + """Test that, given app, environment and secret name strings, + get_connection_secret_arn returns an ARN from secrets manager.""" + + secret_name = f"/copilot/{mock_application.name}/development/secrets/POSTGRES" + mock_secretsmanager = boto3.client("secretsmanager") + mock_secretsmanager.create_secret( + Name=secret_name, + SecretString="something-secret", + ) + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + + arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) + + assert arn.startswith( + "arn:aws:secretsmanager:eu-west-2:123456789012:secret:" + "/copilot/test-application/development/secrets/POSTGRES-" + ) + + +@mock_aws +def 
test_get_connection_secret_arn_from_parameter_store(mock_application): + """Test that, given app, environment and secret name strings, + get_connection_secret_arn returns an ARN from parameter store.""" + + secret_name = f"/copilot/{mock_application.name}/development/secrets/POSTGRES" + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + + ssm_client.put_parameter( + Name=secret_name, + Value="something-secret", + Type="SecureString", + ) + + arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) + + assert ( + arn + == "arn:aws:ssm:eu-west-2:123456789012:parameter/copilot/test-application/development/secrets/POSTGRES" + ) + + +@mock_aws +def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): + """Test that, given app, environment and secret name strings, + get_connection_secret_arn raises an exception when no matching secret exists + in secrets manager or parameter store.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + + with pytest.raises(SecretNotFoundError): + get_connection_secret_arn(ssm_client, secrets_client, "POSTGRES") diff --git a/tests/platform_helper/providers/test_cloudformation.py b/tests/platform_helper/providers/test_cloudformation.py new file mode 100644 index 000000000..432b97e45 --- /dev/null +++ b/tests/platform_helper/providers/test_cloudformation.py @@ -0,0 +1,155 @@ +import json +from unittest.mock import Mock +from unittest.mock import patch + +import boto3 +import pytest +from cfn_tools import load_yaml +from moto import mock_aws + +from dbt_platform_helper.providers.cloudformation import ( + add_stack_delete_policy_to_task_role, +) +from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources +from dbt_platform_helper.providers.cloudformation 
import ( + wait_for_cloudformation_to_reach_status, +) +from tests.platform_helper.conftest import mock_parameter_name +from tests.platform_helper.conftest import mock_task_name + +env = "development" + + +@mock_aws +@pytest.mark.parametrize( + "addon_type, addon_name, parameter_suffix, env", + [ + ("postgres", "custom-name-postgres", "_READ_ONLY", "development"), + ("postgres", "custom-name-rds-postgres", "_READ_ONLY", "development"), + ("redis", "custom-name-redis", "", "development"), + ("opensearch", "custom-name-opensearch", "", "development"), + ("postgres", "custom-prod-name-postgres", "", "production"), + ], +) +def test_update_conduit_stack_resources( + mock_stack, addon_type, addon_name, parameter_suffix, env, mock_application +): + """Test that, given app, env and addon name update_conduit_stack_resources + updates the conduit CloudFormation stack to add DeletionPolicy:Retain and + subscription filter to the LogGroup.""" + + boto3.client("iam").create_role( + RoleName="CWLtoSubscriptionFilterRole", + AssumeRolePolicyDocument="123", + ) + + ssm_response = { + "prod": "arn:aws:logs:eu-west-2:prod_account_id:destination:test_log_destination", + "dev": "arn:aws:logs:eu-west-2:dev_account_id:destination:test_log_destination", + } + boto3.client("ssm").put_parameter( + Name="/copilot/tools/central_log_groups", + Value=json.dumps(ssm_response), + Type="String", + ) + + mock_stack(addon_name) + task_name = mock_task_name(addon_name) + parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) + cloudformation_client = mock_application.environments[env].session.client("cloudformation") + iam_client = mock_application.environments[env].session.client("iam") + ssm_client = mock_application.environments[env].session.client("ssm") + + update_conduit_stack_resources( + cloudformation_client, + iam_client, + ssm_client, + mock_application.name, + env, + addon_type, + addon_name, + task_name, + parameter_name, + "read", + ) + + template = 
boto3.client("cloudformation").get_template(StackName=f"task-{task_name}") + template_yml = load_yaml(template["TemplateBody"]) + + assert template_yml["Resources"]["LogGroup"]["DeletionPolicy"] == "Retain" + assert template_yml["Resources"]["TaskNameParameter"]["Properties"]["Name"] == parameter_name + assert ( + template_yml["Resources"]["SubscriptionFilter"]["Properties"]["LogGroupName"] + == f"/copilot/{task_name}" + ) + assert ("dev_account_id" if "dev" in env else "prod_account_id") in template_yml["Resources"][ + "SubscriptionFilter" + ]["Properties"]["DestinationArn"] + assert ( + template_yml["Resources"]["SubscriptionFilter"]["Properties"]["FilterName"] + == f"/copilot/conduit/{mock_application.name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit('-', 1)[1]}/read" + ) + + +@mock_aws +@pytest.mark.parametrize( + "addon_name", + ["postgres", "redis", "opensearch", "rds-postgres"], +) +@patch("time.sleep", return_value=None) +def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, mock_application): + """Test that, given app, env and addon name + add_stack_delete_policy_to_task_role adds a policy to the IAM role in a + CloudFormation stack.""" + + task_name = mock_task_name(addon_name) + stack_name = f"task-{task_name}" + cloudformation_client = mock_application.environments[env].session.client("cloudformation") + iam_client = mock_application.environments[env].session.client("iam") + + mock_stack(addon_name) + mock_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Action": ["cloudformation:DeleteStack"], + "Effect": "Allow", + "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", + }, + ], + } + + add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name) + + stack_resources = boto3.client("cloudformation").list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + policy_name = None + policy_document = None + for resource in stack_resources: + if 
resource["LogicalResourceId"] == "DefaultTaskRole": + policy = boto3.client("iam").get_role_policy( + RoleName=resource["PhysicalResourceId"], PolicyName="DeleteCloudFormationStack" + ) + policy_name = policy["PolicyName"] + policy_document = policy["PolicyDocument"] + + assert policy_name == "DeleteCloudFormationStack" + assert policy_document == mock_policy + + +def test_wait_for_cloudformation_to_reach_status(): + + cloudformation_client = Mock() + mock_return = Mock() + mock_waiter = Mock(return_value=mock_return) + cloudformation_client.get_waiter = mock_waiter + + wait_for_cloudformation_to_reach_status( + cloudformation_client, "stack_update_complete", "task-stack-name" + ) + mock_waiter.assert_called() + mock_return.wait.assert_called_with( + StackName="task-stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} + ) diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py new file mode 100644 index 000000000..ae3426c47 --- /dev/null +++ b/tests/platform_helper/providers/test_copilot.py @@ -0,0 +1,731 @@ +import json +from unittest.mock import Mock +from unittest.mock import patch + +import boto3 +import pytest +from botocore.exceptions import ClientError +from moto import mock_aws + +from dbt_platform_helper.providers.aws import SecretNotFoundError +from dbt_platform_helper.providers.copilot import AddonNotFoundError +from dbt_platform_helper.providers.copilot import AddonTypeMissingFromConfigError +from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError +from dbt_platform_helper.providers.copilot import InvalidAddonTypeError +from dbt_platform_helper.providers.copilot import NoClusterError +from dbt_platform_helper.providers.copilot import ParameterNotFoundError +from dbt_platform_helper.providers.copilot import addon_client_is_running +from dbt_platform_helper.providers.copilot import connect_to_addon_client_task +from dbt_platform_helper.providers.copilot import 
create_addon_client_task +from dbt_platform_helper.providers.copilot import create_postgres_admin_task +from dbt_platform_helper.providers.copilot import get_addon_type +from dbt_platform_helper.providers.copilot import get_cluster_arn +from dbt_platform_helper.providers.copilot import get_or_create_task_name +from dbt_platform_helper.providers.copilot import get_parameter_name +from dbt_platform_helper.providers.copilot import normalise_secret_name +from tests.platform_helper.conftest import NoSuchEntityException +from tests.platform_helper.conftest import add_addon_config_parameter +from tests.platform_helper.conftest import expected_connection_secret_name +from tests.platform_helper.conftest import mock_parameter_name +from tests.platform_helper.conftest import mock_task_name + +env = "development" + + +@pytest.mark.parametrize( + "test_string", + [ + ("app-rds-postgres", "APP_RDS_POSTGRES"), + ("APP-POSTGRES", "APP_POSTGRES"), + ("APP-OpenSearch", "APP_OPENSEARCH"), + ], +) +def test_normalise_secret_name(test_string): + """Test that given an addon name, normalise_secret_name produces the + expected result.""" + + assert normalise_secret_name(test_string[0]) == test_string[1] + + +@mock_aws +def test_get_cluster_arn(mocked_cluster, mock_application): + """Test that, given app and environment strings, get_cluster_arn returns the + arn of a cluster tagged with these strings.""" + + assert ( + get_cluster_arn( + mock_application.environments[env].session.client("ecs"), mock_application.name, env + ) + == mocked_cluster["cluster"]["clusterArn"] + ) + + +@mock_aws +def test_get_cluster_arn_when_there_is_no_cluster(mock_application): + """Test that, given app and environment strings, get_cluster_arn raises an + exception when no cluster tagged with these strings exists.""" + + env = "staging" + + with pytest.raises(NoClusterError): + get_cluster_arn( + mock_application.environments[env].session.client("ecs"), mock_application.name, env + ) + + +@mock_aws +@patch( # 
Nested function within provider function + "dbt_platform_helper.providers.copilot.get_postgres_connection_data_updated_with_master_secret", + return_value="connection string", +) +def test_create_postgres_admin_task(mock_update_parameter, mock_application): + + addon_name = "dummy-postgres" + master_secret_name = f"/copilot/{mock_application.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_manager_client = mock_application.environments[env].session.client("secretsmanager") + + boto3.client("ssm").put_parameter( + Name=master_secret_name, Value="master-secret-arn", Type="String" + ) + mock_subprocess = Mock() + + create_postgres_admin_task( + ssm_client, + secrets_manager_client, + mock_subprocess, + mock_application, + addon_name, + "postgres", + env, + "POSTGRES_SECRET_NAME", + "test-task", + ) + + mock_update_parameter.assert_called_once_with( + ssm_client, + secrets_manager_client, + "POSTGRES_SECRET_NAME_READ_ONLY_USER", + "master-secret-arn", + ) + + mock_subprocess.call.assert_called_once_with( + f"copilot task run --app {mock_application.name} --env {env} " + f"--task-group-name test-task " + "--image public.ecr.aws/uktrade/tunnel:postgres " + "--env-vars CONNECTION_SECRET='\"connection string\"' " + "--platform-os linux " + "--platform-arch arm64", + shell=True, + ) + + +@pytest.mark.parametrize( + "access", + [ + "read", + "write", + "admin", + ], +) +@pytest.mark.parametrize( + "addon_type, addon_name", + [ + ("redis", "custom-name-redis"), + ("opensearch", "custom-name-opensearch"), + ], +) +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +def test_create_redis_or_opensearch_addon_client_task( + get_connection_secret_arn, + access, + addon_type, + addon_name, +): + """Test that, given app, env and permissions, create_addon_client_task calls + get_connection_secret_arn with the default secret name and 
subsequently + subprocess.call with the correct secret ARN and execution role.""" + + mock_application = Mock() + mock_application.name = "test-application" + mock_application.environments = {"development": Mock()} + task_name = mock_task_name(addon_name) + mock_subprocess = Mock() + + iam_client = mock_application.environments[env].session.client("iam") + ssm_client = mock_application.environments[env].session.client("ssm") + secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") + + create_addon_client_task( + iam_client, + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + env, + addon_type, + addon_name, + task_name, + access, + ) + + secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) + get_connection_secret_arn.assert_called_once_with( + ssm_client, secretsmanager_client, secret_name + ) + mock_subprocess.call.assert_called() + mock_subprocess.call.assert_called_once_with( + f"copilot task run --app test-application --env {env} " + f"--task-group-name {task_name} " + f"--execution-role {addon_name}-{mock_application.name}-{env}-conduitEcsTask " + f"--image public.ecr.aws/uktrade/tunnel:{addon_type} " + "--secrets CONNECTION_SECRET=test-arn " + "--platform-os linux " + "--platform-arch arm64", + shell=True, + ) + + +@pytest.mark.parametrize( + "access", + [ + "read", + "write", + ], +) +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +def test_create_postgres_addon_client_task( + get_connection_secret_arn, + access, +): + + addon_name = "custom-name-postgres" + addon_type = "postgres" + mock_application = Mock() + mock_application.name = "test-application" + mock_application.environments = {"development": Mock()} + task_name = mock_task_name(addon_name) + mock_subprocess = Mock() + + iam_client = mock_application.environments[env].session.client("iam") + ssm_client = 
mock_application.environments[env].session.client("ssm") + secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") + + create_addon_client_task( + iam_client, + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + env, + "postgres", + addon_name, + task_name, + access, + ) + secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) + get_connection_secret_arn.assert_called_once_with( + ssm_client, secretsmanager_client, secret_name + ) + mock_subprocess.call.assert_called() + mock_subprocess.call.assert_called_once_with( + f"copilot task run --app test-application --env {env} " + f"--task-group-name {task_name} " + f"--execution-role {addon_name}-{mock_application.name}-{env}-conduitEcsTask " + f"--image public.ecr.aws/uktrade/tunnel:{addon_type} " + "--secrets CONNECTION_SECRET=test-arn " + "--platform-os linux " + "--platform-arch arm64", + shell=True, + ) + + +@patch("dbt_platform_helper.providers.copilot.create_postgres_admin_task") +def test_create_postgres_addon_client_task_admin( + mock_create_postgres_admin_task, + mock_application, +): + + addon_name = "custom-name-postgres" + task_name = mock_task_name(addon_name) + mock_subprocess = Mock() + + iam_client = mock_application.environments[env].session.client("iam") + ssm_client = mock_application.environments[env].session.client("ssm") + secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") + create_addon_client_task( + iam_client, + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + env, + "postgres", + addon_name, + task_name, + "admin", + ) + secret_name = expected_connection_secret_name(mock_application, "postgres", addon_name, "admin") + + mock_create_postgres_admin_task.assert_called_once_with( + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + addon_name, + "postgres", + env, + secret_name, + task_name, + ) + + 
+@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( + get_connection_secret_arn, + mock_application, +): + """Test that, given app, env and permissions, create_addon_client_task calls + get_connection_secret_arn with the default secret name and subsequently + subprocess.call with the correct secret ARN but no execution role.""" + + addon_name = "postgres" + addon_type = "custom-name-postgres" + access = "read" + mock_subprocess = Mock() + mock_application.environments[env] = Mock() + mock_application.environments[env].session.client.return_value = Mock() + mock_application.environments[env].session.client.return_value.get_role.side_effect = ( + NoSuchEntityException() + ) + task_name = mock_task_name(addon_name) + + ssm_client = mock_application.environments[env].session.client("ssm") + secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") + + create_addon_client_task( + mock_application.environments[env].session.client("iam"), + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + env, + addon_type, + addon_name, + task_name, + access, + ) + + secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) + get_connection_secret_arn.assert_called_once_with( + ssm_client, secretsmanager_client, secret_name + ) + + mock_subprocess.call.assert_called_once_with( + f"copilot task run --app test-application --env {env} " + f"--task-group-name {task_name} " + f"--image public.ecr.aws/uktrade/tunnel:{addon_type} " + "--secrets CONNECTION_SECRET=test-arn " + "--platform-os linux " + "--platform-arch arm64", + shell=True, + ) + + +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch("click.secho") +def test_create_addon_client_task_abort_with_message_on_other_exceptions( + mock_secho, + 
get_connection_secret_arn, + mock_application, +): + """Test that if an unexpected ClientError is throw when trying to get the + execution role, create_addon_client_task aborts with a message.""" + + addon_name = "postgres" + addon_type = "custom-name-postgres" + access = "read" + mock_subprocess = Mock() + mock_application.environments[env] = Mock() + mock_application.environments[env].session.client.return_value = Mock() + mock_application.environments[env].session.client.return_value.get_role.side_effect = ( + ClientError( + operation_name="something_else", + error_response={"Error": {"Message": "Something went wrong"}}, + ) + ) + task_name = mock_task_name(addon_name) + iam_client = mock_application.environments[env].session.client("iam") + ssm_client = mock_application.environments[env].session.client("ssm") + secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") + + with pytest.raises(SystemExit) as exc_info: + create_addon_client_task( + iam_client, + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + env, + addon_type, + addon_name, + task_name, + access, + ) + + assert exc_info.value.code == 1 + assert mock_secho.call_count > 0 + assert ( + mock_secho.call_args[0][0] + == f"Error: cannot obtain Role {addon_name}-{mock_application.name}-{env}-conduitEcsTask: Something went wrong" + ) + + +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn") +def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn): + """Test that, given app, environment and secret name strings, + create_addon_client_task raises a NoConnectionSecretError and does not call + subprocess.call.""" + + mock_application = Mock() + mock_application.name = "test-application" + mock_application.environments = {"development": Mock()} + mock_subprocess = Mock() + iam_client = mock_application.environments[env].session.client("iam") + ssm_client = 
mock_application.environments[env].session.client("ssm") + secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") + + get_connection_secret_arn.side_effect = SecretNotFoundError + + with pytest.raises(SecretNotFoundError): + create_addon_client_task( + iam_client, + ssm_client, + secretsmanager_client, + mock_subprocess, + mock_application, + env, + "postgres", + "named-postgres", + mock_task_name("named-postgres"), + "read", + ) + + mock_subprocess.call.assert_not_called() + + +@pytest.mark.parametrize( + "addon_type", + ["postgres", "redis", "opensearch"], +) +def test_addon_client_is_running( + mock_cluster_client_task, mocked_cluster, addon_type, mock_application +): + """Test that, given cluster ARN, addon type and with a running agent, + addon_client_is_running returns True.""" + + mocked_cluster_for_client = mock_cluster_client_task(addon_type) + mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] + ecs_client = mock_application.environments[env].session.client("ecs") + + with patch( + "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client + ): + assert addon_client_is_running(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) + + +@pytest.mark.parametrize( + "addon_type", + ["postgres", "redis", "opensearch"], +) +def test_addon_client_is_running_when_no_client_task_running( + mock_cluster_client_task, mocked_cluster, addon_type, mock_application +): + """Test that, given cluster ARN, addon type and without a running client + task, addon_client_is_running returns False.""" + + mocked_cluster_for_client = mock_cluster_client_task(addon_type, task_running=False) + mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] + ecs_client = mock_application.environments[env].session.client("ecs") + + with patch( + "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client + ): + assert ( + addon_client_is_running(ecs_client, 
mocked_cluster_arn, mock_task_name(addon_type)) + is False + ) + + +@mock_aws +@pytest.mark.parametrize( + "addon_type", + ["postgres", "redis", "opensearch"], +) +def test_addon_client_is_running_when_no_client_agent_running( + addon_type, mock_application, mocked_cluster +): + ecs_client = mock_application.environments[env].session.client("ecs") + cluster_arn = mocked_cluster["cluster"]["clusterArn"] + task_name = "some-task-name" + ec2 = boto3.resource("ec2") + vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16") + subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18") + + mocked_task_definition_arn = ecs_client.register_task_definition( + family=f"copilot-foobar", + requiresCompatibilities=["FARGATE"], + networkMode="awsvpc", + containerDefinitions=[ + { + "name": "test_container", + "image": "test_image", + "cpu": 256, + "memory": 512, + "essential": True, + } + ], + )["taskDefinition"]["taskDefinitionArn"] + ecs_client.run_task( + taskDefinition=mocked_task_definition_arn, + launchType="FARGATE", + networkConfiguration={ + "awsvpcConfiguration": { + "subnets": [subnet.id], + "securityGroups": ["something-sg"], + } + }, + ) + + assert addon_client_is_running(ecs_client, cluster_arn, task_name) is False + + +@mock_aws +def test_get_or_create_task_name(mock_application): + """Test that get_or_create_task_name retrieves the task name from the + parameter store when it has been stored.""" + + addon_name = "app-postgres" + parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) + mock_application.environments[env].session.client("ssm") + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name=parameter_name, + Type="String", + Value=mock_task_name(addon_name), + ) + + task_name = get_or_create_task_name( + mock_ssm, mock_application.name, env, addon_name, parameter_name + ) + + assert task_name == mock_task_name(addon_name) + + +@mock_aws +def test_get_or_create_task_name_when_name_does_not_exist(mock_application): + """Test that 
get_or_create_task_name creates the task name and appends it + with a 12 digit lowercase alphanumeric string when it does not exist in the + parameter store.""" + + addon_name = "app-postgres" + ssm_client = mock_application.environments[env].session.client("ssm") + parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) + task_name = get_or_create_task_name( + ssm_client, mock_application.name, env, addon_name, parameter_name + ) + random_id = task_name.rsplit("-", 1)[1] + + assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] + assert random_id.isalnum() and random_id.islower() and len(random_id) == 12 + + +@mock_aws +@pytest.mark.parametrize( + "access", + [ + "read", + "write", + "admin", + ], +) +@pytest.mark.parametrize( + "addon_type, addon_name", + [ + ("postgres", "custom-name-postgres"), + ("postgres", "custom-name-rds-postgres"), + ("redis", "custom-name-redis"), + ("opensearch", "custom-name-opensearch"), + ("s3", "custon-name-s3"), + ], +) +def test_get_parameter_name(access, addon_type, addon_name, mock_application): + """Test that get_parameter_name builds the correct parameter name given the + addon_name, addon_type and permission.""" + + parameter_name = get_parameter_name( + mock_application.name, "development", addon_type, addon_name, access + ) + assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) + + +@pytest.mark.parametrize( + "addon_type", + ["postgres", "redis", "opensearch"], +) +@patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) +def test_connect_to_addon_client_task(addon_client_is_running, addon_type, mock_application): + """ + Test that, given app, env, ECS cluster ARN and addon type, + connect_to_addon_client_task calls addon_client_is_running with cluster ARN + and addon type. + + It then subsequently calls subprocess.call with the correct app, env and + addon type. 
+ """ + + task_name = mock_task_name(addon_type) + ecs_client = mock_application.environments[env].session.client("ecs") + mock_subprocess = Mock() + + connect_to_addon_client_task( + ecs_client, mock_subprocess, mock_application.name, env, "test-arn", task_name + ) + + addon_client_is_running.assert_called_once_with(ecs_client, "test-arn", task_name) + mock_subprocess.call.assert_called_once_with( + f"copilot task exec --app test-application --env {env} " + f"--name {task_name} " + f"--command bash", + shell=True, + ) + + +# Todo: Implement this test +# @patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) +# def test_connect_to_addon_client_task_waits_for_command_agent(addon_client_is_running, mock_application): +# task_name = mock_task_name("postgres") # Addon type for this test does not matter +# ecs_client = mock_application.environments[env].session.client("ecs") +# mock_subprocess = Mock() +# # We want this to throw InvalidParameterException the first time, then behave as normal +# +# connect_to_addon_client_task( +# ecs_client, mock_subprocess, mock_application.name, env, "test-arn", task_name +# ) +# +# # Assert "Unable to connect, execute command agent probably isn’t running yet" in output +# # If it doesn't bomb out with CreateTaskTimeoutError all is good + + +@pytest.mark.parametrize( + "addon_type", + ["postgres", "redis", "opensearch"], +) +@patch("time.sleep", return_value=None) +@patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=False) +def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( + addon_client_is_running, sleep, addon_type, mock_application +): + """Test that, given app, env, ECS cluster ARN and addon type, when the + client agent fails to start, connect_to_addon_client_task calls + addon_client_is_running with cluster ARN and addon type 15 times, but does + not call subprocess.call.""" + + task_name = mock_task_name(addon_type) + ecs_client = 
mock_application.environments[env].session.client("ecs") + mock_subprocess = Mock() + + with pytest.raises(CreateTaskTimeoutError): + connect_to_addon_client_task( + ecs_client, mock_subprocess, mock_application, env, "test-arn", task_name + ) + + addon_client_is_running.assert_called_with(ecs_client, "test-arn", task_name) + assert addon_client_is_running.call_count == 15 + mock_subprocess.call.assert_not_called() + + +@mock_aws +@pytest.mark.parametrize( + "addon_name, expected_type", + [ + ("custom-name-postgres", "postgres"), + ("custom-name-redis", "redis"), + ("custom-name-opensearch", "opensearch"), + ], +) +def test_get_addon_type(addon_name, expected_type, mock_application): + """Test that get_addon_type returns the expected addon type.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + + add_addon_config_parameter() + addon_type = get_addon_type(ssm_client, mock_application.name, env, addon_name) + + assert addon_type == expected_type + + +@mock_aws +def test_get_addon_type_with_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon is not + found in the config file.""" + + add_addon_config_parameter({"different-name": {"type": "redis"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(AddonNotFoundError): + get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon config + parameter is not found.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", + Type="String", + Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), + ) + + with 
pytest.raises(ParameterNotFoundError): + get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_invalid_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an invalid addon type.""" + + add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(InvalidAddonTypeError): + get_addon_type(ssm_client, mock_application.name, env, "invalid-extension") + + +@mock_aws +def test_get_addon_type_with_blank_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"blank-extension": {}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(AddonTypeMissingFromConfigError): + get_addon_type(ssm_client, mock_application.name, env, "blank-extension") + + +@mock_aws +def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(AddonTypeMissingFromConfigError): + get_addon_type(ssm_client, mock_application.name, env, "addon-type-unspecified") diff --git a/tests/platform_helper/test_command_conduit.py b/tests/platform_helper/test_command_conduit.py index a0fd45cc2..23a1eed85 100644 --- a/tests/platform_helper/test_command_conduit.py +++ b/tests/platform_helper/test_command_conduit.py @@ -3,32 +3,38 @@ import pytest from click.testing import CliRunner -from moto import mock_aws -from tests.platform_helper.conftest import add_addon_config_parameter +from 
dbt_platform_helper.commands.conduit import conduit +from dbt_platform_helper.providers.aws import SecretNotFoundError +from dbt_platform_helper.providers.copilot import AddonNotFoundError +from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError +from dbt_platform_helper.providers.copilot import InvalidAddonTypeError +from dbt_platform_helper.providers.copilot import NoClusterError +from dbt_platform_helper.providers.copilot import ParameterNotFoundError -@mock_aws @pytest.mark.parametrize( - "addon_type, addon_name", + "addon_name", [ - ("postgres", "custom-name-postgres"), - ("redis", "custom-name-redis"), - ("opensearch", "custom-name-opensearch"), + "custom-name-postgres", + "custom-name-rds-postgres", + "custom-name-redis", + "custom-name-opensearch", ], ) +@patch("dbt_platform_helper.commands.conduit.Conduit") @patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) + "dbt_platform_helper.utils.versioning.running_as_installed_package", + new=Mock(return_value=True), ) -@patch("dbt_platform_helper.commands.conduit.start_conduit") -def test_conduit_command(start_conduit, addon_type, addon_name, validate_version, mock_application): +@patch("dbt_platform_helper.commands.conduit.load_application") +def test_start_conduit(mock_application, mock_conduit_object, addon_name, validate_version): """Test that given an app, env and addon name strings, the conduit command calls start_conduit with app, env, addon type and addon name.""" - from dbt_platform_helper.commands.conduit import conduit - add_addon_config_parameter() + mock_conduit_instance = mock_conduit_object.return_value - CliRunner().invoke( + result = CliRunner().invoke( conduit, [ addon_name, @@ -39,174 +45,69 @@ def test_conduit_command(start_conduit, addon_type, addon_name, validate_version ], ) - validate_version.assert_called_once() - start_conduit.assert_called_once_with( - mock_application, "development", addon_type, addon_name, "read" - 
) - - -@mock_aws -@patch("click.secho") -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) -) -@patch("dbt_platform_helper.commands.conduit.start_conduit") -@patch( - "dbt_platform_helper.commands.conduit.get_addon_type", new=Mock(return_value="mock_addon_type") -) -def test_conduit_command_when_no_cluster_exists(start_conduit, secho, validate_version): - """Test that given an app, env and addon name strings, when there is no ECS - Cluster available, the conduit command handles the NoClusterConduitError - exception.""" - from dbt_platform_helper.commands.conduit import NoClusterConduitError - from dbt_platform_helper.commands.conduit import conduit - - start_conduit.side_effect = NoClusterConduitError - - result = CliRunner().invoke( - conduit, - [ - "mock_addon", - "--app", - "test-application", - "--env", - "development", - ], - ) + assert result.exit_code == 0 - assert result.exit_code == 1 validate_version.assert_called_once() - secho.assert_called_once_with( - """No ECS cluster found for "test-application" in "development" environment.""", fg="red" - ) + mock_conduit_instance.start.assert_called_with("development", addon_name, "read") -@mock_aws -@patch("click.secho") -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) -) -@patch("dbt_platform_helper.commands.conduit.start_conduit") -@patch( - "dbt_platform_helper.commands.conduit.get_addon_type", new=Mock(return_value="mock_addon_type") -) -def test_conduit_command_when_no_connection_secret_exists(start_conduit, secho, validate_version): - """Test that given an app, env and addon name strings, when there is no - connection secret available, the conduit command handles the - NoConnectionSecretError exception.""" - from dbt_platform_helper.commands.conduit import SecretNotFoundConduitError - from dbt_platform_helper.commands.conduit import conduit - - mock_addon_name = "mock_addon" - 
start_conduit.side_effect = SecretNotFoundConduitError(mock_addon_name) - - result = CliRunner().invoke( - conduit, - [ - mock_addon_name, - "--app", - "test-application", - "--env", - "development", - ], - ) - - assert result.exit_code == 1 - validate_version.assert_called_once() - secho.assert_called_once_with( - f"""No secret called "{mock_addon_name}" for "test-application" in "development" environment.""", - fg="red", - ) - - -@mock_aws -@patch("click.secho") -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) +@pytest.mark.parametrize( + "exception_type,exception_input_params,expected_message", + [ + ( + SecretNotFoundError, + {}, + """No secret called "" for "test-application" in "development" environment.""", + ), + (AddonNotFoundError, {}, """Addon "important-db" does not exist."""), + ( + CreateTaskTimeoutError, + {}, + """Client (important-db) ECS task has failed to start for "test-application" in "development" environment.""", + ), + ( + NoClusterError, + {}, + """No ECS cluster found for "test-application" in "development" environment.""", + ), + ( + ParameterNotFoundError, + {}, + """No parameter called "/copilot/applications/test-application/environments/development/addons". 
Try deploying the "test-application" "development" environment.""", + ), + ( + InvalidAddonTypeError, + {"addon_type": "fake-postgres"}, + """Addon type "fake-postgres" is not supported, we support: opensearch, postgres, redis.""", + ), + ], ) -@patch("dbt_platform_helper.commands.conduit.start_conduit") +@patch("dbt_platform_helper.commands.conduit.Conduit") @patch( - "dbt_platform_helper.commands.conduit.get_addon_type", new=Mock(return_value="mock_addon_type") + "dbt_platform_helper.utils.versioning.running_as_installed_package", + new=Mock(return_value=True), ) -def test_conduit_command_when_client_task_fails_to_start(start_conduit, secho, validate_version): - """Test that given an app, env and addon name strings, when the ECS client - task fails to start, the conduit command handles the - TaskConnectionTimeoutError exception.""" - from dbt_platform_helper.commands.conduit import CreateTaskTimeoutConduitError - from dbt_platform_helper.commands.conduit import conduit - - mock_addon_name = "mock_addon" - start_conduit.side_effect = CreateTaskTimeoutConduitError - - result = CliRunner().invoke( - conduit, - [ - mock_addon_name, - "--app", - "test-application", - "--env", - "development", - ], - ) - - assert result.exit_code == 1 - validate_version.assert_called_once() - secho.assert_called_once_with( - f"""Client ({mock_addon_name}) ECS task has failed to start for "test-application" in "development" environment.""", - fg="red", - ) - - -@mock_aws +@patch("dbt_platform_helper.commands.conduit.load_application") @patch("click.secho") -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) -) -@patch("dbt_platform_helper.commands.conduit.start_conduit") -def test_conduit_command_when_addon_type_is_invalid(start_conduit, secho, validate_version): - """Test that given an app, env and addon name strings, if the addon type is - invalid the conduit command handles the exception.""" - from 
dbt_platform_helper.commands.conduit import conduit - - add_addon_config_parameter({"custom-name-postgres": {"type": "nope"}}) - - result = CliRunner().invoke( - conduit, - [ - "custom-name-postgres", - "--app", - "test-application", - "--env", - "development", - ], - ) - - assert result.exit_code == 1 - validate_version.assert_called_once() - start_conduit.assert_not_called() - secho.assert_called_once_with( - """Addon type "nope" is not supported, we support: opensearch, postgres, redis.""", - fg="red", - ) - - -@mock_aws -@patch("click.secho") -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) -) -@patch("dbt_platform_helper.commands.conduit.start_conduit") -def test_conduit_command_when_addon_does_not_exist(start_conduit, secho, validate_version): - """Test that given an app, env and invalid addon name strings, the conduit - command handles the exception.""" - from dbt_platform_helper.commands.conduit import conduit - - add_addon_config_parameter({"non-existent-addon": {"type": "redis"}}) +def test_start_conduit_exception_is_raised( + mock_click, + mock_application, + mock_conduit_object, + validate_version, + exception_type, + exception_input_params, + expected_message, +): + """Test that given an app, env and addon name strings, the conduit command + calls start_conduit with app, env, addon type and addon name.""" + mock_conduit_instance = mock_conduit_object.return_value + mock_conduit_instance.start.side_effect = exception_type(**exception_input_params) + addon_name = "important-db" result = CliRunner().invoke( conduit, [ - "custom-name-postgres", + addon_name, "--app", "test-application", "--env", @@ -214,78 +115,8 @@ def test_conduit_command_when_addon_does_not_exist(start_conduit, secho, validat ], ) - assert result.exit_code == 1 - validate_version.assert_called_once() - start_conduit.assert_not_called() - secho.assert_called_once_with( - """Addon "custom-name-postgres" does not exist.""", - 
fg="red", - ) - - -@mock_aws -@patch("click.secho") -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) -) -def test_conduit_command_when_no_addon_config_parameter_exists(secho, validate_version): - """Test that given an app, env and addon name strings, when there is no - addon config parameter available, the conduit command handles the - ParameterNotFoundConduitError exception.""" - from dbt_platform_helper.commands.conduit import conduit - - result = CliRunner().invoke( - conduit, - [ - "mock_addon", - "--app", - "test-application", - "--env", - "development", - ], - ) + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 - validate_version.assert_called_once() - secho.assert_called_once_with( - f"""No parameter called "/copilot/applications/test-application/environments/development/addons". Try deploying the "test-application" "development" environment.""", - fg="red", - ) - - -@mock_aws -@pytest.mark.parametrize("access", ["read", "write", "admin"]) -@patch( - "dbt_platform_helper.utils.versioning.running_as_installed_package", new=Mock(return_value=True) -) -@patch("dbt_platform_helper.commands.conduit.start_conduit") -@patch("dbt_platform_helper.commands.conduit.get_addon_type", new=Mock(return_value="postgres")) -def test_conduit_command_flags( - start_conduit, - access, - validate_version, - mock_application, -): - """Test that given an app, env, addon name strings and optional permission - flags, the conduit command calls start_conduit with app, env, addon type, - addon name and the correct boolean values.""" - from dbt_platform_helper.commands.conduit import conduit - - mock_addon_name = "mock_addon" - CliRunner().invoke( - conduit, - [ - mock_addon_name, - "--app", - "test-application", - "--env", - "development", - "--access", - f"{access}", - ], - ) validate_version.assert_called_once() - start_conduit.assert_called_once_with( - mock_application, "development", 
"postgres", mock_addon_name, access - ) diff --git a/tests/platform_helper/test_conduit_helpers.py b/tests/platform_helper/test_conduit_helpers.py deleted file mode 100644 index ef0ec4ed5..000000000 --- a/tests/platform_helper/test_conduit_helpers.py +++ /dev/null @@ -1,1038 +0,0 @@ -import json -from unittest.mock import Mock -from unittest.mock import patch - -import boto3 -import pytest -from botocore.exceptions import ClientError -from cfn_tools import load_yaml -from moto import mock_aws - -from tests.platform_helper.conftest import NoSuchEntityException -from tests.platform_helper.conftest import add_addon_config_parameter -from tests.platform_helper.conftest import expected_connection_secret_name -from tests.platform_helper.conftest import mock_parameter_name -from tests.platform_helper.conftest import mock_task_name - - -@pytest.mark.parametrize( - "test_string", - [ - ("app-rds-postgres", "APP_RDS_POSTGRES"), - ("APP-POSTGRES", "APP_POSTGRES"), - ("APP-OpenSearch", "APP_OPENSEARCH"), - ], -) -def test_normalise_secret_name(test_string): - """Test that given an addon name, normalise_secret_name produces the - expected result.""" - from dbt_platform_helper.commands.conduit import normalise_secret_name - - assert normalise_secret_name(test_string[0]) == test_string[1] - - -@mock_aws -def test_get_cluster_arn(mocked_cluster, mock_application): - """Test that, given app and environment strings, get_cluster_arn returns the - arn of a cluster tagged with these strings.""" - from dbt_platform_helper.commands.conduit import get_cluster_arn - - assert ( - get_cluster_arn(mock_application, "development") == mocked_cluster["cluster"]["clusterArn"] - ) - - -@mock_aws -def test_get_cluster_arn_when_there_is_no_cluster(mock_application): - """Test that, given app and environment strings, get_cluster_arn raises an - exception when no cluster tagged with these strings exists.""" - from dbt_platform_helper.commands.conduit import NoClusterConduitError - from 
dbt_platform_helper.commands.conduit import get_cluster_arn - - with pytest.raises(NoClusterConduitError): - get_cluster_arn(mock_application, "staging") - - -@mock_aws -def test_get_connection_secret_arn_from_secrets_manager(mock_application): - """Test that, given app, environment and secret name strings, - get_connection_secret_arn returns an ARN from secrets manager.""" - from dbt_platform_helper.commands.conduit import get_connection_secret_arn - - secret_name = f"/copilot/{mock_application.name}/development/secrets/POSTGRES" - mock_secretsmanager = boto3.client("secretsmanager") - mock_secretsmanager.create_secret( - Name=secret_name, - SecretString="something-secret", - ) - - arn = get_connection_secret_arn(mock_application, "development", secret_name) - - assert arn.startswith( - "arn:aws:secretsmanager:eu-west-2:123456789012:secret:" - "/copilot/test-application/development/secrets/POSTGRES-" - ) - - -@mock_aws -def test_get_connection_secret_arn_from_parameter_store(mock_application): - """Test that, given app, environment and secret name strings, - get_connection_secret_arn returns an ARN from parameter store.""" - from dbt_platform_helper.commands.conduit import get_connection_secret_arn - - secret_name = f"/copilot/{mock_application.name}/development/secrets/POSTGRES" - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=secret_name, - Value="something-secret", - Type="SecureString", - ) - - arn = get_connection_secret_arn(mock_application, "development", secret_name) - - assert ( - arn - == "arn:aws:ssm:eu-west-2:123456789012:parameter/copilot/test-application/development/secrets/POSTGRES" - ) - - -@mock_aws -def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): - """Test that, given app, environment and secret name strings, - get_connection_secret_arn raises an exception when no matching secret exists - in secrets manager or parameter store.""" - from dbt_platform_helper.commands.conduit import 
SecretNotFoundConduitError - from dbt_platform_helper.commands.conduit import get_connection_secret_arn - - with pytest.raises(SecretNotFoundConduitError): - get_connection_secret_arn(mock_application, "development", "POSTGRES") - - -@mock_aws -@patch("subprocess.call") -@patch( - "dbt_platform_helper.commands.conduit.get_postgres_connection_data_updated_with_master_secret", - return_value="connection string", -) -def test_create_postgres_admin_task(mock_update_parameter, mock_subprocess_call, mock_application): - from dbt_platform_helper.commands.conduit import create_postgres_admin_task - from dbt_platform_helper.commands.conduit import normalise_secret_name - - env = "development" - addon_name = "dummy-postgres" - master_secret_name = f"/copilot/{mock_application.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" - boto3.client("ssm").put_parameter( - Name=master_secret_name, Value="master-secret-arn", Type="String" - ) - - create_postgres_admin_task( - mock_application, env, "POSTGRES_SECRET_NAME", "test-task", "postgres", addon_name - ) - - mock_update_parameter.assert_called_once_with( - mock_application.environments[env].session, - "POSTGRES_SECRET_NAME_READ_ONLY_USER", - "master-secret-arn", - ) - mock_subprocess_call.assert_called_once_with( - f"copilot task run --app {mock_application.name} --env {env} " - f"--task-group-name test-task " - "--image public.ecr.aws/uktrade/tunnel:postgres " - "--env-vars CONNECTION_SECRET='\"connection string\"' " - "--platform-os linux " - "--platform-arch arm64", - shell=True, - ) - - -@pytest.mark.parametrize( - "access", - [ - "read", - "write", - "admin", - ], -) -@pytest.mark.parametrize( - "addon_type, addon_name", - [ - ("redis", "custom-name-redis"), - ("opensearch", "custom-name-opensearch"), - ], -) -@patch("subprocess.call") -@patch("dbt_platform_helper.commands.conduit.get_connection_secret_arn", return_value="test-arn") -def test_create_redis_or_opensearch_addon_client_task( - 
get_connection_secret_arn, - subprocess_call, - access, - addon_type, - addon_name, -): - """Test that, given app, env and permissions, create_addon_client_task calls - get_connection_secret_arn with the default secret name and subsequently - subprocess.call with the correct secret ARN and execution role.""" - from dbt_platform_helper.commands.conduit import create_addon_client_task - - env = "development" - mock_application = Mock() - mock_application.name = "test-application" - mock_application.environments = {"development": Mock()} - task_name = mock_task_name(addon_name) - - create_addon_client_task(mock_application, env, addon_type, addon_name, task_name, access) - - secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) - get_connection_secret_arn.assert_called_once_with(mock_application, env, secret_name) - subprocess_call.assert_called_once_with( - f"copilot task run --app test-application --env {env} " - f"--task-group-name {task_name} " - f"--execution-role {addon_name}-{mock_application.name}-{env}-conduitEcsTask " - f"--image public.ecr.aws/uktrade/tunnel:{addon_type} " - "--secrets CONNECTION_SECRET=test-arn " - "--platform-os linux " - "--platform-arch arm64", - shell=True, - ) - - -@patch("dbt_platform_helper.commands.conduit.create_postgres_admin_task") -def test_create_postgres_addon_client_task( - mock_create_postgres_admin_task, - mock_application, -): - from dbt_platform_helper.commands.conduit import create_addon_client_task - - addon_name = "custom-name-postgres" - task_name = mock_task_name(addon_name) - - create_addon_client_task( - mock_application, "development", "postgres", addon_name, task_name, "admin" - ) - secret_name = expected_connection_secret_name(mock_application, "postgres", addon_name, "admin") - - mock_create_postgres_admin_task.assert_called_once_with( - mock_application, "development", secret_name, task_name, "postgres", addon_name - ) - - -@patch("subprocess.call") 
-@patch("dbt_platform_helper.commands.conduit.get_connection_secret_arn", return_value="test-arn") -def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( - get_connection_secret_arn, - subprocess_call, - mock_application, -): - """Test that, given app, env and permissions, create_addon_client_task calls - get_connection_secret_arn with the default secret name and subsequently - subprocess.call with the correct secret ARN but no execution role.""" - from dbt_platform_helper.commands.conduit import create_addon_client_task - - addon_name = "postgres" - addon_type = "custom-name-postgres" - access = "read" - env = "development" - mock_application.environments[env] = Mock() - mock_application.environments[env].session.client.return_value = Mock() - mock_application.environments[env].session.client.return_value.get_role.side_effect = ( - NoSuchEntityException() - ) - task_name = mock_task_name(addon_name) - - create_addon_client_task(mock_application, env, addon_type, addon_name, task_name, access) - - secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) - get_connection_secret_arn.assert_called_once_with(mock_application, env, secret_name) - subprocess_call.assert_called_once_with( - f"copilot task run --app test-application --env {env} " - f"--task-group-name {task_name} " - f"--image public.ecr.aws/uktrade/tunnel:{addon_type} " - "--secrets CONNECTION_SECRET=test-arn " - "--platform-os linux " - "--platform-arch arm64", - shell=True, - ) - - -@patch("subprocess.call") -@patch("dbt_platform_helper.commands.conduit.get_connection_secret_arn", return_value="test-arn") -@patch("click.secho") -def test_create_addon_client_task_abort_with_message_on_other_exceptions( - mock_secho, - get_connection_secret_arn, - subprocess_call, - mock_application, -): - """Test that if an unexpected ClientError is throw when trying to get the - execution role, create_addon_client_task aborts with a message.""" - from 
dbt_platform_helper.commands.conduit import create_addon_client_task - - addon_name = "postgres" - addon_type = "custom-name-postgres" - access = "read" - env = "development" - mock_application.environments[env] = Mock() - mock_application.environments[env].session.client.return_value = Mock() - mock_application.environments[env].session.client.return_value.get_role.side_effect = ( - ClientError( - operation_name="something_else", - error_response={"Error": {"Message": "Something went wrong"}}, - ) - ) - task_name = mock_task_name(addon_name) - - with pytest.raises(SystemExit) as exc_info: - create_addon_client_task(mock_application, env, addon_type, addon_name, task_name, access) - - assert exc_info.value.code == 1 - assert mock_secho.call_count > 0 - assert ( - mock_secho.call_args[0][0] - == f"Error: cannot obtain Role {addon_name}-{mock_application.name}-{env}-conduitEcsTask: Something went wrong" - ) - - -@patch("subprocess.call") -@patch("dbt_platform_helper.commands.conduit.get_connection_secret_arn") -def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn, subprocess_call): - """Test that, given app, environment and secret name strings, - create_addon_client_task raises a NoConnectionSecretError and does not call - subprocess.call.""" - from dbt_platform_helper.commands.conduit import SecretNotFoundConduitError - from dbt_platform_helper.commands.conduit import create_addon_client_task - - mock_application = Mock() - mock_application.name = "test-application" - mock_application.environments = {"development": Mock()} - - get_connection_secret_arn.side_effect = SecretNotFoundConduitError - - with pytest.raises(SecretNotFoundConduitError): - create_addon_client_task( - mock_application, - "development", - "postgres", - "named-postgres", - mock_task_name("named-postgres"), - "read", - ) - - subprocess_call.assert_not_called() - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -def 
test_addon_client_is_running( - mock_cluster_client_task, mocked_cluster, addon_type, mock_application -): - """Test that, given cluster ARN, addon type and with a running agent, - addon_client_is_running returns True.""" - from dbt_platform_helper.commands.conduit import addon_client_is_running - - mocked_cluster_for_client = mock_cluster_client_task(addon_type) - mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - - with patch( - "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client - ): - assert addon_client_is_running( - mock_application, "development", mocked_cluster_arn, mock_task_name(addon_type) - ) - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -def test_addon_client_is_running_when_no_client_task_running( - mock_cluster_client_task, mocked_cluster, addon_type, mock_application -): - """Test that, given cluster ARN, addon type and without a running client - task, addon_client_is_running returns False.""" - from dbt_platform_helper.commands.conduit import addon_client_is_running - - mocked_cluster_for_client = mock_cluster_client_task(addon_type, task_running=False) - mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - - with patch( - "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client - ): - assert ( - addon_client_is_running( - mock_application, "development", mocked_cluster_arn, mock_task_name(addon_type) - ) - is False - ) - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -def test_addon_client_is_running_when_no_client_agent_running( - mock_cluster_client_task, mocked_cluster, addon_type, mock_application -): - """Test that, given cluster ARN, addon type and without a running agent, - addon_client_is_running returns False.""" - from dbt_platform_helper.commands.conduit import addon_client_is_running - - mocked_cluster_for_client = mock_cluster_client_task(addon_type, 
"ACTIVATING") - mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - - with patch( - "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client - ): - assert ( - addon_client_is_running( - mock_application, "development", mocked_cluster_arn, mock_task_name(addon_type) - ) - is False - ) - - -@mock_aws -@pytest.mark.parametrize( - "addon_name", - ["postgres", "redis", "opensearch", "rds-postgres"], -) -@patch("time.sleep", return_value=None) -def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, mock_application): - """Test that, given app, env and addon name - add_stack_delete_policy_to_task_role adds a policy to the IAM role in a - CloudFormation stack.""" - from dbt_platform_helper.commands.conduit import ( - add_stack_delete_policy_to_task_role, - ) - - task_name = mock_task_name(addon_name) - stack_name = f"task-{task_name}" - - mock_stack(addon_name) - mock_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Action": ["cloudformation:DeleteStack"], - "Effect": "Allow", - "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", - }, - ], - } - - add_stack_delete_policy_to_task_role(mock_application, "development", task_name) - - stack_resources = boto3.client("cloudformation").list_stack_resources(StackName=stack_name)[ - "StackResourceSummaries" - ] - - policy_name = None - policy_document = None - for resource in stack_resources: - if resource["LogicalResourceId"] == "DefaultTaskRole": - policy = boto3.client("iam").get_role_policy( - RoleName=resource["PhysicalResourceId"], PolicyName="DeleteCloudFormationStack" - ) - policy_name = policy["PolicyName"] - policy_document = policy["PolicyDocument"] - - assert policy_name == "DeleteCloudFormationStack" - assert policy_document == mock_policy - - -@mock_aws -@pytest.mark.parametrize( - "addon_type, addon_name, parameter_suffix", - [ - ("postgres", "custom-name-postgres", "_READ_ONLY"), - ("postgres", "custom-name-rds-postgres", 
"_READ_ONLY"), - ("redis", "custom-name-redis", ""), - ("opensearch", "custom-name-opensearch", ""), - ], -) -def test_update_conduit_stack_resources( - mock_stack, addon_type, addon_name, parameter_suffix, mock_application -): - """Test that, given app, env and addon name update_conduit_stack_resources - updates the conduit CloudFormation stack to add DeletionPolicy:Retain and - subscription filter to the LogGroup.""" - from dbt_platform_helper.commands.conduit import update_conduit_stack_resources - - boto3.client("iam").create_role( - RoleName="CWLtoSubscriptionFilterRole", - AssumeRolePolicyDocument="123", - ) - - boto3.client("ssm").put_parameter( - Name="/copilot/tools/central_log_groups", - Value=json.dumps( - { - "prod": "arn:aws:logs:eu-west-2:prod_account_id:destination:test_log_destination", - "dev": "arn:aws:logs:eu-west-2:dev_account_id:destination:test_log_destination", - } - ), - Type="String", - ) - - mock_stack(addon_name) - task_name = mock_task_name(addon_name) - parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) - - update_conduit_stack_resources( - mock_application, "development", addon_type, addon_name, task_name, parameter_name, "read" - ) - - template = boto3.client("cloudformation").get_template(StackName=f"task-{task_name}") - template_yml = load_yaml(template["TemplateBody"]) - assert template_yml["Resources"]["LogGroup"]["DeletionPolicy"] == "Retain" - assert template_yml["Resources"]["TaskNameParameter"]["Properties"]["Name"] == parameter_name - assert ( - template_yml["Resources"]["SubscriptionFilter"]["Properties"]["LogGroupName"] - == f"/copilot/{task_name}" - ) - assert ( - "dev_account_id" - in template_yml["Resources"]["SubscriptionFilter"]["Properties"]["DestinationArn"] - ) - assert ( - template_yml["Resources"]["SubscriptionFilter"]["Properties"]["FilterName"] - == f"/copilot/conduit/{mock_application.name}/development/{addon_type}/{addon_name}/{task_name.rsplit('-', 1)[1]}/read" - ) - - -@mock_aws 
-def test_get_or_create_task_name(mock_application): - """Test that get_or_create_task_name retrieves the task name from the - parameter store when it has been stored.""" - from dbt_platform_helper.commands.conduit import get_or_create_task_name - - addon_name = "app-postgres" - parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=parameter_name, - Type="String", - Value=mock_task_name(addon_name), - ) - - task_name = get_or_create_task_name(mock_application, "development", addon_name, parameter_name) - - assert task_name == mock_task_name(addon_name) - - -@mock_aws -def test_get_or_create_task_name_when_name_does_not_exist(mock_application): - """Test that get_or_create_task_name creates the task name and appends it - with a 12 digit lowercase alphanumeric string when it does not exist in the - parameter store.""" - from dbt_platform_helper.commands.conduit import get_or_create_task_name - - addon_name = "app-postgres" - parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - task_name = get_or_create_task_name(mock_application, "development", addon_name, parameter_name) - random_id = task_name.rsplit("-", 1)[1] - - assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] - assert random_id.isalnum() and random_id.islower() and len(random_id) == 12 - - -@mock_aws -@pytest.mark.parametrize( - "access", - [ - "read", - "write", - "admin", - ], -) -@pytest.mark.parametrize( - "addon_type, addon_name", - [ - ("postgres", "custom-name-postgres"), - ("postgres", "custom-name-rds-postgres"), - ("redis", "custom-name-redis"), - ("opensearch", "custom-name-opensearch"), - ], -) -def test_get_parameter_name(access, addon_type, addon_name, mock_application): - """Test that get_parameter_name builds the correct parameter name given the - addon_name, addon_type and permission.""" - from dbt_platform_helper.commands.conduit import 
get_parameter_name - - parameter_name = get_parameter_name( - mock_application, "development", addon_type, addon_name, access - ) - assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -@patch("subprocess.call") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=True) -def test_connect_to_addon_client_task( - addon_client_is_running, subprocess_call, addon_type, mock_application -): - """ - Test that, given app, env, ECS cluster ARN and addon type, - connect_to_addon_client_task calls addon_client_is_running with cluster ARN - and addon type. - - It then subsequently calls subprocess.call with the correct app, env and - addon type. - """ - from dbt_platform_helper.commands.conduit import addon_client_is_running - from dbt_platform_helper.commands.conduit import connect_to_addon_client_task - - task_name = mock_task_name(addon_type) - connect_to_addon_client_task(mock_application, "development", "test-arn", task_name) - - addon_client_is_running.assert_called_once_with( - mock_application, "development", "test-arn", task_name - ) - subprocess_call.assert_called_once_with( - f"copilot task exec --app test-application --env development " - f"--name {task_name} " - f"--command bash", - shell=True, - ) - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -@patch("time.sleep", return_value=None) -@patch("subprocess.call") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=False) -def test_connect_to_addon_client_task_when_timeout_reached( - addon_client_is_running, subprocess_call, sleep, addon_type, mock_application -): - """Test that, given app, env, ECS cluster ARN and addon type, when the - client agent fails to start, connect_to_addon_client_task calls - addon_client_is_running with cluster ARN and addon type 15 times, but does - not 
call subprocess.call.""" - from dbt_platform_helper.commands.conduit import CreateTaskTimeoutConduitError - from dbt_platform_helper.commands.conduit import connect_to_addon_client_task - - task_name = mock_task_name(addon_type) - with pytest.raises(CreateTaskTimeoutConduitError): - connect_to_addon_client_task(mock_application, "development", "test-arn", task_name) - - addon_client_is_running.assert_called_with( - mock_application, "development", "test-arn", task_name - ) - assert addon_client_is_running.call_count == 15 - subprocess_call.assert_not_called() - - -@pytest.mark.parametrize( - "addon_type, addon_name", - [("postgres", "app-postgres"), ("redis", "app-redis"), ("opensearch", "app-opensearch")], -) -@patch("dbt_platform_helper.commands.conduit.get_cluster_arn", return_value="test-arn") -@patch("dbt_platform_helper.commands.conduit.get_parameter_name") -@patch("dbt_platform_helper.commands.conduit.get_or_create_task_name") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=False) -@patch("dbt_platform_helper.commands.conduit.create_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.connect_to_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.add_stack_delete_policy_to_task_role") -@patch("dbt_platform_helper.commands.conduit.update_conduit_stack_resources") -def test_start_conduit( - update_conduit_stack_resources, - add_stack_delete_policy_to_task_role, - connect_to_addon_client_task, - create_addon_client_task, - addon_client_is_running, - get_or_create_task_name, - get_parameter_name, - get_cluster_arn, - addon_type, - addon_name, - mock_application, -): - """Test that given app, env and addon type strings, start_conduit calls - get_cluster_arn, addon_client_is_running, created_addon_client_task, - add_stack_delete_policy_to_task_role and connect_to_addon_client_task.""" - from dbt_platform_helper.commands.conduit import start_conduit - - task_name = mock_task_name(addon_name) - 
get_or_create_task_name.side_effect = [task_name] - parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) - get_parameter_name.side_effect = [parameter_name] - - start_conduit(mock_application, "development", addon_type, addon_name) - - get_cluster_arn.assert_called_once_with(mock_application, "development") - get_parameter_name.assert_called_once_with( - mock_application, "development", addon_type, addon_name, "read" - ) - get_or_create_task_name.assert_called_once_with( - mock_application, "development", addon_name, parameter_name - ) - addon_client_is_running.assert_called_with( - mock_application, "development", "test-arn", task_name - ) - create_addon_client_task.assert_called_once_with( - mock_application, "development", addon_type, addon_name, task_name, "read" - ) - add_stack_delete_policy_to_task_role.assert_called_once_with( - mock_application, "development", task_name - ) - update_conduit_stack_resources.assert_called_once_with( - mock_application, "development", addon_type, addon_name, task_name, parameter_name, "read" - ) - connect_to_addon_client_task.assert_called_once_with( - mock_application, "development", "test-arn", task_name - ) - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -@patch("dbt_platform_helper.commands.conduit.get_cluster_arn") -@patch("dbt_platform_helper.commands.conduit.get_parameter_name") -@patch("dbt_platform_helper.commands.conduit.get_or_create_task_name") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=False) -@patch("dbt_platform_helper.commands.conduit.create_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.connect_to_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.add_stack_delete_policy_to_task_role") -@patch("dbt_platform_helper.commands.conduit.update_conduit_stack_resources") -def test_start_conduit_when_no_cluster_present( - update_conduit_stack_resources, - 
add_stack_delete_policy_to_task_role, - connect_to_addon_client_task, - create_addon_client_task, - addon_client_is_running, - get_or_create_task_name, - get_parameter_name, - get_cluster_arn, - addon_type, - mock_application, -): - """ - Test that given app, env, addon type and no available ecs cluster, - start_conduit calls get_cluster_arn and the NoClusterConduitError is raised. - - Neither created_addon_client_task, addon_client_is_running, - connect_to_addon_client_task or add_stack_delete_policy_to_task_role are - called. - """ - from dbt_platform_helper.commands.conduit import NoClusterConduitError - from dbt_platform_helper.commands.conduit import start_conduit - - get_cluster_arn.side_effect = NoClusterConduitError - - with pytest.raises(NoClusterConduitError): - start_conduit(mock_application, "development", addon_type, "custom-addon-name") - - get_cluster_arn.assert_called_once_with(mock_application, "development") - get_parameter_name.assert_not_called() - get_or_create_task_name.assert_not_called() - addon_client_is_running.assert_not_called() - create_addon_client_task.assert_not_called() - add_stack_delete_policy_to_task_role.assert_not_called() - update_conduit_stack_resources.assert_not_called() - connect_to_addon_client_task.assert_not_called() - - -@pytest.mark.parametrize( - "addon_type, addon_name", - [("postgres", "app-postgres"), ("redis", "app-redis"), ("opensearch", "app-opensearch")], -) -@patch("dbt_platform_helper.commands.conduit.get_cluster_arn", return_value="test-arn") -@patch("dbt_platform_helper.commands.conduit.get_parameter_name") -@patch("dbt_platform_helper.commands.conduit.get_or_create_task_name") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=False) -@patch("dbt_platform_helper.commands.conduit.create_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.connect_to_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.add_stack_delete_policy_to_task_role") 
-@patch("dbt_platform_helper.commands.conduit.update_conduit_stack_resources") -def test_start_conduit_when_no_secret_exists( - update_conduit_stack_resources, - add_stack_delete_policy_to_task_role, - connect_to_addon_client_task, - create_addon_client_task, - addon_client_is_running, - get_or_create_task_name, - get_parameter_name, - get_cluster_arn, - addon_type, - addon_name, - mock_application, -): - """Test that given app, env, addon type and no available secret, - start_conduit calls get_cluster_arn, then addon_client_is_running and - create_addon_client_task and the NoConnectionSecretError is raised and - add_stack_delete_policy_to_task_role and connect_to_addon_client_task are - not called.""" - from dbt_platform_helper.commands.conduit import SecretNotFoundConduitError - from dbt_platform_helper.commands.conduit import start_conduit - - task_name = mock_task_name(addon_name) - get_or_create_task_name.side_effect = [task_name] - parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) - get_parameter_name.side_effect = [parameter_name] - - create_addon_client_task.side_effect = SecretNotFoundConduitError - with pytest.raises(SecretNotFoundConduitError): - start_conduit(mock_application, "development", addon_type, addon_name) - - get_cluster_arn.assert_called_once_with(mock_application, "development") - get_parameter_name.assert_called_once_with( - mock_application, "development", addon_type, addon_name, "read" - ) - get_or_create_task_name.assert_called_once_with( - mock_application, "development", addon_name, parameter_name - ) - addon_client_is_running.assert_called_with( - mock_application, "development", "test-arn", task_name - ) - create_addon_client_task.assert_called_once_with( - mock_application, "development", addon_type, addon_name, task_name, "read" - ) - add_stack_delete_policy_to_task_role.assert_not_called() - update_conduit_stack_resources.assert_not_called() - connect_to_addon_client_task.assert_not_called() - - 
-@pytest.mark.parametrize( - "addon_type, addon_name", - [("postgres", "app-postgres"), ("redis", "app-redis"), ("opensearch", "app-opensearch")], -) -@patch("dbt_platform_helper.commands.conduit.get_cluster_arn", return_value="test-arn") -@patch("dbt_platform_helper.commands.conduit.get_parameter_name") -@patch("dbt_platform_helper.commands.conduit.get_or_create_task_name") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=False) -@patch("dbt_platform_helper.commands.conduit.create_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.connect_to_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.add_stack_delete_policy_to_task_role") -@patch("dbt_platform_helper.commands.conduit.update_conduit_stack_resources") -def test_start_conduit_when_addon_client_task_fails_to_start( - update_conduit_stack_resources, - add_stack_delete_policy_to_task_role, - connect_to_addon_client_task, - create_addon_client_task, - addon_client_is_running, - get_or_create_task_name, - get_parameter_name, - get_cluster_arn, - addon_type, - addon_name, - mock_application, -): - """Test that given app, env, and addon type strings when the client task - fails to start, start_conduit calls get_cluster_arn, - addon_client_is_running, create_addon_client_task, - add_stack_delete_policy_to_task_role and connect_to_addon_client_task then - the NoConnectionSecretError is raised.""" - from dbt_platform_helper.commands.conduit import CreateTaskTimeoutConduitError - from dbt_platform_helper.commands.conduit import start_conduit - - task_name = mock_task_name(addon_name) - get_or_create_task_name.side_effect = [task_name] - parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) - get_parameter_name.side_effect = [parameter_name] - - connect_to_addon_client_task.side_effect = CreateTaskTimeoutConduitError - with pytest.raises(CreateTaskTimeoutConduitError): - start_conduit(mock_application, "development", addon_type, 
addon_name) - - get_cluster_arn.assert_called_once_with(mock_application, "development") - get_parameter_name.assert_called_once_with( - mock_application, "development", addon_type, addon_name, "read" - ) - get_or_create_task_name.assert_called_once_with( - mock_application, "development", addon_name, parameter_name - ) - addon_client_is_running.assert_called_with( - mock_application, "development", "test-arn", task_name - ) - create_addon_client_task.assert_called_once_with( - mock_application, "development", addon_type, addon_name, task_name, "read" - ) - add_stack_delete_policy_to_task_role.assert_called_once_with( - mock_application, "development", task_name - ) - update_conduit_stack_resources.assert_called_once_with( - mock_application, "development", addon_type, addon_name, task_name, parameter_name, "read" - ) - connect_to_addon_client_task.assert_called_once_with( - mock_application, "development", "test-arn", task_name - ) - - -@pytest.mark.parametrize( - "addon_type, addon_name", - [("postgres", "app-postgres"), ("redis", "app-redis"), ("opensearch", "app-opensearch")], -) -@patch("dbt_platform_helper.commands.conduit.get_cluster_arn", return_value="test-arn") -@patch("dbt_platform_helper.commands.conduit.get_parameter_name") -@patch("dbt_platform_helper.commands.conduit.get_or_create_task_name") -@patch("dbt_platform_helper.commands.conduit.create_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=True) -@patch("dbt_platform_helper.commands.conduit.connect_to_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.add_stack_delete_policy_to_task_role") -@patch("dbt_platform_helper.commands.conduit.update_conduit_stack_resources") -def test_start_conduit_when_addon_client_task_is_already_running( - update_conduit_stack_resources, - add_stack_delete_policy_to_task_role, - connect_to_addon_client_task, - addon_client_is_running, - create_addon_client_task, - get_or_create_task_name, - 
get_parameter_name, - get_cluster_arn, - addon_type, - addon_name, - mock_application, -): - """Test that given app, env, and addon type strings when the client task is - already running, start_conduit calls get_cluster_arn, - addon_client_is_running and connect_to_addon_client_task then the - create_addon_client_task and add_stack_delete_policy_to_task_role are not - called.""" - from dbt_platform_helper.commands.conduit import start_conduit - - task_name = mock_task_name(addon_name) - get_or_create_task_name.side_effect = [task_name] - parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) - get_parameter_name.side_effect = [parameter_name] - - start_conduit(mock_application, "development", addon_type, addon_name) - - get_cluster_arn.assert_called_once_with(mock_application, "development") - get_parameter_name.assert_called_once_with( - mock_application, "development", addon_type, addon_name, "read" - ) - get_or_create_task_name.assert_called_once_with( - mock_application, "development", addon_name, parameter_name - ) - addon_client_is_running.assert_called_once_with( - mock_application, "development", "test-arn", task_name - ) - create_addon_client_task.assert_not_called() - add_stack_delete_policy_to_task_role.assert_not_called() - update_conduit_stack_resources.assert_not_called() - connect_to_addon_client_task.assert_called_once_with( - mock_application, "development", "test-arn", task_name - ) - - -@pytest.mark.parametrize( - "access", - ["read", "write", "admin"], -) -@pytest.mark.parametrize( - "addon_type, addon_name", - [("postgres", "app-postgres"), ("redis", "app-redis"), ("opensearch", "app-opensearch")], -) -@patch("dbt_platform_helper.commands.conduit.get_cluster_arn", return_value="test-arn") -@patch("dbt_platform_helper.commands.conduit.get_parameter_name") -@patch("dbt_platform_helper.commands.conduit.get_or_create_task_name") -@patch("dbt_platform_helper.commands.conduit.addon_client_is_running", return_value=False) 
-@patch("dbt_platform_helper.commands.conduit.create_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.connect_to_addon_client_task") -@patch("dbt_platform_helper.commands.conduit.add_stack_delete_policy_to_task_role") -@patch("dbt_platform_helper.commands.conduit.update_conduit_stack_resources") -def test_start_conduit_with_access_permissions( - update_conduit_stack_resources, - add_stack_delete_policy_to_task_role, - connect_to_addon_client_task, - create_addon_client_task, - addon_client_is_running, - get_or_create_task_name, - get_parameter_name, - get_cluster_arn, - addon_type, - addon_name, - access, - mock_application, -): - """Test that given app, env, addon type and access, start_conduit calls - get_cluster_arn, addon_client_is_running, created_addon_client_task, - add_stack_delete_policy_to_task_role and connect_to_addon_client_task.""" - from dbt_platform_helper.commands.conduit import start_conduit - - task_name = mock_task_name(addon_name) - get_or_create_task_name.side_effect = [task_name] - parameter_name = mock_parameter_name(mock_application, addon_type, addon_name) - get_parameter_name.side_effect = [parameter_name] - - start_conduit(mock_application, "development", addon_type, addon_name, access) - - get_cluster_arn.assert_called_once_with(mock_application, "development") - get_parameter_name.assert_called_once_with( - mock_application, "development", addon_type, addon_name, access - ) - get_or_create_task_name.assert_called_once_with( - mock_application, "development", addon_name, parameter_name - ) - addon_client_is_running.assert_called_with( - mock_application, "development", "test-arn", task_name - ) - create_addon_client_task.assert_called_once_with( - mock_application, "development", addon_type, addon_name, task_name, access - ) - add_stack_delete_policy_to_task_role.assert_called_once_with( - mock_application, "development", task_name - ) - update_conduit_stack_resources.assert_called_once_with( - mock_application, 
"development", addon_type, addon_name, task_name, parameter_name, access - ) - connect_to_addon_client_task.assert_called_once_with( - mock_application, "development", "test-arn", task_name - ) - - -@mock_aws -@pytest.mark.parametrize( - "addon_name, expected_type", - [ - ("custom-name-postgres", "postgres"), - ("custom-name-redis", "redis"), - ("custom-name-opensearch", "opensearch"), - ], -) -def test_get_addon_type(addon_name, expected_type, mock_application): - """Test that get_addon_type returns the expected addon type.""" - from dbt_platform_helper.commands.conduit import get_addon_type - - add_addon_config_parameter() - addon_type = get_addon_type(mock_application, "development", addon_name) - - assert addon_type == expected_type - - -@mock_aws -def test_get_addon_type_when_addon_not_found(mock_application): - """Test that get_addon_type raises the expected error when the addon is not - found in the config file.""" - from dbt_platform_helper.commands.conduit import AddonNotFoundConduitError - from dbt_platform_helper.commands.conduit import get_addon_type - - add_addon_config_parameter({"different-name": {"type": "redis"}}) - - with pytest.raises(AddonNotFoundConduitError): - get_addon_type(mock_application, "development", "custom-name-postgres") - - -@mock_aws -def test_get_addon_type_when_parameter_not_found(mock_application): - """Test that get_addon_type raises the expected error when the addon config - parameter is not found.""" - from dbt_platform_helper.commands.conduit import ParameterNotFoundConduitError - from dbt_platform_helper.commands.conduit import get_addon_type - - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", - Type="String", - Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), - ) - - with pytest.raises(ParameterNotFoundConduitError): - get_addon_type(mock_application, "development", "custom-name-postgres") From 
389681ad46dff99d7ee16d12d131321d51119e48 Mon Sep 17 00:00:00 2001 From: Chiara <95863059+chiaramapellimt@users.noreply.github.com> Date: Thu, 21 Nov 2024 15:47:58 +0000 Subject: [PATCH 04/38] refactor: DBTP-1520 final refactor (#653) --- dbt_platform_helper/commands/conduit.py | 21 ++- dbt_platform_helper/constants.py | 14 +- dbt_platform_helper/domain/conduit.py | 9 +- dbt_platform_helper/exceptions.py | 29 ++++ dbt_platform_helper/providers/aws.py | 43 ------ dbt_platform_helper/providers/copilot.py | 145 ++---------------- dbt_platform_helper/providers/ecs.py | 52 +++++++ dbt_platform_helper/providers/secrets.py | 85 ++++++++++ tests/platform_helper/domain/test_conduit.py | 12 +- .../providers/test_cloudformation.py | 1 - .../platform_helper/providers/test_copilot.py | 26 ++-- .../{test_aws.py => test_secrets.py} | 61 ++++---- tests/platform_helper/test_command_conduit.py | 16 +- 13 files changed, 270 insertions(+), 244 deletions(-) delete mode 100644 dbt_platform_helper/providers/aws.py create mode 100644 dbt_platform_helper/providers/ecs.py create mode 100644 dbt_platform_helper/providers/secrets.py rename tests/platform_helper/providers/{test_aws.py => test_secrets.py} (94%) diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index fb933e608..c7d3eaa4d 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -1,13 +1,14 @@ import click +from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.providers.aws import SecretNotFoundError -from dbt_platform_helper.providers.copilot import CONDUIT_ADDON_TYPES -from dbt_platform_helper.providers.copilot import AddonNotFoundError -from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError -from dbt_platform_helper.providers.copilot import InvalidAddonTypeError -from dbt_platform_helper.providers.copilot import NoClusterError -from 
dbt_platform_helper.providers.copilot import ParameterNotFoundError +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.providers.secrets import SecretNotFoundError from dbt_platform_helper.utils.application import load_application from dbt_platform_helper.utils.click import ClickDocOptCommand from dbt_platform_helper.utils.versioning import ( @@ -68,3 +69,9 @@ def conduit(addon_name: str, app: str, env: str, access: str): fg="red", ) exit(1) + except AddonTypeMissingFromConfigError: + click.secho( + f"""The configuration for the addon {addon_name} is misconfigured and missing the addon type.""", + fg="red", + ) + exit(1) diff --git a/dbt_platform_helper/constants.py b/dbt_platform_helper/constants.py index d7d1ed649..f6b1b13b5 100644 --- a/dbt_platform_helper/constants.py +++ b/dbt_platform_helper/constants.py @@ -1,6 +1,16 @@ PLATFORM_CONFIG_FILE = "platform-config.yml" PLATFORM_HELPER_VERSION_FILE = ".platform-helper-version" -CODEBASE_PIPELINES_KEY = "codebase_pipelines" -ENVIRONMENTS_KEY = "environments" DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION = "5" PLATFORM_HELPER_CACHE_FILE = ".platform-helper-config-cache.yml" + +# Keys +CODEBASE_PIPELINES_KEY = "codebase_pipelines" +ENVIRONMENTS_KEY = "environments" + +# Conduit +CONDUIT_ADDON_TYPES = [ + "opensearch", + "postgres", + "redis", +] +CONDUIT_DOCKER_IMAGE_LOCATION = "public.ecr.aws/uktrade/tunnel" diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index a7954210d..896467488 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -14,10 +14,11 @@ from 
dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.copilot import get_addon_type -from dbt_platform_helper.providers.copilot import get_cluster_arn -from dbt_platform_helper.providers.copilot import get_or_create_task_name -from dbt_platform_helper.providers.copilot import get_parameter_name +from dbt_platform_helper.providers.ecs import addon_client_is_running +from dbt_platform_helper.providers.ecs import get_cluster_arn +from dbt_platform_helper.providers.ecs import get_or_create_task_name +from dbt_platform_helper.providers.secrets import get_addon_type +from dbt_platform_helper.providers.secrets import get_parameter_name from dbt_platform_helper.utils.application import Application diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py index b77561067..11b005685 100644 --- a/dbt_platform_helper/exceptions.py +++ b/dbt_platform_helper/exceptions.py @@ -20,6 +20,31 @@ def __init__(self, app_version: str, check_version: str): self.check_version = check_version +class NoClusterError(AWSException): + pass + + +class CreateTaskTimeoutError(AWSException): + pass + + +class ParameterNotFoundError(AWSException): + pass + + +class AddonNotFoundError(AWSException): + pass + + +class InvalidAddonTypeError(AWSException): + def __init__(self, addon_type): + self.addon_type = addon_type + + +class AddonTypeMissingFromConfigError(AWSException): + pass + + class CopilotCodebaseNotFoundError(Exception): pass @@ -46,3 +71,7 @@ class ApplicationNotFoundError(Exception): class ApplicationEnvironmentNotFoundError(Exception): pass + + +class SecretNotFoundError(AWSException): + pass diff --git a/dbt_platform_helper/providers/aws.py b/dbt_platform_helper/providers/aws.py deleted file mode 100644 index 67f051b61..000000000 --- 
a/dbt_platform_helper/providers/aws.py +++ /dev/null @@ -1,43 +0,0 @@ -import json -import urllib - - -class AWSError(Exception): - pass - - -class SecretNotFoundError(AWSError): - pass - - -# TODO Attempt to extract speicifc conduit business logic and leave the AWS specific functionality in provider layer -def get_postgres_connection_data_updated_with_master_secret( - ssm_client, secrets_manager_client, parameter_name, secret_arn -): - response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) - parameter_value = response["Parameter"]["Value"] - - parameter_data = json.loads(parameter_value) - - secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn) - secret_value = json.loads(secret_response["SecretString"]) - - parameter_data["username"] = urllib.parse.quote(secret_value["username"]) - parameter_data["password"] = urllib.parse.quote(secret_value["password"]) - - return parameter_data - - -def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str: - - try: - return ssm_client.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"] - except ssm_client.exceptions.ParameterNotFound: - pass - - try: - return secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] - except secrets_manager_client.exceptions.ResourceNotFoundException: - pass - - raise SecretNotFoundError(secret_name) diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index 7fa0bf430..d98b9e448 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -1,142 +1,19 @@ import json -import random -import string import time import click from botocore.exceptions import ClientError -from dbt_platform_helper.providers.aws import AWSError -from dbt_platform_helper.providers.aws import get_connection_secret_arn -from dbt_platform_helper.providers.aws import ( +from dbt_platform_helper.constants import 
CONDUIT_DOCKER_IMAGE_LOCATION +from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.providers.ecs import addon_client_is_running +from dbt_platform_helper.providers.secrets import get_connection_secret_arn +from dbt_platform_helper.providers.secrets import ( get_postgres_connection_data_updated_with_master_secret, ) from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.messages import abort_with_error -# TODO move to constants -CONDUIT_DOCKER_IMAGE_LOCATION = "public.ecr.aws/uktrade/tunnel" -CONDUIT_ADDON_TYPES = [ - "opensearch", - "postgres", - "redis", -] - - -class NoClusterError(AWSError): - pass - - -class CreateTaskTimeoutError(AWSError): - pass - - -class ParameterNotFoundError(AWSError): - pass - - -class AddonNotFoundError(AWSError): - pass - - -class AddonTypeMissingFromConfigError(AWSError): - pass - - -class InvalidAddonTypeError(AWSError): - def __init__(self, addon_type): - self.addon_type = addon_type - - -def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str: - addon_type = None - try: - addon_config = json.loads( - ssm_client.get_parameter( - Name=f"/copilot/applications/{application_name}/environments/{env}/addons" - )["Parameter"]["Value"] - ) - except ssm_client.exceptions.ParameterNotFound: - raise ParameterNotFoundError - - if addon_name not in addon_config.keys(): - raise AddonNotFoundError - - for name, config in addon_config.items(): - if name == addon_name: - if not config.get("type"): - raise AddonTypeMissingFromConfigError() - addon_type = config["type"] - - if addon_type not in CONDUIT_ADDON_TYPES: - raise InvalidAddonTypeError(addon_type) - - if "postgres" in addon_type: - addon_type = "postgres" - - return addon_type - - -# TODO Refactor this to support passing a list of tags to check against, allowing for a more generic implementation -def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: - - for 
cluster_arn in ecs_client.list_clusters()["clusterArns"]: - tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) - tags = tags_response["tags"] - - app_key_found = False - env_key_found = False - cluster_key_found = False - - for tag in tags: - if tag["key"] == "copilot-application" and tag["value"] == application_name: - app_key_found = True - if tag["key"] == "copilot-environment" and tag["value"] == env: - env_key_found = True - if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": - cluster_key_found = True - - if app_key_found and env_key_found and cluster_key_found: - return cluster_arn - - raise NoClusterError - - -def get_parameter_name( - application_name: str, env: str, addon_type: str, addon_name: str, access: str -) -> str: - if addon_type == "postgres": - return f"/copilot/{application_name}/{env}/conduits/{normalise_secret_name(addon_name)}_{access.upper()}" - elif addon_type == "redis" or addon_type == "opensearch": - return f"/copilot/{application_name}/{env}/conduits/{normalise_secret_name(addon_name)}_ENDPOINT" - else: - return f"/copilot/{application_name}/{env}/conduits/{normalise_secret_name(addon_name)}" - - -# TODO ECS??? 
-def get_or_create_task_name( - ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str -) -> str: - try: - return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] - except ssm_client.exceptions.ParameterNotFound: - random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) - return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" - - -# TODO ECS method -def addon_client_is_running(ecs_client, cluster_arn: str, task_name: str): - tasks = ecs_client.list_tasks( - cluster=cluster_arn, - desiredStatus="RUNNING", - family=f"copilot-{task_name}", - ) - - if not tasks["taskArns"]: - return False - - return True - def create_addon_client_task( iam_client, @@ -150,7 +27,7 @@ def create_addon_client_task( task_name: str, access: str, ): - secret_name = f"/copilot/{application.name}/{env}/secrets/{normalise_secret_name(addon_name)}" + secret_name = f"/copilot/{application.name}/{env}/secrets/{_normalise_secret_name(addon_name)}" if addon_type == "postgres": if access == "read": @@ -200,10 +77,6 @@ def create_addon_client_task( ) -def normalise_secret_name(addon_name: str) -> str: - return addon_name.replace("-", "_").upper() - - def create_postgres_admin_task( ssm_client, secrets_manager_client, @@ -217,7 +90,7 @@ def create_postgres_admin_task( ): read_only_secret_name = secret_name + "_READ_ONLY_USER" master_secret_name = ( - f"/copilot/{app.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" + f"/copilot/{app.name}/{env}/secrets/{_normalise_secret_name(addon_name)}_RDS_MASTER_ARN" ) master_secret_arn = ssm_client.get_parameter(Name=master_secret_name, WithDecryption=True)[ "Parameter" @@ -266,3 +139,7 @@ def connect_to_addon_client_task( if not running: raise CreateTaskTimeoutError + + +def _normalise_secret_name(addon_name: str) -> str: + return addon_name.replace("-", "_").upper() diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py new 
file mode 100644 index 000000000..6bd534edc --- /dev/null +++ b/dbt_platform_helper/providers/ecs.py @@ -0,0 +1,52 @@ +import random +import string + +from dbt_platform_helper.exceptions import NoClusterError + + +# TODO Refactor this to support passing a list of tags to check against, allowing for a more generic implementation +def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: + for cluster_arn in ecs_client.list_clusters()["clusterArns"]: + tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) + tags = tags_response["tags"] + + app_key_found = False + env_key_found = False + cluster_key_found = False + + for tag in tags: + if tag["key"] == "copilot-application" and tag["value"] == application_name: + app_key_found = True + if tag["key"] == "copilot-environment" and tag["value"] == env: + env_key_found = True + if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": + cluster_key_found = True + + if app_key_found and env_key_found and cluster_key_found: + return cluster_arn + + raise NoClusterError + + +def get_or_create_task_name( + ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str +) -> str: + try: + return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] + except ssm_client.exceptions.ParameterNotFound: + random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) + return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" + + +# TODO Rename and extract ECS family as parameter / make more general +def addon_client_is_running(ecs_client, cluster_arn: str, task_name: str): + tasks = ecs_client.list_tasks( + cluster=cluster_arn, + desiredStatus="RUNNING", + family=f"copilot-{task_name}", + ) + + if not tasks["taskArns"]: + return False + + return True diff --git a/dbt_platform_helper/providers/secrets.py b/dbt_platform_helper/providers/secrets.py new file mode 100644 index 000000000..feeaf0ae1 --- /dev/null +++ 
b/dbt_platform_helper/providers/secrets.py @@ -0,0 +1,85 @@ +import json +import urllib + +from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError + + +def get_postgres_connection_data_updated_with_master_secret( + ssm_client, secrets_manager_client, parameter_name, secret_arn +): + response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) + parameter_value = response["Parameter"]["Value"] + + parameter_data = json.loads(parameter_value) + + secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn) + secret_value = json.loads(secret_response["SecretString"]) + + parameter_data["username"] = urllib.parse.quote(secret_value["username"]) + parameter_data["password"] = urllib.parse.quote(secret_value["password"]) + + return parameter_data + + +def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str: + + try: + return ssm_client.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"] + except ssm_client.exceptions.ParameterNotFound: + pass + + try: + return secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] + except secrets_manager_client.exceptions.ResourceNotFoundException: + pass + + raise SecretNotFoundError(secret_name) + + +def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str: + addon_type = None + try: + addon_config = json.loads( + ssm_client.get_parameter( + Name=f"/copilot/applications/{application_name}/environments/{env}/addons" + )["Parameter"]["Value"] + ) + except ssm_client.exceptions.ParameterNotFound: + raise ParameterNotFoundError + + if addon_name not in 
addon_config.keys(): + raise AddonNotFoundError + + for name, config in addon_config.items(): + if name == addon_name: + if not config.get("type"): + raise AddonTypeMissingFromConfigError() + addon_type = config["type"] + + if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: + raise InvalidAddonTypeError(addon_type) + + if "postgres" in addon_type: + addon_type = "postgres" + + return addon_type + + +def get_parameter_name( + application_name: str, env: str, addon_type: str, addon_name: str, access: str +) -> str: + if addon_type == "postgres": + return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_{access.upper()}" + elif addon_type == "redis" or addon_type == "opensearch": + return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_ENDPOINT" + else: + return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}" + + +def _normalise_secret_name(addon_name: str) -> str: + return addon_name.replace("-", "_").upper() diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index fc60a5170..4db64a251 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -4,12 +4,12 @@ import pytest from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.providers.aws import SecretNotFoundError -from dbt_platform_helper.providers.copilot import AddonNotFoundError -from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError -from dbt_platform_helper.providers.copilot import InvalidAddonTypeError -from dbt_platform_helper.providers.copilot import NoClusterError -from dbt_platform_helper.providers.copilot import ParameterNotFoundError +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from 
dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.application import Environment diff --git a/tests/platform_helper/providers/test_cloudformation.py b/tests/platform_helper/providers/test_cloudformation.py index 432b97e45..6d44869f9 100644 --- a/tests/platform_helper/providers/test_cloudformation.py +++ b/tests/platform_helper/providers/test_cloudformation.py @@ -75,7 +75,6 @@ def test_update_conduit_stack_resources( template = boto3.client("cloudformation").get_template(StackName=f"task-{task_name}") template_yml = load_yaml(template["TemplateBody"]) - assert template_yml["Resources"]["LogGroup"]["DeletionPolicy"] == "Retain" assert template_yml["Resources"]["TaskNameParameter"]["Properties"]["Name"] == parameter_name assert ( diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index ae3426c47..ad80046ee 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -7,22 +7,24 @@ from botocore.exceptions import ClientError from moto import mock_aws -from dbt_platform_helper.providers.aws import SecretNotFoundError -from dbt_platform_helper.providers.copilot import AddonNotFoundError -from dbt_platform_helper.providers.copilot import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import ParameterNotFoundError from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError -from dbt_platform_helper.providers.copilot import 
InvalidAddonTypeError -from dbt_platform_helper.providers.copilot import NoClusterError -from dbt_platform_helper.providers.copilot import ParameterNotFoundError -from dbt_platform_helper.providers.copilot import addon_client_is_running from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.copilot import get_addon_type -from dbt_platform_helper.providers.copilot import get_cluster_arn -from dbt_platform_helper.providers.copilot import get_or_create_task_name -from dbt_platform_helper.providers.copilot import get_parameter_name -from dbt_platform_helper.providers.copilot import normalise_secret_name +from dbt_platform_helper.providers.ecs import addon_client_is_running +from dbt_platform_helper.providers.ecs import get_cluster_arn +from dbt_platform_helper.providers.ecs import get_or_create_task_name +from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.providers.secrets import ( + _normalise_secret_name as normalise_secret_name, +) +from dbt_platform_helper.providers.secrets import get_addon_type +from dbt_platform_helper.providers.secrets import get_parameter_name from tests.platform_helper.conftest import NoSuchEntityException from tests.platform_helper.conftest import add_addon_config_parameter from tests.platform_helper.conftest import expected_connection_secret_name diff --git a/tests/platform_helper/providers/test_aws.py b/tests/platform_helper/providers/test_secrets.py similarity index 94% rename from tests/platform_helper/providers/test_aws.py rename to tests/platform_helper/providers/test_secrets.py index 5bc6f5c9c..cea50b10b 100644 --- a/tests/platform_helper/providers/test_aws.py +++ b/tests/platform_helper/providers/test_secrets.py @@ -2,42 +2,15 @@ import pytest from moto import mock_aws -from 
dbt_platform_helper.providers.aws import SecretNotFoundError -from dbt_platform_helper.providers.aws import get_connection_secret_arn -from dbt_platform_helper.providers.aws import ( +from dbt_platform_helper.providers.copilot import ( get_postgres_connection_data_updated_with_master_secret, ) +from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.providers.secrets import get_connection_secret_arn env = "development" -@mock_aws -def test_update_postgres_parameter_with_master_secret(): - session = boto3.session.Session() - parameter_name = "test-parameter" - ssm_client = session.client("ssm") - secretsmanager_client = session.client("secretsmanager") - ssm_client.put_parameter( - Name=parameter_name, - Value='{"username": "read-only-user", "password": ">G12345", "host": "test.com", "port": 5432}', - Type="String", - ) - secret_arn = session.client("secretsmanager").create_secret( - Name="master-secret", SecretString='{"username": "postgres", "password": ">G6789"}' - )["ARN"] - - updated_parameter_value = get_postgres_connection_data_updated_with_master_secret( - ssm_client, secretsmanager_client, parameter_name, secret_arn - ) - - assert updated_parameter_value == { - "username": "postgres", - "password": "%3EG6789", - "host": "test.com", - "port": 5432, - } - - @mock_aws def test_get_connection_secret_arn_from_secrets_manager(mock_application): """Test that, given app, environment and secret name strings, @@ -95,3 +68,31 @@ def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): with pytest.raises(SecretNotFoundError): get_connection_secret_arn(ssm_client, secrets_client, "POSTGRES") + + +@mock_aws +def test_update_postgres_parameter_with_master_secret(): + session = boto3.session.Session() + + parameter_name = "test-parameter" + ssm_client = session.client("ssm") + secretsmanager_client = session.client("secretsmanager") + ssm_client.put_parameter( + Name=parameter_name, + Value='{"username": 
"read-only-user", "password": ">G12345", "host": "test.com", "port": 5432}', + Type="String", + ) + secret_arn = session.client("secretsmanager").create_secret( + Name="master-secret", SecretString='{"username": "postgres", "password": ">G6789"}' + )["ARN"] + + updated_parameter_value = get_postgres_connection_data_updated_with_master_secret( + ssm_client, secretsmanager_client, parameter_name, secret_arn + ) + + assert updated_parameter_value == { + "username": "postgres", + "password": "%3EG6789", + "host": "test.com", + "port": 5432, + } diff --git a/tests/platform_helper/test_command_conduit.py b/tests/platform_helper/test_command_conduit.py index 23a1eed85..c5c9990b5 100644 --- a/tests/platform_helper/test_command_conduit.py +++ b/tests/platform_helper/test_command_conduit.py @@ -5,12 +5,13 @@ from click.testing import CliRunner from dbt_platform_helper.commands.conduit import conduit -from dbt_platform_helper.providers.aws import SecretNotFoundError -from dbt_platform_helper.providers.copilot import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import ParameterNotFoundError from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError -from dbt_platform_helper.providers.copilot import InvalidAddonTypeError -from dbt_platform_helper.providers.copilot import NoClusterError -from dbt_platform_helper.providers.copilot import ParameterNotFoundError +from dbt_platform_helper.providers.secrets import SecretNotFoundError @pytest.mark.parametrize( @@ -80,6 +81,11 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida {"addon_type": "fake-postgres"}, """Addon type "fake-postgres" is not supported, we support: opensearch, postgres, redis.""", ), + ( + 
AddonTypeMissingFromConfigError, + {}, + """The configuration for the addon important-db, is missconfigured and missing the addon type.""", + ), ], ) @patch("dbt_platform_helper.commands.conduit.Conduit") From 031e14e291d9270d626f21494f4349a40901d1dd Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:09:24 +0000 Subject: [PATCH 05/38] chore: Add some context to why we cannot get rid of the application commands yet (#651) --- dbt_platform_helper/commands/application.py | 1 + 1 file changed, 1 insertion(+) diff --git a/dbt_platform_helper/commands/application.py b/dbt_platform_helper/commands/application.py index 48a14de84..91ca98223 100644 --- a/dbt_platform_helper/commands/application.py +++ b/dbt_platform_helper/commands/application.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # application commands are deprecated, do not spend time refactoring them +# Service teams are trained to use them as a replacement for cf app(s) import time from datetime import datetime From 8906c8082844bd08d13601c3eeaad3d38dfd0cbe Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:09:59 +0000 Subject: [PATCH 06/38] chore: Add the auto generated header to the prometheus addon file (#650) --- dbt_platform_helper/templates/addons/svc/prometheus-policy.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml b/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml index df1505249..945f6892e 100644 --- a/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml +++ b/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml @@ -1,3 +1,5 @@ +# {% extra_header %} +# {% version_info %} Parameters: App: Type: String From 22eafa0c8388b3132663d953bf97c85887c94999 Mon Sep 17 00:00:00 2001 From: A Gleeson Date: Mon, 25 Nov 2024 13:44:21 +0000 Subject: [PATCH 07/38] fix: DBTP-1577 Fix conduit (ecs) 
exec race condition (#656) Co-authored-by: Connor Hindle Co-authored-by: Will Gibson <8738245+WillGibson@users.noreply.github.com> --- dbt_platform_helper/domain/conduit.py | 50 ++--- dbt_platform_helper/exceptions.py | 4 + dbt_platform_helper/providers/copilot.py | 35 ++-- dbt_platform_helper/providers/ecs.py | 35 +++- tests/platform_helper/domain/test_conduit.py | 87 +++++---- .../platform_helper/providers/test_copilot.py | 180 ++---------------- tests/platform_helper/providers/test_ecs.py | 160 ++++++++++++++++ 7 files changed, 301 insertions(+), 250 deletions(-) create mode 100644 tests/platform_helper/providers/test_ecs.py diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index 896467488..349df2617 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -3,6 +3,7 @@ import click +from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.providers.cloudformation import ( add_stack_delete_policy_to_task_role, ) @@ -10,16 +11,17 @@ from dbt_platform_helper.providers.cloudformation import ( wait_for_cloudformation_to_reach_status, ) -from dbt_platform_helper.providers.copilot import addon_client_is_running from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.ecs import addon_client_is_running +from dbt_platform_helper.providers.ecs import ecs_exec_is_available from dbt_platform_helper.providers.ecs import get_cluster_arn +from dbt_platform_helper.providers.ecs import get_ecs_task_arns from dbt_platform_helper.providers.ecs import get_or_create_task_name from dbt_platform_helper.providers.secrets import get_addon_type from dbt_platform_helper.providers.secrets import get_parameter_name from dbt_platform_helper.utils.application import 
Application +from dbt_platform_helper.utils.messages import abort_with_error class Conduit: @@ -28,53 +30,37 @@ def __init__( application: Application, echo_fn: Callable[[str], str] = click.secho, subprocess_fn: subprocess = subprocess, - addon_client_is_running_fn=addon_client_is_running, + get_ecs_task_arns_fn=get_ecs_task_arns, connect_to_addon_client_task_fn=connect_to_addon_client_task, create_addon_client_task_fn=create_addon_client_task, create_postgres_admin_task_fn=create_postgres_admin_task, get_addon_type_fn=get_addon_type, + ecs_exec_is_available_fn=ecs_exec_is_available, get_cluster_arn_fn=get_cluster_arn, get_parameter_name_fn=get_parameter_name, get_or_create_task_name_fn=get_or_create_task_name, add_stack_delete_policy_to_task_role_fn=add_stack_delete_policy_to_task_role, update_conduit_stack_resources_fn=update_conduit_stack_resources, wait_for_cloudformation_to_reach_status_fn=wait_for_cloudformation_to_reach_status, + abort_fn=abort_with_error, ): self.application = application self.subprocess_fn = subprocess_fn self.echo_fn = echo_fn - self.addon_client_is_running_fn = addon_client_is_running_fn + self.get_ecs_task_arns_fn = get_ecs_task_arns_fn self.connect_to_addon_client_task_fn = connect_to_addon_client_task_fn self.create_addon_client_task_fn = create_addon_client_task_fn self.create_postgres_admin_task = create_postgres_admin_task_fn self.get_addon_type_fn = get_addon_type_fn + self.ecs_exec_is_available_fn = ecs_exec_is_available_fn self.get_cluster_arn_fn = get_cluster_arn_fn self.get_parameter_name_fn = get_parameter_name_fn self.get_or_create_task_name_fn = get_or_create_task_name_fn self.add_stack_delete_policy_to_task_role_fn = add_stack_delete_policy_to_task_role_fn self.update_conduit_stack_resources_fn = update_conduit_stack_resources_fn self.wait_for_cloudformation_to_reach_status_fn = wait_for_cloudformation_to_reach_status_fn - """ - Initialise a conduit domain which can be used to spin up a conduit - instance to connect to a 
service. - - Args: - application(Application): an object with the data of the deployed application - subprocess_fn: inject the subprocess function to call and execute shell commands - echo_fn: a function to echo messages too - addon_client_is_running_fn: inject the function which will check if a conduit instance to the addon is running - connect_to_addon_client_task_fn: inject the function used to connect to the conduit instance, - create_addon_client_task_fn: inject the function used to create the conduit task to connect too - create_postgres_admin_task_fn: inject the function used to create the conduit task with admin access to postgres - get_addon_type_fn=get_addon_type: inject the function used to get the addon type from addon name - get_cluster_arn_fn: inject the function used to get the cluster arn from the application name and environment - get_parameter_name_fn: inject the function used to get the parameter name from the application and addon - get_or_create_task_name_fn: inject the function used to get an existing conduit task or generate a new task - add_stack_delete_policy_to_task_role_fn: inject the function used to create the delete task permission in cloudformation - update_conduit_stack_resources_fn: inject the function used to add the conduit instance into the cloudformation stack - wait_for_cloudformation_to_reach_status_fn: inject waiter function for cloudformation - """ + self.abort_fn = abort_fn def start(self, env: str, addon_name: str, access: str = "read"): clients = self._initialise_clients(env) @@ -82,7 +68,9 @@ def start(self, env: str, addon_name: str, access: str = "read"): env, addon_name, access ) - if not self.addon_client_is_running_fn(clients["ecs"], cluster_arn, task_name): + self.echo_fn(f"Checking if a conduit task is already running for {addon_type}") + task_arn = self.get_ecs_task_arns_fn(clients["ecs"], cluster_arn, task_name) + if not task_arn: self.echo_fn("Creating conduit task") self.create_addon_client_task_fn( 
clients["iam"], @@ -111,6 +99,18 @@ def start(self, env: str, addon_name: str, access: str = "read"): access, ) + task_arn = self.get_ecs_task_arns_fn(clients["ecs"], cluster_arn, task_name) + + else: + self.echo_fn("Conduit task already running") + + self.echo_fn(f"Checking if exec is available for conduit task...") + + try: + self.ecs_exec_is_available_fn(clients["ecs"], cluster_arn, task_arn) + except ECSAgentNotRunning: + self.abort_fn('ECS exec agent never reached "RUNNING" status') + self.echo_fn("Connecting to conduit task") self.connect_to_addon_client_task_fn( clients["ecs"], self.subprocess_fn, self.application.name, env, cluster_arn, task_name diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py index 11b005685..2917d1059 100644 --- a/dbt_platform_helper/exceptions.py +++ b/dbt_platform_helper/exceptions.py @@ -75,3 +75,7 @@ class ApplicationEnvironmentNotFoundError(Exception): class SecretNotFoundError(AWSException): pass + + +class ECSAgentNotRunning(AWSException): + pass diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index d98b9e448..c0ceb73b5 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -1,12 +1,11 @@ import json import time -import click from botocore.exceptions import ClientError from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.providers.ecs import addon_client_is_running +from dbt_platform_helper.providers.ecs import get_ecs_task_arns from dbt_platform_helper.providers.secrets import get_connection_secret_arn from dbt_platform_helper.providers.secrets import ( get_postgres_connection_data_updated_with_master_secret, @@ -113,27 +112,27 @@ def create_postgres_admin_task( def connect_to_addon_client_task( - ecs_client, subprocess, application_name, env, cluster_arn, task_name + ecs_client, + 
subprocess, + application_name, + env, + cluster_arn, + task_name, + addon_client_is_running_fn=get_ecs_task_arns, ): running = False tries = 0 while tries < 15 and not running: tries += 1 - if addon_client_is_running(ecs_client, cluster_arn, task_name): - # TODO user ecs.describe_task to check if exec agent is running before call subprocess - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs/client/describe_tasks.html - try: - subprocess.call( - "copilot task exec " - f"--app {application_name} --env {env} " - f"--name {task_name} " - f"--command bash", - shell=True, - ) - running = True - except ecs_client.exceptions.InvalidParameterException: - # Unable to connect, execute command agent probably isn’t running yet - click.echo("Unable to connect, execute command agent probably isn’t running yet") + if addon_client_is_running_fn(ecs_client, cluster_arn, task_name): + subprocess.call( + "copilot task exec " + f"--app {application_name} --env {env} " + f"--name {task_name} " + f"--command bash", + shell=True, + ) + running = True time.sleep(1) diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 6bd534edc..2160dfc12 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -1,6 +1,9 @@ import random import string +import time +from typing import List +from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import NoClusterError @@ -38,8 +41,8 @@ def get_or_create_task_name( return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" -# TODO Rename and extract ECS family as parameter / make more general -def addon_client_is_running(ecs_client, cluster_arn: str, task_name: str): +def get_ecs_task_arns(ecs_client, cluster_arn: str, task_name: str): + tasks = ecs_client.list_tasks( cluster=cluster_arn, desiredStatus="RUNNING", @@ -47,6 +50,30 @@ def addon_client_is_running(ecs_client, cluster_arn: str, 
task_name: str): ) if not tasks["taskArns"]: - return False + return [] + + return tasks["taskArns"] + + +def ecs_exec_is_available(ecs_client, cluster_arn: str, task_arns: List[str]): + + current_attemps = 0 + execute_command_agent_status = "" + + while execute_command_agent_status != "RUNNING" and current_attemps < 25: + + current_attemps += 1 + + task_details = ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) + + managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] + execute_command_agent_status = [ + agent["lastStatus"] + for agent in managed_agents + if agent["name"] == "ExecuteCommandAgent" + ][0] + + time.sleep(1) - return True + if execute_command_agent_status != "RUNNING": + raise ECSAgentNotRunning diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 4db64a251..385566bd7 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -1,3 +1,4 @@ +from unittest.mock import MagicMock from unittest.mock import Mock from unittest.mock import call @@ -29,14 +30,16 @@ def __init__(self, app_name="test-application", addon_type="postgres", *args, ** sessions = {"000000000": session} dummy_application = Application(app_name) dummy_application.environments = {env: Environment(env, "000000000", sessions)} - self.application = dummy_application - - self.addon_client_is_running_fn = kwargs.get( - "addon_client_is_running_fn", Mock(return_value=False) + self.add_stack_delete_policy_to_task_role_fn = kwargs.get( + "add_stack_delete_policy_to_task_role_fn", Mock() ) + self.get_ecs_task_arns_fn = kwargs.get("get_ecs_task_arns_fn", Mock(return_value=[])) + self.application = dummy_application + self.ecs_exec_is_available_fn = kwargs.get("ecs_exec_is_available_fn", Mock()) self.connect_to_addon_client_task_fn = kwargs.get("connect_to_addon_client_task_fn", Mock()) self.create_addon_client_task_fn = 
kwargs.get("create_addon_client_task_fn", Mock()) self.create_postgres_admin_task_fn = kwargs.get("create_postgres_admin_task_fn", Mock()) + self.echo_fn = kwargs.get("echo_fn", Mock()) self.get_addon_type_fn = kwargs.get("get_addon_type_fn", Mock(return_value=addon_type)) self.get_cluster_arn_fn = kwargs.get( "get_cluster_arn_fn", @@ -45,9 +48,10 @@ def __init__(self, app_name="test-application", addon_type="postgres", *args, ** self.get_or_create_task_name_fn = kwargs.get( "get_or_create_task_name_fn", Mock(return_value="task_name") ) - self.add_stack_delete_policy_to_task_role_fn = kwargs.get( - "add_stack_delete_policy_to_task_role_fn", Mock() + self.get_parameter_name_fn = kwargs.get( + "get_parameter_name", Mock(return_value="parameter_name") ) + self.subprocess = kwargs.get("subprocess", Mock(return_value="task_name")) self.update_conduit_stack_resources_fn = kwargs.get( "update_conduit_stack_resources_fn", Mock(return_value=f"task-{task_name}") ) @@ -55,28 +59,23 @@ def __init__(self, app_name="test-application", addon_type="postgres", *args, ** "wait_for_cloudformation_to_reach_status_fn", Mock() ) - self.subprocess = kwargs.get("subprocess", Mock(return_value="task_name")) - self.echo_fn = kwargs.get("echo_fn", Mock()) - self.get_parameter_name_fn = kwargs.get( - "get_parameter_name", Mock(return_value="parameter_name") - ) - def params(self): return { + "add_stack_delete_policy_to_task_role_fn": self.add_stack_delete_policy_to_task_role_fn, + "get_ecs_task_arns_fn": self.get_ecs_task_arns_fn, "application": self.application, - "subprocess_fn": self.subprocess, - "echo_fn": self.echo_fn, - "addon_client_is_running_fn": self.addon_client_is_running_fn, + "ecs_exec_is_available_fn": self.ecs_exec_is_available_fn, "connect_to_addon_client_task_fn": self.connect_to_addon_client_task_fn, "create_addon_client_task_fn": self.create_addon_client_task_fn, "create_postgres_admin_task_fn": self.create_postgres_admin_task_fn, + "echo_fn": self.echo_fn, 
"get_addon_type_fn": self.get_addon_type_fn, "get_cluster_arn_fn": self.get_cluster_arn_fn, "get_or_create_task_name_fn": self.get_or_create_task_name_fn, - "add_stack_delete_policy_to_task_role_fn": self.add_stack_delete_policy_to_task_role_fn, + "get_parameter_name_fn": self.get_parameter_name_fn, + "subprocess_fn": self.subprocess, "update_conduit_stack_resources_fn": self.update_conduit_stack_resources_fn, "wait_for_cloudformation_to_reach_status_fn": self.wait_for_cloudformation_to_reach_status_fn, - "get_parameter_name_fn": self.get_parameter_name_fn, } @@ -100,7 +99,9 @@ def test_conduit(app_name, addon_type, addon_name, access): conduit.start(env, addon_name, access) - conduit.addon_client_is_running_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + conduit.get_ecs_task_arns_fn.assert_has_calls( + [call(ecs_client, cluster_name, task_name), call(ecs_client, cluster_name, task_name)] + ) conduit.connect_to_addon_client_task_fn.assert_called_once_with( ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name ) @@ -146,14 +147,19 @@ def test_conduit(app_name, addon_type, addon_name, access): call("Creating conduit task"), call("Updating conduit task"), call("Waiting for conduit task update to complete..."), + call("Checking if exec is available for conduit task..."), call("Connecting to conduit task"), ] ) -def test_conduit_client_already_running(): +def test_conduit_with_task_already_running(): conduit_mocks = ConduitMocks( - app_name, addon_type, addon_client_is_running_fn=Mock(return_value=True) + app_name, + addon_type, + get_ecs_task_arns_fn=MagicMock( + return_value=["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] + ), ) conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") @@ -161,7 +167,7 @@ def test_conduit_client_already_running(): conduit.start(env, addon_name, "read") - 
conduit.addon_client_is_running_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) conduit.connect_to_addon_client_task_fn.assert_called_once_with( ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name ) @@ -174,7 +180,14 @@ def test_conduit_client_already_running(): conduit.update_conduit_stack_resources_fn.assert_not_called() conduit.create_addon_client_task_fn.assert_not_called() - conduit_mocks.echo_fn.assert_called_once_with("Connecting to conduit task") + conduit_mocks.echo_fn.assert_has_calls( + [ + call("Checking if a conduit task is already running for postgres"), + call("Conduit task already running"), + call("Checking if exec is available for conduit task..."), + call("Connecting to conduit task"), + ] + ) def test_conduit_domain_when_no_cluster_exists(): @@ -185,7 +198,7 @@ def test_conduit_domain_when_no_cluster_exists(): ecs_client = conduit.application.environments[env].session.client("ecs") ssm_client = conduit.application.environments[env].session.client("ssm") - with pytest.raises(NoClusterError) as exc: + with pytest.raises(NoClusterError): conduit.start(env, addon_name) conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) @@ -195,7 +208,7 @@ def test_conduit_domain_when_no_connection_secret_exists(): conduit_mocks = ConduitMocks( app_name, addon_type, - addon_client_is_running_fn=Mock(return_value=False), + get_ecs_task_arns_fn=Mock(return_value=False), create_addon_client_task_fn=Mock(side_effect=SecretNotFoundError()), ) @@ -203,7 +216,7 @@ def test_conduit_domain_when_no_connection_secret_exists(): ecs_client = conduit.application.environments[env].session.client("ecs") ssm_client = conduit.application.environments[env].session.client("ssm") - with pytest.raises(SecretNotFoundError) as exc: + with 
pytest.raises(SecretNotFoundError): conduit.start(env, addon_name) conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) @@ -223,11 +236,9 @@ def test_conduit_domain_when_client_task_fails_to_start(): ecs_client = conduit.application.environments[env].session.client("ecs") ssm_client = conduit.application.environments[env].session.client("ssm") - with pytest.raises(CreateTaskTimeoutError) as exc: + with pytest.raises(CreateTaskTimeoutError): conduit.start(env, addon_name) - conduit.addon_client_is_running_fn.assert_called_once_with( - ecs_client, cluster_name, task_name - ) + conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) conduit.connect_to_addon_client_task_fn.assert_called_once_with( ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name ) @@ -253,11 +264,9 @@ def test_conduit_domain_when_addon_type_is_invalid(): conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") - with pytest.raises(InvalidAddonTypeError) as exc: + with pytest.raises(InvalidAddonTypeError): conduit.start(env, addon_name) - conduit.addon_client_is_running_fn.assert_called_once_with( - ecs_client, cluster_name, task_name - ) + conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) def test_conduit_domain_when_addon_does_not_exist(): @@ -269,11 +278,9 @@ def test_conduit_domain_when_addon_does_not_exist(): conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") - with pytest.raises(AddonNotFoundError) as exc: + with pytest.raises(AddonNotFoundError): conduit.start(env, addon_name) - conduit.addon_client_is_running_fn.assert_called_once_with( - ecs_client, cluster_name, task_name - ) + conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, 
task_name) def test_conduit_domain_when_no_addon_config_parameter_exists(): @@ -285,8 +292,6 @@ def test_conduit_domain_when_no_addon_config_parameter_exists(): conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") - with pytest.raises(ParameterNotFoundError) as exc: + with pytest.raises(ParameterNotFoundError): conduit.start(env, addon_name) - conduit.addon_client_is_running_fn.assert_called_once_with( - ecs_client, cluster_name, task_name - ) + conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index ad80046ee..c01aa304d 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -10,15 +10,11 @@ from dbt_platform_helper.exceptions import AddonNotFoundError from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError from dbt_platform_helper.exceptions import ParameterNotFoundError from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.ecs import addon_client_is_running -from dbt_platform_helper.providers.ecs import get_cluster_arn -from dbt_platform_helper.providers.ecs import get_or_create_task_name from dbt_platform_helper.providers.secrets import SecretNotFoundError from dbt_platform_helper.providers.secrets import ( _normalise_secret_name as normalise_secret_name, @@ -49,32 +45,6 @@ def test_normalise_secret_name(test_string): assert normalise_secret_name(test_string[0]) == 
test_string[1] -@mock_aws -def test_get_cluster_arn(mocked_cluster, mock_application): - """Test that, given app and environment strings, get_cluster_arn returns the - arn of a cluster tagged with these strings.""" - - assert ( - get_cluster_arn( - mock_application.environments[env].session.client("ecs"), mock_application.name, env - ) - == mocked_cluster["cluster"]["clusterArn"] - ) - - -@mock_aws -def test_get_cluster_arn_when_there_is_no_cluster(mock_application): - """Test that, given app and environment strings, get_cluster_arn raises an - exception when no cluster tagged with these strings exists.""" - - env = "staging" - - with pytest.raises(NoClusterError): - get_cluster_arn( - mock_application.environments[env].session.client("ecs"), mock_application.name, env - ) - - @mock_aws @patch( # Nested function within provider function "dbt_platform_helper.providers.copilot.get_postgres_connection_data_updated_with_master_secret", @@ -416,132 +386,6 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn mock_subprocess.call.assert_not_called() -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -def test_addon_client_is_running( - mock_cluster_client_task, mocked_cluster, addon_type, mock_application -): - """Test that, given cluster ARN, addon type and with a running agent, - addon_client_is_running returns True.""" - - mocked_cluster_for_client = mock_cluster_client_task(addon_type) - mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - ecs_client = mock_application.environments[env].session.client("ecs") - - with patch( - "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client - ): - assert addon_client_is_running(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) - - -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -def test_addon_client_is_running_when_no_client_task_running( - mock_cluster_client_task, 
mocked_cluster, addon_type, mock_application -): - """Test that, given cluster ARN, addon type and without a running client - task, addon_client_is_running returns False.""" - - mocked_cluster_for_client = mock_cluster_client_task(addon_type, task_running=False) - mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - ecs_client = mock_application.environments[env].session.client("ecs") - - with patch( - "dbt_platform_helper.utils.application.boto3.client", return_value=mocked_cluster_for_client - ): - assert ( - addon_client_is_running(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) - is False - ) - - -@mock_aws -@pytest.mark.parametrize( - "addon_type", - ["postgres", "redis", "opensearch"], -) -def test_addon_client_is_running_when_no_client_agent_running( - addon_type, mock_application, mocked_cluster -): - ecs_client = mock_application.environments[env].session.client("ecs") - cluster_arn = mocked_cluster["cluster"]["clusterArn"] - task_name = "some-task-name" - ec2 = boto3.resource("ec2") - vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16") - subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18") - - mocked_task_definition_arn = ecs_client.register_task_definition( - family=f"copilot-foobar", - requiresCompatibilities=["FARGATE"], - networkMode="awsvpc", - containerDefinitions=[ - { - "name": "test_container", - "image": "test_image", - "cpu": 256, - "memory": 512, - "essential": True, - } - ], - )["taskDefinition"]["taskDefinitionArn"] - ecs_client.run_task( - taskDefinition=mocked_task_definition_arn, - launchType="FARGATE", - networkConfiguration={ - "awsvpcConfiguration": { - "subnets": [subnet.id], - "securityGroups": ["something-sg"], - } - }, - ) - - assert addon_client_is_running(ecs_client, cluster_arn, task_name) is False - - -@mock_aws -def test_get_or_create_task_name(mock_application): - """Test that get_or_create_task_name retrieves the task name from the - parameter store when it has been stored.""" - - addon_name = 
"app-postgres" - parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - mock_application.environments[env].session.client("ssm") - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=parameter_name, - Type="String", - Value=mock_task_name(addon_name), - ) - - task_name = get_or_create_task_name( - mock_ssm, mock_application.name, env, addon_name, parameter_name - ) - - assert task_name == mock_task_name(addon_name) - - -@mock_aws -def test_get_or_create_task_name_when_name_does_not_exist(mock_application): - """Test that get_or_create_task_name creates the task name and appends it - with a 12 digit lowercase alphanumeric string when it does not exist in the - parameter store.""" - - addon_name = "app-postgres" - ssm_client = mock_application.environments[env].session.client("ssm") - parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - task_name = get_or_create_task_name( - ssm_client, mock_application.name, env, addon_name, parameter_name - ) - random_id = task_name.rsplit("-", 1)[1] - - assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] - assert random_id.isalnum() and random_id.islower() and len(random_id) == 12 - - @mock_aws @pytest.mark.parametrize( "access", @@ -575,8 +419,7 @@ def test_get_parameter_name(access, addon_type, addon_name, mock_application): "addon_type", ["postgres", "redis", "opensearch"], ) -@patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) -def test_connect_to_addon_client_task(addon_client_is_running, addon_type, mock_application): +def test_connect_to_addon_client_task(addon_type, mock_application): """ Test that, given app, env, ECS cluster ARN and addon type, connect_to_addon_client_task calls addon_client_is_running with cluster ARN @@ -589,9 +432,16 @@ def test_connect_to_addon_client_task(addon_client_is_running, addon_type, mock_ task_name = mock_task_name(addon_type) ecs_client = 
mock_application.environments[env].session.client("ecs") mock_subprocess = Mock() + addon_client_is_running = Mock(return_value=True) connect_to_addon_client_task( - ecs_client, mock_subprocess, mock_application.name, env, "test-arn", task_name + ecs_client, + mock_subprocess, + mock_application.name, + env, + "test-arn", + task_name, + addon_client_is_running, ) addon_client_is_running.assert_called_once_with(ecs_client, "test-arn", task_name) @@ -624,9 +474,8 @@ def test_connect_to_addon_client_task(addon_client_is_running, addon_type, mock_ ["postgres", "redis", "opensearch"], ) @patch("time.sleep", return_value=None) -@patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=False) def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( - addon_client_is_running, sleep, addon_type, mock_application + sleep, addon_type, mock_application ): """Test that, given app, env, ECS cluster ARN and addon type, when the client agent fails to start, connect_to_addon_client_task calls @@ -636,10 +485,17 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( task_name = mock_task_name(addon_type) ecs_client = mock_application.environments[env].session.client("ecs") mock_subprocess = Mock() + addon_client_is_running = Mock(return_value=False) with pytest.raises(CreateTaskTimeoutError): connect_to_addon_client_task( - ecs_client, mock_subprocess, mock_application, env, "test-arn", task_name + ecs_client, + mock_subprocess, + mock_application, + env, + "test-arn", + task_name, + addon_client_is_running_fn=addon_client_is_running, ) addon_client_is_running.assert_called_with(ecs_client, "test-arn", task_name) diff --git a/tests/platform_helper/providers/test_ecs.py b/tests/platform_helper/providers/test_ecs.py new file mode 100644 index 000000000..2d7c96a55 --- /dev/null +++ b/tests/platform_helper/providers/test_ecs.py @@ -0,0 +1,160 @@ +from unittest.mock import patch + +import boto3 +import pytest +from 
moto import mock_aws + +from dbt_platform_helper.exceptions import ECSAgentNotRunning +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.providers.ecs import ecs_exec_is_available +from dbt_platform_helper.providers.ecs import get_cluster_arn +from dbt_platform_helper.providers.ecs import get_ecs_task_arns +from dbt_platform_helper.providers.ecs import get_or_create_task_name +from tests.platform_helper.conftest import mock_parameter_name +from tests.platform_helper.conftest import mock_task_name + + +@mock_aws +def test_get_cluster_arn(mocked_cluster, mock_application): + assert ( + get_cluster_arn( + mock_application.environments["development"].session.client("ecs"), + mock_application.name, + "development", + ) + == mocked_cluster["cluster"]["clusterArn"] + ) + + +@mock_aws +def test_get_cluster_arn_with_no_cluster_raises_error(mock_application): + with pytest.raises(NoClusterError): + get_cluster_arn( + mock_application.environments["development"].session.client("ecs"), + mock_application.name, + "does-not-exist", + ) + + +def test_get_ecs_task_arns_with_running_task( + mock_cluster_client_task, mocked_cluster, mock_application +): + + addon_type = "redis" + mock_cluster_client_task(addon_type) + mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] + ecs_client = mock_application.environments["development"].session.client("ecs") + + assert get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) + + +def test_get_ecs_task_arns_with_no_running_task(mocked_cluster, mock_application): + + addon_type = "opensearch" + mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] + ecs_client = mock_application.environments["development"].session.client("ecs") + + assert len(get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type))) is 0 + + +@mock_aws +def test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_application, mocked_cluster): + ecs_client = 
mock_application.environments["development"].session.client("ecs") + cluster_arn = mocked_cluster["cluster"]["clusterArn"] + task_name = "no-running-task" + ec2 = boto3.resource("ec2") + vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16") + subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18") + + # create unrelated task + mocked_task_definition_arn = ecs_client.register_task_definition( + family=f"other-task", + requiresCompatibilities=["FARGATE"], + networkMode="awsvpc", + containerDefinitions=[ + { + "name": "test_container", + "image": "test_image", + "cpu": 256, + "memory": 512, + "essential": True, + } + ], + )["taskDefinition"]["taskDefinitionArn"] + ecs_client.run_task( + taskDefinition=mocked_task_definition_arn, + launchType="FARGATE", + networkConfiguration={ + "awsvpcConfiguration": { + "subnets": [subnet.id], + "securityGroups": ["something-sg"], + } + }, + ) + + assert len(get_ecs_task_arns(ecs_client, cluster_arn, task_name)) is 0 + + +def test_ecs_exec_is_available(mock_cluster_client_task, mocked_cluster, mock_application): + + # use mock ecs_client as describe_tasks is overriden + mocked_ecs_client = mock_cluster_client_task("postgres") + mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] + + ecs_exec_is_available( + mocked_ecs_client, + mocked_cluster_arn, + ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"], + ) + + +@patch("time.sleep", return_value=None) +def test_test_ecs_exec_is_available_with_exec_not_running_raises_exception( + sleep, mock_cluster_client_task, mocked_cluster, mock_application +): + + # use mock ecs_client as describe_tasks is overriden + mocked_ecs_client = mock_cluster_client_task("postgres", "PENDING") + mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] + + with pytest.raises(ECSAgentNotRunning): + ecs_exec_is_available( + mocked_ecs_client, + mocked_cluster_arn, + ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"], + ) + + +@mock_aws +def 
test_get_or_create_task_name(mock_application): + + addon_name = "app-postgres" + parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) + mock_application.environments["development"].session.client("ssm") + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name=parameter_name, + Type="String", + Value=mock_task_name(addon_name), + ) + + task_name = get_or_create_task_name( + mock_ssm, mock_application.name, "development", addon_name, parameter_name + ) + + assert task_name == mock_task_name(addon_name) + + +@mock_aws +def test_get_or_create_task_name_appends_random_id(mock_application): + + addon_name = "app-postgres" + ssm_client = mock_application.environments["development"].session.client("ssm") + parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) + task_name = get_or_create_task_name( + ssm_client, mock_application.name, "development", addon_name, parameter_name + ) + random_id = task_name.rsplit("-", 1)[1] + + assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] + assert random_id.isalnum() and random_id.islower() and len(random_id) == 12 From 02bebd6d331fd8a10cb317460a91634c5745b462 Mon Sep 17 00:00:00 2001 From: Kate Sugden <107400614+ksugden@users.noreply.github.com> Date: Tue, 26 Nov 2024 10:43:17 +0000 Subject: [PATCH 08/38] feat: DBTP-1568 - Add s3 support for external role access (#652) Co-authored-by: Anthony Roy <81255001+antroy-madetech@users.noreply.github.com> --- dbt_platform_helper/utils/validation.py | 22 ++++++++++++++++- .../utils/fixtures/addons_files/s3_addons.yml | 12 ++++++++++ .../addons_files/s3_addons_bad_data.yml | 24 +++++++++++++++++++ .../platform_helper/utils/test_validation.py | 2 ++ 4 files changed, 59 insertions(+), 1 deletion(-) diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 0d9df76ca..8014432ac 100644 --- a/dbt_platform_helper/utils/validation.py +++ 
b/dbt_platform_helper/utils/validation.py @@ -266,6 +266,25 @@ def iam_role_arn_regex(key): ) +def dbt_email_address_regex(key): + return Regex( + r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$", + error=f"{key} must contain a valid DBT email address", + ) + + +EXTERNAL_ROLE_ACCESS = { + "role_arn": iam_role_arn_regex("role_arn"), + "read": bool, + "write": bool, + "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"), +} + +EXTERNAL_ROLE_ACCESS_NAME = Regex( + r"^([a-z][a-zA-Z0-9_-]*)$", + error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hypen or underscore", +) + DATA_IMPORT = { Optional("source_kms_key_arn"): kms_key_arn_regex("source_kms_key_arn"), "source_bucket_arn": s3_bucket_arn_regex("source_bucket_arn"), @@ -288,7 +307,8 @@ def iam_role_arn_regex(key): Optional("versioning"): bool, Optional("lifecycle_rules"): [LIFECYCLE_RULE], Optional("data_migration"): DATA_MIGRATION, - } + Optional("external_role_access"): {EXTERNAL_ROLE_ACCESS_NAME: EXTERNAL_ROLE_ACCESS}, + }, }, } diff --git a/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml b/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml index cf98d465f..ec12f330f 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml @@ -59,3 +59,15 @@ my-s3-bucket-with-data-migration: source_bucket_arn: arn:aws:s3:::test-app source_kms_key_arn: arn:aws:kms::123456789012:key/test-key worker_role_arn: arn:aws:iam::123456789012:role/test-role + +my-s3-bucket-with-external-access: + type: s3 + environments: + dev: + bucket_name: s3-data-migration + external_role_access: + some-reason-for-access: + role_arn: arn:aws:iam::123456789012:role/test-role + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk diff --git a/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml 
b/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml index 7d644faee..8d9266428 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml @@ -153,3 +153,27 @@ my-s3-bucket-data-migration-worker-role-invalid-arn: source_bucket_arn: arn:aws:s3:::test-app source_kms_key_arn: arn:aws:kms::123456789012:key/test-key worker_role_arn: 1234abc + +my-s3-external-access-bucket-invalid-arn: + type: s3 + environments: + dev: + bucket_name: mandatory + external_role_access: + some-reason-for-access: + role_arn: 1234abc + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-external-access-bucket-invalid-email: + type: s3 + environments: + dev: + bucket_name: mandatory + external_role_access: + some-reason-for-access: + role_arn: arn:aws:iam::123456789012:role/test-role + write: True + read: True + cyber_sign_off_by: somebody@dodgy-domain.com diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 1ecfa62c5..2a4e9672c 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -107,6 +107,8 @@ def test_validate_addons_success(addons_file): "my-s3-bucket-data-migration-source-bucket-invalid-arn": r"source_bucket_arn must contain a valid ARN for an S3 bucket", "my-s3-bucket-data-migration-source-kms-key-invalid-arn": r"source_kms_key_arn must contain a valid ARN for a KMS key", "my-s3-bucket-data-migration-worker-role-invalid-arn": r"worker_role_arn must contain a valid ARN for an IAM role", + "my-s3-external-access-bucket-invalid-arn": r"role_arn must contain a valid ARN for an IAM role", + "my-s3-external-access-bucket-invalid-email": r"cyber_sign_off_by must contain a valid DBT email address", }, ), ( From 4b1aec0cb6a4b860881795c338031509276a98b5 Mon Sep 17 00:00:00 2001 From: Will Gibson 
<8738245+WillGibson@users.noreply.github.com> Date: Tue, 26 Nov 2024 12:36:34 +0000 Subject: [PATCH 09/38] chore: Squad2 todo updates (#659) --- dbt_platform_helper/commands/codebase.py | 5 +++-- dbt_platform_helper/commands/conduit.py | 1 + dbt_platform_helper/commands/secrets.py | 2 +- dbt_platform_helper/domain/codebase.py | 2 +- dbt_platform_helper/providers/cloudformation.py | 2 +- dbt_platform_helper/providers/copilot.py | 2 +- dbt_platform_helper/providers/ecs.py | 1 - .../templates/pipelines/environments/manifest.yml | 1 - images/debian-python/Dockerfile.debian | 2 +- tests/platform_helper/domain/test_codebase.py | 2 +- tests/platform_helper/providers/test_copilot.py | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/dbt_platform_helper/commands/codebase.py b/dbt_platform_helper/commands/codebase.py index c84906db3..cdf4ee23c 100644 --- a/dbt_platform_helper/commands/codebase.py +++ b/dbt_platform_helper/commands/codebase.py @@ -30,7 +30,7 @@ def prepare(): try: Codebase().prepare() except NotInCodeBaseRepositoryError: - # TODO print error attached to exception + # TODO: Set exception message in the exceptions and just output the message in the command code click.secho( "You are in the deploy repository; make sure you are in the application codebase repository.", fg="red", @@ -109,6 +109,7 @@ def deploy(app, env, codebase, commit): try: Codebase().deploy(app, env, codebase, commit) except ApplicationNotFoundError: + # TODO: Set exception message in the exceptions and just output the message in the command code click.secho( f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", fg="red", @@ -120,9 +121,9 @@ def deploy(app, env, codebase, commit): fg="red", ) raise click.Abort - # TODO: don't hide json decode error except ( CopilotCodebaseNotFoundError, + # TODO: Catch this error earlier and throw a more meaningful error, maybe 
it's CopilotCodebaseNotFoundError? json.JSONDecodeError, ): click.secho( diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index c7d3eaa4d..a2828841c 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -37,6 +37,7 @@ def conduit(addon_name: str, app: str, env: str, access: str): try: Conduit(application).start(env, addon_name, access) except NoClusterError: + # TODO: Set exception message in the exceptions and just output the message in the command code, should be able to catch all errors in one block click.secho(f"""No ECS cluster found for "{app}" in "{env}" environment.""", fg="red") exit(1) except SecretNotFoundError as err: diff --git a/dbt_platform_helper/commands/secrets.py b/dbt_platform_helper/commands/secrets.py index ca0cfd01c..f237d19fc 100755 --- a/dbt_platform_helper/commands/secrets.py +++ b/dbt_platform_helper/commands/secrets.py @@ -102,7 +102,7 @@ def list(app, env): params = dict(Path=path, Recursive=False, WithDecryption=True, MaxResults=10) secrets = [] - # TODO: refactor shared code with get_ssm_secret_names + # TODO: refactor shared code with get_ssm_secret_names - Check if this is still valid while True: response = client.get_parameters_by_path(**params) diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index 952848b8a..91252bff3 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -195,7 +195,6 @@ def list(self, app: str, with_images: bool): self.echo_fn("") - # TODO return empty list without exception def __get_codebases(self, application, ssm_client): parameters = ssm_client.get_parameters_by_path( Path=f"/copilot/applications/{application.name}/codebases", @@ -205,6 +204,7 @@ def __get_codebases(self, application, ssm_client): codebases = [json.loads(p["Value"]) for p in parameters] if not codebases: + # TODO Is this really an error? 
Or just no codebases so we could return an empty list? raise NoCopilotCodebasesFoundError return codebases diff --git a/dbt_platform_helper/providers/cloudformation.py b/dbt_platform_helper/providers/cloudformation.py index b56befaa5..395a49c71 100644 --- a/dbt_platform_helper/providers/cloudformation.py +++ b/dbt_platform_helper/providers/cloudformation.py @@ -98,7 +98,7 @@ def update_conduit_stack_resources( return conduit_stack_name -# TODO opportunity to add error handling if cloudformation stack goes into rollback e.g. botocore.exceptions.WaiterError: Waiter StackUpdateComplete failed: Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" we matched expected path: "UPDATE_ROLLBACK_COMPLETE" at least once +# TODO Catch errors and raise a more human friendly Exception is the CloudFormation stack goes into a "unhappy" state, e.g. ROLLBACK_IN_PROGRESS. Currently we get things like botocore.exceptions.WaiterError: Waiter StackUpdateComplete failed: Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" we matched expected path: "UPDATE_ROLLBACK_COMPLETE" at least once def wait_for_cloudformation_to_reach_status(cloudformation_client, stack_status, stack_name): waiter = cloudformation_client.get_waiter(stack_status) diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index c0ceb73b5..61c1a055d 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -59,7 +59,7 @@ def create_addon_client_task( # We cannot check for botocore.errorfactory.NoSuchEntityException as botocore generates that class on the fly as part of errorfactory. # factory. Checking the error code is the recommended way of handling these exceptions. 
if ex.response.get("Error", {}).get("Code", None) != "NoSuchEntity": - # TODO this should raise an exception and caught at the command layer + # TODO Raise an exception to be caught at the command layer abort_with_error( f"cannot obtain Role {role_name}: {ex.response.get('Error', {}).get('Message', '')}" ) diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 2160dfc12..008deba95 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -7,7 +7,6 @@ from dbt_platform_helper.exceptions import NoClusterError -# TODO Refactor this to support passing a list of tags to check against, allowing for a more generic implementation def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: for cluster_arn in ecs_client.list_clusters()["clusterArns"]: tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) diff --git a/dbt_platform_helper/templates/pipelines/environments/manifest.yml b/dbt_platform_helper/templates/pipelines/environments/manifest.yml index f4bcc0161..aaa1ce0d5 100644 --- a/dbt_platform_helper/templates/pipelines/environments/manifest.yml +++ b/dbt_platform_helper/templates/pipelines/environments/manifest.yml @@ -16,7 +16,6 @@ source: provider: GitHub # Additional properties that further specify the location of the artifacts. 
properties: - # Todo: Allow for overriding this, but without risking deploying a branch to higher environments branch: main repository: https://github.com/{{ git_repo }} connection_name: {{ app_name }} diff --git a/images/debian-python/Dockerfile.debian b/images/debian-python/Dockerfile.debian index 6c5a04fb0..6d4e550ed 100644 --- a/images/debian-python/Dockerfile.debian +++ b/images/debian-python/Dockerfile.debian @@ -39,7 +39,7 @@ RUN git clone https://github.com/pyenv/pyenv .pyenv \ && pyenv install ${PYTHON_VERSIONS} \ && pyenv global $(echo ${PYTHON_VERSIONS} | awk '{ print $NF }') -# Todo: Revert this change and republish image +# Todo: Revert this change and republish image - Will Doing RUN pip install poetry # Install AWS CLI diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index 469aef8a6..487746cf5 100644 --- a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -379,7 +379,7 @@ def test_codebase_deploy_does_not_trigger_build_without_an_application(): with pytest.raises(ApplicationNotFoundError) as exc: codebase.deploy("not-an-application", "dev", "application", "ab1c23d") - # TODO review + # TODO This assert can probably go now we are catching the errors and outputting them at the command layer mocks.echo_fn.assert_has_calls( [ call( diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index c01aa304d..671b45f14 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -453,7 +453,7 @@ def test_connect_to_addon_client_task(addon_type, mock_application): ) -# Todo: Implement this test +# Todo: Implement a test to cover the desired behaviour # @patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) # def test_connect_to_addon_client_task_waits_for_command_agent(addon_client_is_running, 
mock_application): # task_name = mock_task_name("postgres") # Addon type for this test does not matter From 729c0821bdbc96f49c832a79bf2211475a737bf9 Mon Sep 17 00:00:00 2001 From: tony griffin <54268925+tony-griffin@users.noreply.github.com> Date: Tue, 26 Nov 2024 12:55:26 +0000 Subject: [PATCH 10/38] feat: DBTP-1395 Add validation for new slack alert channel Id that will be set in -alb in platform-config file (#635) --- dbt_platform_helper/utils/validation.py | 1 + .../utils/fixtures/addons_files/alb_addons.yml | 1 + .../utils/fixtures/addons_files/alb_addons_bad_data.yml | 7 +++++++ tests/platform_helper/utils/test_validation.py | 1 + 4 files changed, 10 insertions(+) diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 8014432ac..990ffa773 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -422,6 +422,7 @@ def dbt_email_address_regex(key): Optional("forwarded_values_query_string"): bool, Optional("origin_protocol_policy"): str, Optional("origin_ssl_protocols"): list, + Optional("slack_alert_channel_alb_secret_rotation"): str, Optional("viewer_certificate_minimum_protocol_version"): str, Optional("viewer_certificate_ssl_support_method"): str, Optional("viewer_protocol_policy"): str, diff --git a/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml b/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml index d33aa6df1..56d0097f9 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/alb_addons.yml @@ -55,6 +55,7 @@ my-alb: forwarded_values_query_string: true origin_protocol_policy: "https-only" origin_ssl_protocols: ["TLSv1.2", "TLSv1.3"] + slack_alert_channel_alb_secret_rotation: "Z000AB0TEST" viewer_certificate_minimum_protocol_version: "TLSv1.2" viewer_certificate_ssl_support_method: "sni-only" viewer_protocol_policy: "redirect-to-https" diff --git 
a/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml b/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml index 8ed2ab8df..3670199d6 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/alb_addons_bad_data.yml @@ -114,6 +114,13 @@ my-alb-origin-ssl-protocols-should-be-a-list: environments: dev: origin_ssl_protocols: False # Should be a list + + +my-alb-slack-alert-channel-alb-secret-rotation-should-be-a-string: + type: alb + environments: + dev: + slack_alert_channel_alb_secret_rotation: 12345 # Should be a string my-alb-viewer-certificate-minimum-protocol-version-should-be-a-string: diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 2a4e9672c..6a4f703f2 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -226,6 +226,7 @@ def test_validate_addons_success(addons_file): "my-alb-forwarded-values-query-string-should-be-a-bool": r"environments.*dev.*should be instance of 'bool'", "my-alb-origin-protocol-policy-should-be-a-string": r"environments.*dev.*should be instance of 'str'", "my-alb-origin-ssl-protocols-should-be-a-list": r"environments.*dev.*should be instance of 'list'", + "my-alb-slack-alert-channel-alb-secret-rotation-should-be-a-string": r"environments.*dev.*should be instance of 'str'", "my-alb-viewer-certificate-minimum-protocol-version-should-be-a-string": r"environments.*dev.*should be instance of 'str'", "my-alb-viewer-certificate-ssl-support-method-should-be-a-string": r"environments.*dev.*should be instance of 'str'", "my-alb-view-protocol-policy-should-be-a-string": r"environments.*dev.*should be instance of 'str'", From b0cffe457a4ad76abd2b8d9fe8cddec661e45ef1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 
14:00:28 +0000 Subject: [PATCH 11/38] chore(main): release 12.2.0 (#658) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 13 +++++++++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5af52788d..e73dfa7b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [12.2.0](https://github.com/uktrade/platform-tools/compare/12.1.0...12.2.0) (2024-11-26) + + +### Features + +* DBTP-1395 Add validation for new slack alert channel Id that will be set in <application>-alb in platform-config file ([#635](https://github.com/uktrade/platform-tools/issues/635)) ([729c082](https://github.com/uktrade/platform-tools/commit/729c0821bdbc96f49c832a79bf2211475a737bf9)) +* DBTP-1568 - Add s3 support for external role access ([#652](https://github.com/uktrade/platform-tools/issues/652)) ([02bebd6](https://github.com/uktrade/platform-tools/commit/02bebd6d331fd8a10cb317460a91634c5745b462)) + + +### Bug Fixes + +* DBTP-1577 Fix conduit (ecs) exec race condition ([#656](https://github.com/uktrade/platform-tools/issues/656)) ([22eafa0](https://github.com/uktrade/platform-tools/commit/22eafa0c8388b3132663d953bf97c85887c94999)) + ## [12.1.0](https://github.com/uktrade/platform-tools/compare/12.0.2...12.1.0) (2024-11-21) diff --git a/pyproject.toml b/pyproject.toml index e54c7d560..3a799b1ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.1.0" +version = "12.2.0" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index ca3a51140..ce79e2a70 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.1.0" + ".": "12.2.0" } From 9ad8c67d8abc8ad61a4123bb90d361b3e26eacd3 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:26:12 +0000 Subject: [PATCH 12/38] chore!: Don't install poetry in Dockerfile.debian (#655) --- dbt_platform_helper/providers/ecs.py | 1 + images/debian-python/Dockerfile.debian | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 008deba95..2878e91ac 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -7,6 +7,7 @@ from dbt_platform_helper.exceptions import NoClusterError +# Todo: Refactor to a class, review, then perhaps do the others def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: for cluster_arn in ecs_client.list_clusters()["clusterArns"]: tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) diff --git a/images/debian-python/Dockerfile.debian b/images/debian-python/Dockerfile.debian index 6d4e550ed..dacb77beb 100644 --- a/images/debian-python/Dockerfile.debian +++ b/images/debian-python/Dockerfile.debian @@ -39,9 +39,6 @@ RUN git clone https://github.com/pyenv/pyenv .pyenv \ && pyenv install ${PYTHON_VERSIONS} \ && pyenv global $(echo ${PYTHON_VERSIONS} | awk '{ print $NF }') -# Todo: Revert this change and republish image - Will Doing -RUN pip install poetry - # Install AWS CLI RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o "awscliv2.zip" \ && unzip awscliv2.zip \ From 88d68bae627250994e9a1846ca705c9d5687bee0 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> 
Date: Tue, 26 Nov 2024 14:44:55 +0000 Subject: [PATCH 13/38] chore(main): release 12.2.1 (#662) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Will Gibson <8738245+WillGibson@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e73dfa7b2..90a0528f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [12.2.1](https://github.com/uktrade/platform-tools/compare/12.2.0...12.2.1) (2024-11-26) + + +### Miscellaneous Chores + +* Don't install poetry in Dockerfile.debian ([#655](https://github.com/uktrade/platform-tools/issues/655)) ([9ad8c67](https://github.com/uktrade/platform-tools/commit/9ad8c67d8abc8ad61a4123bb90d361b3e26eacd3)) + ## [12.2.0](https://github.com/uktrade/platform-tools/compare/12.1.0...12.2.0) (2024-11-26) diff --git a/pyproject.toml b/pyproject.toml index 3a799b1ce..212082057 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.2.0" +version = "12.2.1" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index ce79e2a70..e1160f9e0 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.2.0" + ".": "12.2.1" } From 46eddff14ba2460ebe4beee1378ac75b617a8821 Mon Sep 17 00:00:00 2001 From: Chiara <95863059+chiaramapellimt@users.noreply.github.com> Date: Tue, 26 Nov 2024 15:37:05 +0000 Subject: [PATCH 14/38] fix: Fixing json loads (#664) --- dbt_platform_helper/domain/codebase.py | 2 +- dbt_platform_helper/utils/aws.py | 9 ++- tests/platform_helper/domain/test_codebase.py | 6 +- tests/platform_helper/utils/test_aws.py | 63 +++++++++++++++++++ 4 files changed, 73 insertions(+), 7 deletions(-) diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index 91252bff3..eb1b807e2 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -144,7 +144,7 @@ def deploy(self, app, env, codebase, commit): if not application.environments.get(env): raise ApplicationEnvironmentNotFoundError() - json.loads(self.check_codebase_exists_fn(session, application, codebase)) + self.check_codebase_exists_fn(session, application, codebase) self.check_image_exists_fn(session, application, codebase, commit) diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index 0ac6399af..e59c2f511 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -488,13 +488,16 @@ def start_build_extraction(codebuild_client, build_options): def check_codebase_exists(session: Session, application, codebase: str): try: ssm_client = session.client("ssm") - ssm_client.get_parameter( - Name=f"/copilot/applications/{application.name}/codebases/{codebase}" - )["Parameter"]["Value"] + json.loads( + ssm_client.get_parameter( + Name=f"/copilot/applications/{application.name}/codebases/{codebase}" + )["Parameter"]["Value"] + 
) except ( KeyError, ValueError, ssm_client.exceptions.ParameterNotFound, + json.JSONDecodeError, ): raise CopilotCodebaseNotFoundError diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index 487746cf5..b05f3dcf2 100644 --- a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -296,8 +296,8 @@ def test_codebase_deploy_exception_with_a_nonexistent_codebase(): codebase.deploy("test-application", "development", "application", "nonexistent-commit-hash") -def test_codebase_deploy_exception_with_malformed_json(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(return_value="{ mlaf = josn}")) +def test_check_codebase_exists_returns_error_when_no_json(): + mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError)) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -305,7 +305,7 @@ def test_codebase_deploy_exception_with_malformed_json(): "Parameter": {"Value": json.dumps({"name": "application"})}, } - with pytest.raises(json.JSONDecodeError): + with pytest.raises(CopilotCodebaseNotFoundError): codebase = Codebase(**mocks.params()) codebase.deploy("test-application", "development", "application", "nonexistent-commit-hash") diff --git a/tests/platform_helper/utils/test_aws.py b/tests/platform_helper/utils/test_aws.py index b60fe18ba..6ccbc7281 100644 --- a/tests/platform_helper/utils/test_aws.py +++ b/tests/platform_helper/utils/test_aws.py @@ -11,9 +11,11 @@ from moto import mock_aws from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError from dbt_platform_helper.exceptions import ValidationException from dbt_platform_helper.utils.aws import NoProfileForAccountIdError from dbt_platform_helper.utils.aws import Vpc +from dbt_platform_helper.utils.aws import check_codebase_exists from dbt_platform_helper.utils.aws import get_account_details from 
dbt_platform_helper.utils.aws import get_aws_session_or_abort from dbt_platform_helper.utils.aws import get_codestar_connection_arn @@ -421,6 +423,67 @@ def test_get_public_repository_arn(mock_get_aws_session_or_abort, repository_uri assert result == expected_arn +@mock_aws +def test_check_codebase_exists(mock_application): + mock_application.environments["development"].session.client("ssm") + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name="/copilot/applications/test-application/codebases/application", + Type="String", + Value=""" + { + "name": "test-app", + "repository": "uktrade/test-app", + "services": "1234" + } + """, + ) + + check_codebase_exists( + mock_application.environments["development"].session, mock_application, "application" + ) + + +@mock_aws +def test_check_codebase_does_not_exist(mock_application): + mock_application.environments["development"].session.client("ssm") + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name="/copilot/applications/test-application/codebases/application", + Type="String", + Value=""" + { + "name": "test-app", + "repository": "uktrade/test-app", + "services": "1234" + } + """, + ) + + with pytest.raises(CopilotCodebaseNotFoundError): + check_codebase_exists( + mock_application.environments["development"].session, + mock_application, + "not-found-application", + ) + + +@mock_aws +def test_check_codebase_errors_when_json_is_malformed(mock_application): + mock_application.environments["development"].session.client("ssm") + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name="/copilot/applications/test-application/codebases/application", + Type="String", + Value="not valid JSON", + ) + + with pytest.raises(CopilotCodebaseNotFoundError): + check_codebase_exists( + mock_application.environments["development"].session, mock_application, "application" + ) + + @patch("dbt_platform_helper.utils.aws.get_aws_session_or_abort") def test_get_account_id(mock_get_aws_session_or_abort): 
mock_get_caller_identity(mock_get_aws_session_or_abort) From 5b30f558617fa7b37690fef1167a3caac10744c3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 15:42:34 +0000 Subject: [PATCH 15/38] chore(main): release 12.2.2 (#666) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 90a0528f6..204724279 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [12.2.2](https://github.com/uktrade/platform-tools/compare/12.2.1...12.2.2) (2024-11-26) + + +### Bug Fixes + +* Fixing json loads ([#664](https://github.com/uktrade/platform-tools/issues/664)) ([46eddff](https://github.com/uktrade/platform-tools/commit/46eddff14ba2460ebe4beee1378ac75b617a8821)) + ## [12.2.1](https://github.com/uktrade/platform-tools/compare/12.2.0...12.2.1) (2024-11-26) diff --git a/pyproject.toml b/pyproject.toml index 212082057..6bd451089 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.2.1" +version = "12.2.2" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index e1160f9e0..8a8917679 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.2.1" + ".": "12.2.2" } From f0f561beba2239f757fec62cd530483432bb953b Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Fri, 29 Nov 2024 12:40:22 +0000 Subject: [PATCH 16/38] fix: DBTP-1524 Make subnet order from environment generate match CloudFormation exports (#665) --- dbt_platform_helper/commands/environment.py | 54 ++++- .../test_command_environment.py | 192 ++++++++++++------ 2 files changed, 179 insertions(+), 67 deletions(-) diff --git a/dbt_platform_helper/commands/environment.py b/dbt_platform_helper/commands/environment.py index 6af166066..51a4434bd 100644 --- a/dbt_platform_helper/commands/environment.py +++ b/dbt_platform_helper/commands/environment.py @@ -71,7 +71,7 @@ def get_vpc_id(session, env_name, vpc_name=None): return vpcs[0]["VpcId"] -def get_subnet_ids(session, vpc_id): +def get_subnet_ids(session, vpc_id, environment_name): subnets = session.client("ec2").describe_subnets( Filters=[{"Name": "vpc-id", "Values": [vpc_id]}] )["Subnets"] @@ -81,11 +81,41 @@ def get_subnet_ids(session, vpc_id): raise click.Abort public_tag = {"Key": "subnet_type", "Value": "public"} - public = [subnet["SubnetId"] for subnet in subnets if public_tag in subnet["Tags"]] + public_subnets = [subnet["SubnetId"] for subnet in subnets if public_tag in subnet["Tags"]] private_tag = {"Key": "subnet_type", "Value": "private"} - private = [subnet["SubnetId"] for subnet in subnets if private_tag in subnet["Tags"]] + private_subnets = [subnet["SubnetId"] for subnet in subnets if private_tag in subnet["Tags"]] + + # This call and the method declaration can be removed when we stop using AWS Copilot to deploy the services + public_subnets, private_subnets = 
_match_subnet_id_order_to_cloudformation_exports( + session, + environment_name, + public_subnets, + private_subnets, + ) + + return public_subnets, private_subnets + + +def _match_subnet_id_order_to_cloudformation_exports( + session, environment_name, public_subnets, private_subnets +): + public_subnet_exports = [] + private_subnet_exports = [] + for page in session.client("cloudformation").get_paginator("list_exports").paginate(): + for export in page["Exports"]: + if f"-{environment_name}-" in export["Name"]: + if export["Name"].endswith("-PublicSubnets"): + public_subnet_exports = export["Value"].split(",") + if export["Name"].endswith("-PrivateSubnets"): + private_subnet_exports = export["Value"].split(",") + + # If the elements match, regardless of order, use the list from the CloudFormation exports + if set(public_subnets) == set(public_subnet_exports): + public_subnets = public_subnet_exports + if set(private_subnets) == set(private_subnet_exports): + private_subnets = private_subnet_exports - return public, private + return public_subnets, private_subnets def get_cert_arn(session, application, env_name): @@ -142,22 +172,26 @@ def generate_terraform(name, terraform_platform_modules_version): ) -def _generate_copilot_environment_manifests(name, application, env_config, session): +def _generate_copilot_environment_manifests(environment_name, application, env_config, session): env_template = setup_templates().get_template("env/manifest.yml") vpc_name = env_config.get("vpc", None) - vpc_id = get_vpc_id(session, name, vpc_name) - pub_subnet_ids, priv_subnet_ids = get_subnet_ids(session, vpc_id) - cert_arn = get_cert_arn(session, application, name) + vpc_id = get_vpc_id(session, environment_name, vpc_name) + pub_subnet_ids, priv_subnet_ids = get_subnet_ids(session, vpc_id, environment_name) + cert_arn = get_cert_arn(session, application, environment_name) contents = env_template.render( { - "name": name, + "name": environment_name, "vpc_id": vpc_id, 
"pub_subnet_ids": pub_subnet_ids, "priv_subnet_ids": priv_subnet_ids, "certificate_arn": cert_arn, } ) - click.echo(mkfile(".", f"copilot/environments/{name}/manifest.yml", contents, overwrite=True)) + click.echo( + mkfile( + ".", f"copilot/environments/{environment_name}/manifest.yml", contents, overwrite=True + ) + ) def _generate_terraform_environment_manifests( diff --git a/tests/platform_helper/test_command_environment.py b/tests/platform_helper/test_command_environment.py index 6e74615cf..2715df834 100644 --- a/tests/platform_helper/test_command_environment.py +++ b/tests/platform_helper/test_command_environment.py @@ -231,7 +231,6 @@ def test_offline_an_environment_when_listener_not_found( load_application, mock_application, ): - load_application.return_value = mock_application find_https_listener.side_effect = ListenerNotFoundError() @@ -269,7 +268,6 @@ def test_successful_offline_multiple_services( load_application, mock_application, ): - mock_application.services["web2"] = Service("web2", "Load Balanced Web Service") load_application.return_value = mock_application @@ -322,7 +320,6 @@ def test_successful_online( load_application, mock_application, ): - load_application.return_value = mock_application result = CliRunner().invoke( @@ -358,7 +355,6 @@ def test_online_an_environment_that_is_not_offline( load_application, mock_application, ): - load_application.return_value = mock_application result = CliRunner().invoke( @@ -383,7 +379,6 @@ def test_online_an_environment_when_listener_not_found( load_application, mock_application, ): - load_application.return_value = mock_application find_https_listener.side_effect = ListenerNotFoundError() @@ -434,6 +429,7 @@ def test_online_an_environment_when_load_balancer_not_found( class TestGenerate: + @patch("dbt_platform_helper.jinja2_tags.version", new=Mock(return_value="v0.1-TEST")) @patch("dbt_platform_helper.commands.environment.get_cert_arn", return_value="arn:aws:acm:test") @patch( @@ -486,7 +482,7 @@ def 
test_generate( ) mock_get_vpc_id.assert_called_once_with(mocked_session, "test", expected_vpc) - mock_get_subnet_ids.assert_called_once_with(mocked_session, "vpc-abc123") + mock_get_subnet_ids.assert_called_once_with(mocked_session, "vpc-abc123", "test") mock_get_cert_arn.assert_called_once_with(mocked_session, "my-app", "test") mock_get_aws_session_1.assert_called_once_with("non-prod-acc") @@ -595,19 +591,8 @@ def test_fail_with_explanation_if_vpc_name_option_used(self, fakefs): @pytest.mark.parametrize("vpc_name", ["default", "default-prod"]) @mock_aws def test_get_vpc_id(self, vpc_name): - session = boto3.session.Session() - vpc = session.client("ec2").create_vpc( - CidrBlock="10.0.0.0/16", - TagSpecifications=[ - { - "ResourceType": "vpc", - "Tags": [ - {"Key": "Name", "Value": vpc_name}, - ], - }, - ], - )["Vpc"] + vpc = self.create_moto_mocked_vpc(session, vpc_name) expected_vpc_id = vpc["VpcId"] actual_vpc_id = get_vpc_id(session, "prod") @@ -630,54 +615,119 @@ def test_get_vpc_id_failure(self, capsys): @mock_aws def test_get_subnet_ids(self): - session = boto3.session.Session() - vpc = session.client("ec2").create_vpc( - CidrBlock="10.0.0.0/16", - TagSpecifications=[ - { - "ResourceType": "vpc", - "Tags": [ - {"Key": "Name", "Value": "default-development"}, - ], - }, - ], - )["Vpc"] - public_subnet = session.client("ec2").create_subnet( - CidrBlock="10.0.128.0/24", - VpcId=vpc["VpcId"], - TagSpecifications=[ - { - "ResourceType": "subnet", - "Tags": [ - {"Key": "subnet_type", "Value": "public"}, - ], - }, - ], - )["Subnet"] - private_subnet = session.client("ec2").create_subnet( - CidrBlock="10.0.1.0/24", - VpcId=vpc["VpcId"], - TagSpecifications=[ + vpc_id = self.create_moto_mocked_vpc(session, "default-development")["VpcId"] + expected_public_subnet_id = self.create_moto_mocked_subnet( + session, vpc_id, "public", "10.0.128.0/24" + ) + expected_private_subnet_id = self.create_moto_mocked_subnet( + session, vpc_id, "private", "10.0.1.0/24" + ) + + 
public_subnet_ids, private_subnet_ids = get_subnet_ids( + session, vpc_id, "environment-name-does-not-matter" + ) + + assert public_subnet_ids == [expected_public_subnet_id] + assert private_subnet_ids == [expected_private_subnet_id] + + @mock_aws + def test_get_subnet_ids_with_cloudformation_export_returning_a_different_order(self): + # This test and the associated behavior can be removed when we stop using AWS Copilot to deploy the services + def _list_exports_subnet_object(environment: str, subnet_ids: list[str], visibility: str): + return { + "Name": f"application-{environment}-{visibility.capitalize()}Subnets", + "Value": f"{','.join(subnet_ids)}", + } + + def _describe_subnets_subnet_object(subnet_id: str, visibility: str): + return { + "SubnetId": subnet_id, + "Tags": [{"Key": "subnet_type", "Value": visibility}], + } + + def _non_subnet_exports(number): + return [ { - "ResourceType": "subnet", - "Tags": [ - {"Key": "subnet_type", "Value": "private"}, - ], - }, - ], - )["Subnet"] + "Name": f"application-environment-NotASubnet", + "Value": "does-not-matter", + } + ] * number + + expected_public_subnet_id_1 = "subnet-1public" + expected_public_subnet_id_2 = "subnet-2public" + expected_private_subnet_id_1 = "subnet-1private" + expected_private_subnet_id_2 = "subnet-2private" + + mock_boto3_session = MagicMock() + + # Cloudformation list_exports returns a paginated response with the exports in the expected order plus some we are not interested in + mock_boto3_session.client("cloudformation").get_paginator( + "list_exports" + ).paginate.return_value = [ + {"Exports": _non_subnet_exports(5)}, + { + "Exports": [ + _list_exports_subnet_object( + "environment", + [ + expected_public_subnet_id_1, + expected_public_subnet_id_2, + ], + "public", + ), + _list_exports_subnet_object( + "environment", + [ + expected_private_subnet_id_1, + expected_private_subnet_id_2, + ], + "private", + ), + _list_exports_subnet_object( + "otherenvironment", + 
[expected_public_subnet_id_1], + "public", + ), + _list_exports_subnet_object( + "otherenvironment", + [expected_private_subnet_id_2], + "private", + ), + ] + }, + {"Exports": _non_subnet_exports(5)}, + ] + + # EC2 client should return them in an order that differs from the CloudFormation Export + mock_boto3_session.client("ec2").describe_subnets.return_value = { + "Subnets": [ + _describe_subnets_subnet_object(expected_public_subnet_id_2, "public"), + _describe_subnets_subnet_object(expected_public_subnet_id_1, "public"), + _describe_subnets_subnet_object(expected_private_subnet_id_2, "private"), + _describe_subnets_subnet_object(expected_private_subnet_id_1, "private"), + ] + } - public, private = get_subnet_ids(session, vpc["VpcId"]) + # Act (there's a lot of setup, worth signposting where this happens) + public_subnet_ids, private_subnet_ids = get_subnet_ids( + mock_boto3_session, "vpc-id-does-not-matter", "environment" + ) - assert public == [public_subnet["SubnetId"]] - assert private == [private_subnet["SubnetId"]] + assert public_subnet_ids == [ + expected_public_subnet_id_1, + expected_public_subnet_id_2, + ] + assert private_subnet_ids == [ + expected_private_subnet_id_1, + expected_private_subnet_id_2, + ] @mock_aws def test_get_subnet_ids_failure(self, capsys): with pytest.raises(click.Abort): - get_subnet_ids(boto3.session.Session(), "123") + get_subnet_ids(boto3.session.Session(), "123", "environment-name-does-not-matter") captured = capsys.readouterr() @@ -710,6 +760,34 @@ def test_cert_arn_failure(self, capsys): in captured.out ) + def create_moto_mocked_subnet(self, session, vpc_id, visibility, cidr_block): + return session.client("ec2").create_subnet( + CidrBlock=cidr_block, + VpcId=vpc_id, + TagSpecifications=[ + { + "ResourceType": "subnet", + "Tags": [ + {"Key": "subnet_type", "Value": visibility}, + ], + }, + ], + )["Subnet"]["SubnetId"] + + def create_moto_mocked_vpc(self, session, vpc_name): + vpc = session.client("ec2").create_vpc( + 
CidrBlock="10.0.0.0/16", + TagSpecifications=[ + { + "ResourceType": "vpc", + "Tags": [ + {"Key": "Name", "Value": vpc_name}, + ], + }, + ], + )["Vpc"] + return vpc + class TestFindHTTPSCertificate: @patch( From db6a5526da55a1a66c2f954b0814b96522a857f1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 29 Nov 2024 12:44:31 +0000 Subject: [PATCH 17/38] chore(main): release 12.2.3 (#667) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 204724279..0c5c4007d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [12.2.3](https://github.com/uktrade/platform-tools/compare/12.2.2...12.2.3) (2024-11-29) + + +### Bug Fixes + +* DBTP-1524 Make subnet order from environment generate match CloudFormation exports ([#665](https://github.com/uktrade/platform-tools/issues/665)) ([f0f561b](https://github.com/uktrade/platform-tools/commit/f0f561beba2239f757fec62cd530483432bb953b)) + ## [12.2.2](https://github.com/uktrade/platform-tools/compare/12.2.1...12.2.2) (2024-11-26) diff --git a/pyproject.toml b/pyproject.toml index 6bd451089..962a35adb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.2.2" +version = "12.2.3" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index 8a8917679..d083fd0eb 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.2.2" + ".": "12.2.3" } From 2ce98bfdcd22b880867306e3181f4815e46c6acb Mon Sep 17 00:00:00 2001 From: Connor Hindle <69192234+DeveloperConnor@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:09:55 +0000 Subject: [PATCH 18/38] fix: DBTP-1572 - Fix _validate_exension_supported_versions incorrectly raising an error when no version is supplied (#660) --- dbt_platform_helper/utils/validation.py | 7 +- .../platform_helper/utils/test_validation.py | 92 ++++++++++--------- 2 files changed, 55 insertions(+), 44 deletions(-) diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 990ffa773..809c3acda 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -590,13 +590,16 @@ def _validate_extension_supported_versions( if not isinstance(environments, dict): click.secho( - "Error: Opensearch extension definition is invalid type, expected dictionary", + f"Error: {extension_type} extension definition is invalid type, expected dictionary", fg="red", ) continue for environment, env_config in environments.items(): + + # An extension version doesn't need to be specified for all environments, provided one is specified under "*". 
+ # So check if the version is set before checking if it's supported extension_version = env_config.get(version_key) - if extension_version not in supported_extension_versions: + if extension_version and extension_version not in supported_extension_versions: extensions_with_invalid_version.append( {"environment": environment, "version": extension_version} ) diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 6a4f703f2..6de93e31a 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -941,19 +941,58 @@ def test_validate_database_copy_multi_postgres_failures(capfd): ) +@pytest.mark.parametrize( + "config, expected_response", + [ + ( + # No engine defined in either env + { + "extensions": { + "connors-redis": { + "type": "redis", + "environments": {"*": {"plan": "tiny"}, "prod": {"plan": "largish"}}, + } + }, + }, + "", + ), + ( + # Valid engine version defined in * + { + "extensions": { + "connors-redis": { + "type": "redis", + "environments": { + "*": {"engine": "7.1", "plan": "tiny"}, + "prod": {"plan": "tiny"}, + }, + } + }, + }, + "", + ), + ( + # Invalid engine defined in prod environment + { + "extensions": { + "connors-redis": { + "type": "redis", + "environments": { + "*": {"plan": "tiny"}, + "prod": {"engine": "invalid", "plan": "tiny"}, + }, + } + }, + }, + "redis version for environment prod is not in the list of supported redis versions: ['7.1']. 
Provided Version: invalid", + ), + ], +) @patch("dbt_platform_helper.utils.validation.get_supported_redis_versions", return_value=["7.1"]) -def test_validate_extensions_supported_versions_successful_with_supported_version( - mock_supported_versions, capsys +def test_validate_extension_supported_versions( + mock_supported_versions, config, expected_response, capsys ): - config = { - "application": "test-app", - "environments": {"dev": {}, "test": {}, "prod": {}}, - "extensions": { - "connors-redis": {"type": "redis", "environments": {"*": {"engine": "7.1"}}} - }, - } - _validate_extension_supported_versions( config=config, extension_type="redis", @@ -961,38 +1000,7 @@ def test_validate_extensions_supported_versions_successful_with_supported_versio get_supported_versions_fn=mock_supported_versions, ) - # Nothing should be logged if the version is valid. captured = capsys.readouterr() - assert captured.out == "" - assert captured.err == "" - - -@patch("dbt_platform_helper.utils.validation.get_supported_redis_versions", return_value=["7.1"]) -def test_validate_extensions_supported_versions_fails_with_unsupported_version( - mock_supported_versions, capsys -): - config = { - "application": "test-app", - "environments": {"dev": {}, "test": {}, "prod": {}}, - "extensions": { - "connors-redis": { - "type": "redis", - "environments": {"*": {"engine": "some-engine-which-probably-doesnt-exist"}}, - } - }, - } - - _validate_extension_supported_versions( - config=config, - extension_type="redis", - version_key="engine", - get_supported_versions_fn=mock_supported_versions, - ) - - captured = capsys.readouterr() - assert ( - "redis version for environment * is not in the list of supported redis versions" - in captured.out - ) + assert expected_response in captured.out assert captured.err == "" From 6017e046f7c64b8e3cabbd71b08feb59ef3ff908 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 10:02:47 
+0000 Subject: [PATCH 19/38] chore(main): release 12.2.4 (#668) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c5c4007d..0b2cd91f4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [12.2.4](https://github.com/uktrade/platform-tools/compare/12.2.3...12.2.4) (2024-12-02) + + +### Bug Fixes + +* DBTP-1572 - Fix _validate_exension_supported_versions incorrectly raising an error when no version is supplied ([#660](https://github.com/uktrade/platform-tools/issues/660)) ([2ce98bf](https://github.com/uktrade/platform-tools/commit/2ce98bfdcd22b880867306e3181f4815e46c6acb)) + ## [12.2.3](https://github.com/uktrade/platform-tools/compare/12.2.2...12.2.3) (2024-11-29) diff --git a/pyproject.toml b/pyproject.toml index 962a35adb..7e7f7c64a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.2.3" +version = "12.2.4" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index d083fd0eb..b1f1b166a 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.2.3" + ".": "12.2.4" } From 081f8ff2f34a46791d0b51c915e2d8dbf3efbe25 Mon Sep 17 00:00:00 2001 From: A Gleeson Date: Mon, 2 Dec 2024 16:15:21 +0000 Subject: [PATCH 20/38] refactor: Improving provider structure and exception handling (#661) Co-authored-by: chiaramapellimt Co-authored-by: Chiara <95863059+chiaramapellimt@users.noreply.github.com> Co-authored-by: Will Gibson <8738245+WillGibson@users.noreply.github.com> --- dbt_platform_helper/commands/codebase.py | 89 +------ dbt_platform_helper/commands/conduit.py | 52 +--- dbt_platform_helper/domain/codebase.py | 12 +- dbt_platform_helper/domain/conduit.py | 43 +--- dbt_platform_helper/exceptions.py | 107 +++++++-- .../providers/cloudformation.py | 222 ++++++++++-------- dbt_platform_helper/providers/copilot.py | 17 +- dbt_platform_helper/providers/ecs.py | 148 ++++++------ dbt_platform_helper/providers/secrets.py | 148 ++++++------ dbt_platform_helper/utils/application.py | 2 +- dbt_platform_helper/utils/aws.py | 4 +- tests/platform_helper/domain/test_codebase.py | 58 ++--- tests/platform_helper/domain/test_conduit.py | 52 +++- .../domain/test_database_copy.py | 2 +- .../providers/test_cloudformation.py | 78 +++--- .../platform_helper/providers/test_copilot.py | 201 ++-------------- tests/platform_helper/providers/test_ecs.py | 118 ++++++---- .../platform_helper/providers/test_secrets.py | 189 ++++++++++++++- .../platform_helper/test_command_codebase.py | 22 +- tests/platform_helper/test_command_conduit.py | 55 +---- tests/platform_helper/test_exceptions.py | 110 +++++++++ 21 files changed, 905 insertions(+), 824 deletions(-) create mode 100644 tests/platform_helper/test_exceptions.py diff --git a/dbt_platform_helper/commands/codebase.py 
b/dbt_platform_helper/commands/codebase.py index cdf4ee23c..e082f5b1c 100644 --- a/dbt_platform_helper/commands/codebase.py +++ b/dbt_platform_helper/commands/codebase.py @@ -1,18 +1,8 @@ -import json -import os - import click from dbt_platform_helper.domain.codebase import Codebase -from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError -from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError +from dbt_platform_helper.exceptions import PlatformException from dbt_platform_helper.utils.click import ClickDocOptGroup -from dbt_platform_helper.utils.git import CommitNotFoundError from dbt_platform_helper.utils.versioning import ( check_platform_helper_version_needs_update, ) @@ -29,12 +19,8 @@ def prepare(): """Sets up an application codebase for use within a DBT platform project.""" try: Codebase().prepare() - except NotInCodeBaseRepositoryError: - # TODO: Set exception message in the exceptions and just output the message in the command code - click.secho( - "You are in the deploy repository; make sure you are in the application codebase repository.", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort @@ -50,17 +36,8 @@ def list(app, with_images): """List available codebases for the application.""" try: Codebase().list(app, with_images) - except NoCopilotCodebasesFoundError: - click.secho( - f"""No codebases found for application "{app}""", - fg="red", - ) - raise click.Abort - except ApplicationNotFoundError: - click.secho( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you 
have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort @@ -76,23 +53,8 @@ def build(app, codebase, commit): """Trigger a CodePipeline pipeline based build.""" try: Codebase().build(app, codebase, commit) - except ApplicationNotFoundError: - click.secho( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ) - raise click.Abort - except CommitNotFoundError: - click.secho( - f'The commit hash "{commit}" either does not exist or you need to run `git fetch`.', - fg="red", - ) - raise click.Abort - except ApplicationDeploymentNotTriggered: - click.secho( - f"Your build for {codebase} was not triggered.", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort @@ -108,39 +70,6 @@ def build(app, codebase, commit): def deploy(app, env, codebase, commit): try: Codebase().deploy(app, env, codebase, commit) - except ApplicationNotFoundError: - # TODO: Set exception message in the exceptions and just output the message in the command code - click.secho( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ) - raise click.Abort - except ApplicationEnvironmentNotFoundError: - click.secho( - f"""The environment "{env}" either does not exist or has not been deployed.""", - fg="red", - ) - raise click.Abort - except ( - CopilotCodebaseNotFoundError, - # TODO: Catch this error earlier and throw a more meaningful error, maybe it's CopilotCodebaseNotFoundError? 
- json.JSONDecodeError, - ): - click.secho( - f"""The codebase "{codebase}" either does not exist or has not been deployed.""", - fg="red", - ) - raise click.Abort - except ImageNotFoundError: - click.secho( - f'The commit hash "{commit}" has not been built into an image, try the ' - "`platform-helper codebase build` command first.", - fg="red", - ) - raise click.Abort - except ApplicationDeploymentNotTriggered: - click.secho( - f"Your deployment for {codebase} was not triggered.", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index a2828841c..6f0707090 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -1,14 +1,7 @@ import click -from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.exceptions import AWSException from dbt_platform_helper.utils.application import load_application from dbt_platform_helper.utils.click import ClickDocOptCommand from dbt_platform_helper.utils.versioning import ( @@ -36,43 +29,6 @@ def conduit(addon_name: str, app: str, env: str, access: str): try: Conduit(application).start(env, addon_name, access) - except NoClusterError: - # TODO: Set exception message in the exceptions and just output the message in the command code, should be able to catch all errors in one block - click.secho(f"""No 
ECS cluster found for "{app}" in "{env}" environment.""", fg="red") - exit(1) - except SecretNotFoundError as err: - click.secho( - f"""No secret called "{err}" for "{app}" in "{env}" environment.""", - fg="red", - ) - exit(1) - except CreateTaskTimeoutError: - click.secho( - f"""Client ({addon_name}) ECS task has failed to start for "{app}" in "{env}" environment.""", - fg="red", - ) - exit(1) - except ParameterNotFoundError: - click.secho( - f"""No parameter called "/copilot/applications/{app}/environments/{env}/addons". Try deploying the "{app}" "{env}" environment.""", - fg="red", - ) - exit(1) - except AddonNotFoundError: - click.secho( - f"""Addon "{addon_name}" does not exist.""", - fg="red", - ) - exit(1) - except InvalidAddonTypeError as err: - click.secho( - f"""Addon type "{err.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""", - fg="red", - ) - exit(1) - except AddonTypeMissingFromConfigError: - click.secho( - f"""The configuration for the addon {addon_name}, is missconfigured and missing the addon type.""", - fg="red", - ) - exit(1) + except AWSException as err: + click.secho(str(err), fg="red") + raise click.Abort diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index eb1b807e2..053a25a60 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -11,7 +11,6 @@ from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.application import load_application @@ -67,7 +66,7 @@ def prepare(self): .removesuffix(".git") ) if repository.endswith("-deploy") or Path("./copilot").exists(): - raise 
NotInCodeBaseRepositoryError + raise NotInCodeBaseRepositoryError() builder_configuration_url = "https://raw.githubusercontent.com/uktrade/ci-image-builder/main/image_builder/configuration/builder_configuration.yml" builder_configuration_response = requests.get(builder_configuration_url) @@ -134,7 +133,7 @@ def build(self, app: str, codebase: str, commit: str): f"Your build has been triggered. Check your build progress in the AWS Console: {build_url}" ) - raise ApplicationDeploymentNotTriggered() + raise ApplicationDeploymentNotTriggered(codebase) def deploy(self, app, env, codebase, commit): """Trigger a CodePipeline pipeline based deployment.""" @@ -142,7 +141,7 @@ def deploy(self, app, env, codebase, commit): application = self.load_application_fn(app, default_session=session) if not application.environments.get(env): - raise ApplicationEnvironmentNotFoundError() + raise ApplicationEnvironmentNotFoundError(env) self.check_codebase_exists_fn(session, application, codebase) @@ -171,7 +170,7 @@ def deploy(self, app, env, codebase, commit): f"{build_url}", ) - raise ApplicationDeploymentNotTriggered() + raise ApplicationDeploymentNotTriggered(codebase) def list(self, app: str, with_images: bool): """List available codebases for the application.""" @@ -204,8 +203,7 @@ def __get_codebases(self, application, ssm_client): codebases = [json.loads(p["Value"]) for p in parameters] if not codebases: - # TODO Is this really an error? Or just no codebases so we could return an empty list? 
- raise NoCopilotCodebasesFoundError + return [] return codebases def __start_build_with_confirmation( diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index 349df2617..51a138f6b 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -3,25 +3,13 @@ import click -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.providers.cloudformation import ( - add_stack_delete_policy_to_task_role, -) -from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources -from dbt_platform_helper.providers.cloudformation import ( - wait_for_cloudformation_to_reach_status, -) +from dbt_platform_helper.providers.cloudformation import CloudFormation from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.ecs import ecs_exec_is_available -from dbt_platform_helper.providers.ecs import get_cluster_arn -from dbt_platform_helper.providers.ecs import get_ecs_task_arns -from dbt_platform_helper.providers.ecs import get_or_create_task_name -from dbt_platform_helper.providers.secrets import get_addon_type -from dbt_platform_helper.providers.secrets import get_parameter_name +from dbt_platform_helper.providers.ecs import ECS +from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import Application -from dbt_platform_helper.utils.messages import abort_with_error class Conduit: @@ -30,19 +18,18 @@ def __init__( application: Application, echo_fn: Callable[[str], str] = click.secho, subprocess_fn: subprocess = subprocess, - get_ecs_task_arns_fn=get_ecs_task_arns, + get_ecs_task_arns_fn=ECS.get_ecs_task_arns, connect_to_addon_client_task_fn=connect_to_addon_client_task, 
create_addon_client_task_fn=create_addon_client_task, create_postgres_admin_task_fn=create_postgres_admin_task, - get_addon_type_fn=get_addon_type, - ecs_exec_is_available_fn=ecs_exec_is_available, - get_cluster_arn_fn=get_cluster_arn, - get_parameter_name_fn=get_parameter_name, - get_or_create_task_name_fn=get_or_create_task_name, - add_stack_delete_policy_to_task_role_fn=add_stack_delete_policy_to_task_role, - update_conduit_stack_resources_fn=update_conduit_stack_resources, - wait_for_cloudformation_to_reach_status_fn=wait_for_cloudformation_to_reach_status, - abort_fn=abort_with_error, + get_addon_type_fn=Secrets.get_addon_type, + ecs_exec_is_available_fn=ECS.ecs_exec_is_available, + get_cluster_arn_fn=ECS.get_cluster_arn, + get_parameter_name_fn=Secrets.get_parameter_name, + get_or_create_task_name_fn=ECS.get_or_create_task_name, + add_stack_delete_policy_to_task_role_fn=CloudFormation.add_stack_delete_policy_to_task_role, + update_conduit_stack_resources_fn=CloudFormation.update_conduit_stack_resources, + wait_for_cloudformation_to_reach_status_fn=CloudFormation.wait_for_cloudformation_to_reach_status, ): self.application = application @@ -60,7 +47,6 @@ def __init__( self.add_stack_delete_policy_to_task_role_fn = add_stack_delete_policy_to_task_role_fn self.update_conduit_stack_resources_fn = update_conduit_stack_resources_fn self.wait_for_cloudformation_to_reach_status_fn = wait_for_cloudformation_to_reach_status_fn - self.abort_fn = abort_fn def start(self, env: str, addon_name: str, access: str = "read"): clients = self._initialise_clients(env) @@ -106,10 +92,7 @@ def start(self, env: str, addon_name: str, access: str = "read"): self.echo_fn(f"Checking if exec is available for conduit task...") - try: - self.ecs_exec_is_available_fn(clients["ecs"], cluster_arn, task_arn) - except ECSAgentNotRunning: - self.abort_fn('ECS exec agent never reached "RUNNING" status') + self.ecs_exec_is_available_fn(clients["ecs"], cluster_arn, task_arn) 
self.echo_fn("Connecting to conduit task") self.connect_to_addon_client_task_fn( diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py index 2917d1059..a0a28e551 100644 --- a/dbt_platform_helper/exceptions.py +++ b/dbt_platform_helper/exceptions.py @@ -1,11 +1,38 @@ +import os + +from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES + + class ValidationException(Exception): pass -class AWSException(Exception): +class PlatformException(Exception): + pass + + +class AWSException(PlatformException): + pass + + +class ApplicationException(PlatformException): pass +class CloudFormationException(AWSException): + def __init__(self, stack_name: str, current_status: str): + super().__init__( + f"The CloudFormation stack '{stack_name}' is not in a good state: {current_status}" + ) + + +class CommitNotFoundError: + def __init__(self, commit: str): + super().__init__( + f"""The commit hash "{commit}" either does not exist or you need to run `git fetch`.""" + ) + + class IncompatibleMajorVersion(ValidationException): def __init__(self, app_version: str, check_version: str): super().__init__() @@ -21,61 +48,97 @@ def __init__(self, app_version: str, check_version: str): class NoClusterError(AWSException): - pass + def __init__(self, application_name: str, environment: str): + super().__init__( + f"""No ECS cluster found for "{application_name}" in "{environment}" environment.""" + ) class CreateTaskTimeoutError(AWSException): - pass + def __init__(self, addon_name: str, application_name: str, environment: str): + super().__init__( + f"""Client ({addon_name}) ECS task has failed to start for "{application_name}" in "{environment}" environment.""" + ) class ParameterNotFoundError(AWSException): - pass + def __init__(self, application_name: str, environment: str): + super().__init__( + f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". 
Try deploying the "{application_name}" "{environment}" environment.""" + ) class AddonNotFoundError(AWSException): - pass + def __init__(self, addon_name: str): + super().__init__(f"""Addon "{addon_name}" does not exist.""") class InvalidAddonTypeError(AWSException): def __init__(self, addon_type): self.addon_type = addon_type + super().__init__( + f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""" + ) class AddonTypeMissingFromConfigError(AWSException): - pass + def __init__(self, addon_name: str): + super().__init__( + f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type.""" + ) -class CopilotCodebaseNotFoundError(Exception): - pass +class CopilotCodebaseNotFoundError(PlatformException): + def __init__(self, codebase: str): + super().__init__( + f"""The codebase "{codebase}" either does not exist or has not been deployed.""" + ) -class NotInCodeBaseRepositoryError(Exception): - pass +class NotInCodeBaseRepositoryError(PlatformException): + def __init__(self): + super().__init__( + "You are in the deploy repository; make sure you are in the application codebase repository.", + ) -class NoCopilotCodebasesFoundError(Exception): - pass +class NoCopilotCodebasesFoundError(PlatformException): + def __init__(self, application_name: str): + super().__init__(f"""No codebases found for application "{application_name}".""") -class ImageNotFoundError(Exception): - pass +class ImageNotFoundError(PlatformException): + def __init__(self, commit: str): + super().__init__( + f"""The commit hash "{commit}" has not been built into an image, try the `platform-helper codebase build` command first.""" + ) -class ApplicationDeploymentNotTriggered(Exception): - pass +class ApplicationDeploymentNotTriggered(PlatformException): + def __init__(self, codebase: str): + super().__init__(f"""Your deployment for {codebase} was not triggered.""") -class ApplicationNotFoundError(Exception): - pass +class 
ApplicationNotFoundError(ApplicationException): + def __init__(self, application_name: str): + super().__init__( + f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{application_name}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" + ) -class ApplicationEnvironmentNotFoundError(Exception): - pass +class ApplicationEnvironmentNotFoundError(ApplicationException): + def __init__(self, environment: str): + super().__init__( + f"""The environment "{environment}" either does not exist or has not been deployed.""" + ) class SecretNotFoundError(AWSException): - pass + # application_name: str, environment: str, + def __init__(self, secret_name: str): + super().__init__(f"""No secret called "{secret_name}".""") class ECSAgentNotRunning(AWSException): - pass + def __init__(self): + super().__init__("""ECS exec agent never reached "RUNNING" status""") diff --git a/dbt_platform_helper/providers/cloudformation.py b/dbt_platform_helper/providers/cloudformation.py index 395a49c71..141be9052 100644 --- a/dbt_platform_helper/providers/cloudformation.py +++ b/dbt_platform_helper/providers/cloudformation.py @@ -1,105 +1,127 @@ import json +import botocore from cfn_tools import dump_yaml from cfn_tools import load_yaml - -def add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name: str): - - stack_name = f"task-{task_name}" - stack_resources = cloudformation_client.list_stack_resources(StackName=stack_name)[ - "StackResourceSummaries" - ] - - for resource in stack_resources: - if resource["LogicalResourceId"] == "DefaultTaskRole": - task_role_name = resource["PhysicalResourceId"] - iam_client.put_role_policy( - RoleName=task_role_name, - PolicyName="DeleteCloudFormationStack", - PolicyDocument=json.dumps( - { - "Version": "2012-10-17", - "Statement": [ - { - "Action": ["cloudformation:DeleteStack"], - "Effect": "Allow", - "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", - }, - ], - }, 
- ), - ) - - -def update_conduit_stack_resources( - cloudformation_client, - iam_client, - ssm_client, - application_name: str, - env: str, - addon_type: str, - addon_name: str, - task_name: str, - parameter_name: str, - access: str, -): - - conduit_stack_name = f"task-{task_name}" - template = cloudformation_client.get_template(StackName=conduit_stack_name) - template_yml = load_yaml(template["TemplateBody"]) - template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" - template_yml["Resources"]["TaskNameParameter"] = load_yaml( - f""" - Type: AWS::SSM::Parameter - Properties: - Name: {parameter_name} - Type: String - Value: {task_name} - """ - ) - - log_filter_role_arn = iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")["Role"]["Arn"] - - destination_log_group_arns = json.loads( - ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"]["Value"] - ) - - destination_arn = destination_log_group_arns["dev"] - if env.lower() in ("prod", "production"): - destination_arn = destination_log_group_arns["prod"] - - template_yml["Resources"]["SubscriptionFilter"] = load_yaml( - f""" - Type: AWS::Logs::SubscriptionFilter - DeletionPolicy: Retain - Properties: - RoleArn: {log_filter_role_arn} - LogGroupName: /copilot/{task_name} - FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} - FilterPattern: '' - DestinationArn: {destination_arn} - """ - ) - - params = [] - if "Parameters" in template_yml: - for param in template_yml["Parameters"]: - # TODO testing missed in codecov, update test to assert on method call below with params including ExistingParameter from cloudformation template. 
- params.append({"ParameterKey": param, "UsePreviousValue": True}) - - cloudformation_client.update_stack( - StackName=conduit_stack_name, - TemplateBody=dump_yaml(template_yml), - Parameters=params, - Capabilities=["CAPABILITY_IAM"], - ) - - return conduit_stack_name - - -# TODO Catch errors and raise a more human friendly Exception is the CloudFormation stack goes into a "unhappy" state, e.g. ROLLBACK_IN_PROGRESS. Currently we get things like botocore.exceptions.WaiterError: Waiter StackUpdateComplete failed: Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" we matched expected path: "UPDATE_ROLLBACK_COMPLETE" at least once -def wait_for_cloudformation_to_reach_status(cloudformation_client, stack_status, stack_name): - - waiter = cloudformation_client.get_waiter(stack_status) - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 20}) +from dbt_platform_helper.exceptions import CloudFormationException + + +class CloudFormation: + def __init__(self, cloudformation_client, iam_client, ssm_client): + self.cloudformation_client = cloudformation_client + self.iam_client = iam_client + self.ssm_client = ssm_client + + def add_stack_delete_policy_to_task_role(self, task_name: str): + stack_name = f"task-{task_name}" + stack_resources = self.cloudformation_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + for resource in stack_resources: + if resource["LogicalResourceId"] == "DefaultTaskRole": + task_role_name = resource["PhysicalResourceId"] + self.iam_client.put_role_policy( + RoleName=task_role_name, + PolicyName="DeleteCloudFormationStack", + PolicyDocument=json.dumps( + { + "Version": "2012-10-17", + "Statement": [ + { + "Action": ["cloudformation:DeleteStack"], + "Effect": "Allow", + "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", + }, + ], + }, + ), + ) + + def update_conduit_stack_resources( + self, + application_name: str, + env: str, + addon_type: str, + 
addon_name: str, + task_name: str, + parameter_name: str, + access: str, + ): + conduit_stack_name = f"task-{task_name}" + template = self.cloudformation_client.get_template(StackName=conduit_stack_name) + template_yml = load_yaml(template["TemplateBody"]) + + template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" + + template_yml["Resources"]["TaskNameParameter"] = load_yaml( + f""" + Type: AWS::SSM::Parameter + Properties: + Name: {parameter_name} + Type: String + Value: {task_name} + """ + ) + + log_filter_role_arn = self.iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")[ + "Role" + ]["Arn"] + + destination_log_group_arns = json.loads( + self.ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"][ + "Value" + ] + ) + + destination_arn = destination_log_group_arns["dev"] + if env.lower() in ("prod", "production"): + destination_arn = destination_log_group_arns["prod"] + + template_yml["Resources"]["SubscriptionFilter"] = load_yaml( + f""" + Type: AWS::Logs::SubscriptionFilter + DeletionPolicy: Retain + Properties: + RoleArn: {log_filter_role_arn} + LogGroupName: /copilot/{task_name} + FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} + FilterPattern: '' + DestinationArn: {destination_arn} + """ + ) + + params = [] + # TODO moto bug https://uktrade.atlassian.net/browse/DBTP-1582 + if "Parameters" in template_yml: + for param in template_yml["Parameters"]: + params.append({"ParameterKey": param, "UsePreviousValue": True}) + + self.cloudformation_client.update_stack( + StackName=conduit_stack_name, + TemplateBody=dump_yaml(template_yml), + Parameters=params, + Capabilities=["CAPABILITY_IAM"], + ) + + return conduit_stack_name + + def wait_for_cloudformation_to_reach_status(self, stack_status, stack_name): + waiter = self.cloudformation_client.get_waiter(stack_status) + + try: + waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 
20}) + except botocore.exceptions.WaiterError as err: + current_status = err.last_response.get("Stacks", [{}])[0].get("StackStatus", "") + + if current_status in [ + "ROLLBACK_IN_PROGRESS", + "UPDATE_ROLLBACK_IN_PROGRESS", + "ROLLBACK_FAILED", + ]: + raise CloudFormationException(stack_name, current_status) + else: + raise CloudFormationException( + stack_name, f"Error while waiting for stack status: {str(err)}" + ) diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index 61c1a055d..7954f1555 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -5,11 +5,8 @@ from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.providers.ecs import get_ecs_task_arns -from dbt_platform_helper.providers.secrets import get_connection_secret_arn -from dbt_platform_helper.providers.secrets import ( - get_postgres_connection_data_updated_with_master_secret, -) +from dbt_platform_helper.providers.ecs import ECS +from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.messages import abort_with_error @@ -69,7 +66,7 @@ def create_addon_client_task( f"--task-group-name {task_name} " f"{execution_role}" f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " - f"--secrets CONNECTION_SECRET={get_connection_secret_arn(ssm_client,secrets_manager_client, secret_name)} " + f"--secrets CONNECTION_SECRET={Secrets.get_connection_secret_arn(ssm_client,secrets_manager_client, secret_name)} " "--platform-os linux " "--platform-arch arm64", shell=True, @@ -95,7 +92,7 @@ def create_postgres_admin_task( "Parameter" ]["Value"] connection_string = json.dumps( - get_postgres_connection_data_updated_with_master_secret( + Secrets.get_postgres_connection_data_updated_with_master_secret( ssm_client, 
secrets_manager_client, read_only_secret_name, master_secret_arn ) ) @@ -118,13 +115,13 @@ def connect_to_addon_client_task( env, cluster_arn, task_name, - addon_client_is_running_fn=get_ecs_task_arns, + get_ecs_task_arns_fn=ECS.get_ecs_task_arns, ): running = False tries = 0 while tries < 15 and not running: tries += 1 - if addon_client_is_running_fn(ecs_client, cluster_arn, task_name): + if get_ecs_task_arns_fn(ecs_client, cluster_arn, task_name): subprocess.call( "copilot task exec " f"--app {application_name} --env {env} " @@ -137,7 +134,7 @@ def connect_to_addon_client_task( time.sleep(1) if not running: - raise CreateTaskTimeoutError + raise CreateTaskTimeoutError(task_name, application_name, env) def _normalise_secret_name(addon_name: str) -> str: diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 2878e91ac..7ddf0a180 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -7,73 +7,81 @@ from dbt_platform_helper.exceptions import NoClusterError -# Todo: Refactor to a class, review, then perhaps do the others -def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: - for cluster_arn in ecs_client.list_clusters()["clusterArns"]: - tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) - tags = tags_response["tags"] - - app_key_found = False - env_key_found = False - cluster_key_found = False - - for tag in tags: - if tag["key"] == "copilot-application" and tag["value"] == application_name: - app_key_found = True - if tag["key"] == "copilot-environment" and tag["value"] == env: - env_key_found = True - if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": - cluster_key_found = True - - if app_key_found and env_key_found and cluster_key_found: - return cluster_arn - - raise NoClusterError - - -def get_or_create_task_name( - ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str -) -> str: - 
try: - return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] - except ssm_client.exceptions.ParameterNotFound: - random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) - return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" - - -def get_ecs_task_arns(ecs_client, cluster_arn: str, task_name: str): - - tasks = ecs_client.list_tasks( - cluster=cluster_arn, - desiredStatus="RUNNING", - family=f"copilot-{task_name}", - ) - - if not tasks["taskArns"]: - return [] - - return tasks["taskArns"] - - -def ecs_exec_is_available(ecs_client, cluster_arn: str, task_arns: List[str]): - - current_attemps = 0 - execute_command_agent_status = "" - - while execute_command_agent_status != "RUNNING" and current_attemps < 25: - - current_attemps += 1 - - task_details = ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) - - managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] - execute_command_agent_status = [ - agent["lastStatus"] - for agent in managed_agents - if agent["name"] == "ExecuteCommandAgent" - ][0] - - time.sleep(1) - - if execute_command_agent_status != "RUNNING": - raise ECSAgentNotRunning +class ECS: + def __init__(self, ecs_client, ssm_client, application_name: str, env: str): + self.ecs_client = ecs_client + self.ssm_client = ssm_client + self.application_name = application_name + self.env = env + + def get_cluster_arn(self) -> str: + """Returns the ARN of the ECS cluster for the given application and + environment.""" + for cluster_arn in self.ecs_client.list_clusters()["clusterArns"]: + tags_response = self.ecs_client.list_tags_for_resource(resourceArn=cluster_arn) + tags = tags_response["tags"] + + app_key_found = False + env_key_found = False + cluster_key_found = False + + for tag in tags: + if tag["key"] == "copilot-application" and tag["value"] == self.application_name: + app_key_found = True + if tag["key"] == "copilot-environment" and tag["value"] == self.env: + 
env_key_found = True + if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": + cluster_key_found = True + + if app_key_found and env_key_found and cluster_key_found: + return cluster_arn + + raise NoClusterError(self.application_name, self.env) + + def get_or_create_task_name(self, addon_name: str, parameter_name: str) -> str: + """Fetches the task name from SSM or creates a new one if not found.""" + try: + return self.ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] + except self.ssm_client.exceptions.ParameterNotFound: + random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) + return f"conduit-{self.application_name}-{self.env}-{addon_name}-{random_id}" + + def get_ecs_task_arns(self, cluster_arn: str, task_name: str): + """Gets the ECS task ARNs for a given task name and cluster ARN.""" + tasks = self.ecs_client.list_tasks( + cluster=cluster_arn, + desiredStatus="RUNNING", + family=f"copilot-{task_name}", + ) + + if not tasks["taskArns"]: + return [] + + return tasks["taskArns"] + + def ecs_exec_is_available(self, cluster_arn: str, task_arns: List[str]): + """ + Checks if the ExecuteCommandAgent is running on the specified ECS task. + + Waits for up to 25 attempts, then raises ECSAgentNotRunning if still not + running. 
+ """ + current_attempts = 0 + execute_command_agent_status = "" + + while execute_command_agent_status != "RUNNING" and current_attempts < 25: + current_attempts += 1 + + task_details = self.ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) + + managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] + execute_command_agent_status = [ + agent["lastStatus"] + for agent in managed_agents + if agent["name"] == "ExecuteCommandAgent" + ][0] + if execute_command_agent_status != "RUNNING": + time.sleep(1) + + if execute_command_agent_status != "RUNNING": + raise ECSAgentNotRunning diff --git a/dbt_platform_helper/providers/secrets.py b/dbt_platform_helper/providers/secrets.py index feeaf0ae1..c875f7dfc 100644 --- a/dbt_platform_helper/providers/secrets.py +++ b/dbt_platform_helper/providers/secrets.py @@ -9,77 +9,77 @@ from dbt_platform_helper.exceptions import SecretNotFoundError -def get_postgres_connection_data_updated_with_master_secret( - ssm_client, secrets_manager_client, parameter_name, secret_arn -): - response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) - parameter_value = response["Parameter"]["Value"] - - parameter_data = json.loads(parameter_value) - - secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn) - secret_value = json.loads(secret_response["SecretString"]) - - parameter_data["username"] = urllib.parse.quote(secret_value["username"]) - parameter_data["password"] = urllib.parse.quote(secret_value["password"]) - - return parameter_data - - -def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str: - - try: - return ssm_client.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"] - except ssm_client.exceptions.ParameterNotFound: - pass - - try: - return secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] - except secrets_manager_client.exceptions.ResourceNotFoundException: - pass - - raise 
SecretNotFoundError(secret_name) - - -def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str: - addon_type = None - try: - addon_config = json.loads( - ssm_client.get_parameter( - Name=f"/copilot/applications/{application_name}/environments/{env}/addons" - )["Parameter"]["Value"] - ) - except ssm_client.exceptions.ParameterNotFound: - raise ParameterNotFoundError - - if addon_name not in addon_config.keys(): - raise AddonNotFoundError - - for name, config in addon_config.items(): - if name == addon_name: - if not config.get("type"): - raise AddonTypeMissingFromConfigError() - addon_type = config["type"] - - if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: - raise InvalidAddonTypeError(addon_type) - - if "postgres" in addon_type: - addon_type = "postgres" - - return addon_type - - -def get_parameter_name( - application_name: str, env: str, addon_type: str, addon_name: str, access: str -) -> str: - if addon_type == "postgres": - return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_{access.upper()}" - elif addon_type == "redis" or addon_type == "opensearch": - return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_ENDPOINT" - else: - return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}" - - -def _normalise_secret_name(addon_name: str) -> str: - return addon_name.replace("-", "_").upper() +class Secrets: + def __init__(self, ssm_client, secrets_manager_client, application_name, env): + self.ssm_client = ssm_client + self.secrets_manager_client = secrets_manager_client + self.application_name = application_name + self.env = env + + def get_postgres_connection_data_updated_with_master_secret(self, parameter_name, secret_arn): + response = self.ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) + parameter_value = response["Parameter"]["Value"] + + parameter_data = json.loads(parameter_value) + + 
secret_response = self.secrets_manager_client.get_secret_value(SecretId=secret_arn) + secret_value = json.loads(secret_response["SecretString"]) + + parameter_data["username"] = urllib.parse.quote(secret_value["username"]) + parameter_data["password"] = urllib.parse.quote(secret_value["password"]) + + return parameter_data + + def get_connection_secret_arn(self, secret_name: str) -> str: + try: + return self.ssm_client.get_parameter(Name=secret_name, WithDecryption=False)[ + "Parameter" + ]["ARN"] + except self.ssm_client.exceptions.ParameterNotFound: + pass + + try: + return self.secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] + except self.secrets_manager_client.exceptions.ResourceNotFoundException: + pass + + raise SecretNotFoundError(secret_name) + + def get_addon_type(self, addon_name: str) -> str: + addon_type = None + try: + addon_config = json.loads( + self.ssm_client.get_parameter( + Name=f"/copilot/applications/{self.application_name}/environments/{self.env}/addons" + )["Parameter"]["Value"] + ) + except self.ssm_client.exceptions.ParameterNotFound: + raise ParameterNotFoundError(self.application_name, self.env) + + if addon_name not in addon_config.keys(): + raise AddonNotFoundError(addon_name) + + for name, config in addon_config.items(): + if name == addon_name: + if not config.get("type"): + raise AddonTypeMissingFromConfigError(addon_name) + addon_type = config["type"] + + if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: + raise InvalidAddonTypeError(addon_type) + + if "postgres" in addon_type: + addon_type = "postgres" + + return addon_type + + def get_parameter_name(self, addon_type: str, addon_name: str, access: str) -> str: + if addon_type == "postgres": + return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}_{access.upper()}" + elif addon_type == "redis" or addon_type == "opensearch": + return 
f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}_ENDPOINT" + else: + return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}" + + def _normalise_secret_name(self, addon_name: str) -> str: + return addon_name.replace("-", "_").upper() diff --git a/dbt_platform_helper/utils/application.py b/dbt_platform_helper/utils/application.py index 420689df5..fba00bdab 100644 --- a/dbt_platform_helper/utils/application.py +++ b/dbt_platform_helper/utils/application.py @@ -80,7 +80,7 @@ def load_application(app: str = None, default_session: Session = None) -> Applic WithDecryption=False, ) except ssm_client.exceptions.ParameterNotFound: - raise ApplicationNotFoundError + raise ApplicationNotFoundError(app) path = f"/copilot/applications/{application.name}/environments" secrets = get_ssm_secrets(app, None, current_session, path) diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index e59c2f511..e7814afa6 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -499,7 +499,7 @@ def check_codebase_exists(session: Session, application, codebase: str): ssm_client.exceptions.ParameterNotFound, json.JSONDecodeError, ): - raise CopilotCodebaseNotFoundError + raise CopilotCodebaseNotFoundError(codebase) def check_image_exists(session, application, codebase, commit): @@ -513,7 +513,7 @@ def check_image_exists(session, application, codebase, commit): ecr_client.exceptions.RepositoryNotFoundException, ecr_client.exceptions.ImageNotFoundException, ): - raise ImageNotFoundError + raise ImageNotFoundError(commit) def get_build_url_from_arn(build_arn: str) -> str: diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index b05f3dcf2..2ea4d9c5f 100644 --- a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -20,7 +20,6 @@ from 
dbt_platform_helper.exceptions import ApplicationNotFoundError from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError from dbt_platform_helper.utils.application import Environment from dbt_platform_helper.utils.git import CommitNotFoundError from tests.platform_helper.conftest import EXPECTED_FILES_DIR @@ -147,22 +146,15 @@ def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_direc os.chdir(tmp_path) Path(tmp_path / "copilot").mkdir() - mocks.subprocess.return_value.stderr = mock_suprocess_fixture() + mocks.subprocess.return_value.stdout = mock_suprocess_fixture() - codebase.prepare() - - mocks.echo_fn.assert_has_calls( - [ - call( - "You are in the deploy repository; make sure you are in the application codebase repository.", - ), - ] - ) + with pytest.raises(NotInCodeBaseRepositoryError): + codebase.prepare() def test_codebase_build_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError() + mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError): @@ -186,7 +178,7 @@ def test_codebase_build_commit_not_found(): codebase.build("not-an-application", "application", "ab1c23d") -def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_directory(tmp_path): +def test_codebase_prepare_raises_not_in_codebase_exception(tmp_path): mocks = CodebaseMocks() mocks.load_application_fn.side_effect = SystemExit(1) @@ -283,7 +275,9 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl def test_codebase_deploy_exception_with_a_nonexistent_codebase(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError())) + mocks = CodebaseMocks( 
+ check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -297,7 +291,9 @@ def test_codebase_deploy_exception_with_a_nonexistent_codebase(): def test_check_codebase_exists_returns_error_when_no_json(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError)) + mocks = CodebaseMocks( + check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -311,7 +307,9 @@ def test_check_codebase_exists_returns_error_when_no_json(): def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): - mocks = CodebaseMocks(check_image_exists_fn=Mock(side_effect=ImageNotFoundError)) + mocks = CodebaseMocks( + check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -326,7 +324,9 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): def test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): - mocks = CodebaseMocks(check_image_exists_fn=Mock(side_effect=ImageNotFoundError)) + mocks = CodebaseMocks( + check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -374,20 +374,11 @@ def test_codebase_deploy_does_not_trigger_build_without_confirmation(): def test_codebase_deploy_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError() + mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: codebase.deploy("not-an-application", "dev", "application", "ab1c23d") - # TODO This assert can probably go now we are catching the errors and outputting 
them at the command layer - mocks.echo_fn.assert_has_calls( - [ - call( - """The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ), - ] - ) def test_codebase_deploy_does_not_trigger_build_with_missing_environment(mock_application): @@ -423,15 +414,15 @@ def test_codebase_deploy_does_not_trigger_deployment_without_confirmation(): def test_codebase_list_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError() + mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: codebase.list("not-an-application", True) -def test_codebase_list_raises_exception_when_no_codebases(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=NoCopilotCodebasesFoundError())) +def test_codebase_list_returns_empty_when_no_codebases(): + mocks = CodebaseMocks(check_codebase_exists_fn=Mock()) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -439,9 +430,10 @@ def test_codebase_list_raises_exception_when_no_codebases(): "Parameter": {"Value": json.dumps({"name": "application"})}, } - with pytest.raises(NoCopilotCodebasesFoundError): - codebase = Codebase(**mocks.params()) - codebase.list("test-application", True) + codebase = Codebase(**mocks.params()) + codebase.list("test-application", True) + + mocks.echo_fn.assert_has_calls([]) def test_lists_codebases_with_multiple_pages_of_images(): diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 385566bd7..2d499dd47 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -7,6 +7,7 @@ from dbt_platform_helper.domain.conduit import Conduit from dbt_platform_helper.exceptions import AddonNotFoundError 
from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import InvalidAddonTypeError from dbt_platform_helper.exceptions import NoClusterError from dbt_platform_helper.exceptions import ParameterNotFoundError @@ -192,7 +193,11 @@ def test_conduit_with_task_already_running(): def test_conduit_domain_when_no_cluster_exists(): conduit_mocks = ConduitMocks( - app_name, addon_type, get_cluster_arn_fn=Mock(side_effect=NoClusterError()) + app_name, + addon_type, + get_cluster_arn_fn=Mock( + side_effect=NoClusterError(application_name=app_name, environment=env) + ), ) conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") @@ -209,7 +214,9 @@ def test_conduit_domain_when_no_connection_secret_exists(): app_name, addon_type, get_ecs_task_arns_fn=Mock(return_value=False), - create_addon_client_task_fn=Mock(side_effect=SecretNotFoundError()), + create_addon_client_task_fn=Mock( + side_effect=SecretNotFoundError(f"/copilot/{app_name}/{env}/secrets/{addon_name}") + ), ) conduit = Conduit(**conduit_mocks.params()) @@ -230,7 +237,13 @@ def test_conduit_domain_when_client_task_fails_to_start(): conduit_mocks = ConduitMocks( app_name, addon_type, - connect_to_addon_client_task_fn=Mock(side_effect=CreateTaskTimeoutError()), + connect_to_addon_client_task_fn=Mock( + side_effect=CreateTaskTimeoutError( + addon_name=addon_name, + application_name=app_name, + environment=env, + ) + ), ) conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") @@ -272,7 +285,7 @@ def test_conduit_domain_when_addon_type_is_invalid(): def test_conduit_domain_when_addon_does_not_exist(): addon_name = "addon_doesnt_exist" conduit_mocks = ConduitMocks( - app_name, addon_type, get_addon_type_fn=Mock(side_effect=AddonNotFoundError()) + app_name, addon_type, 
get_addon_type_fn=Mock(side_effect=AddonNotFoundError(addon_name)) ) conduit = Conduit(**conduit_mocks.params()) @@ -286,7 +299,14 @@ def test_conduit_domain_when_addon_does_not_exist(): def test_conduit_domain_when_no_addon_config_parameter_exists(): addon_name = "parameter_doesnt_exist" conduit_mocks = ConduitMocks( - app_name, addon_type, get_addon_type_fn=Mock(side_effect=ParameterNotFoundError()) + app_name, + addon_type, + get_addon_type_fn=Mock( + side_effect=ParameterNotFoundError( + application_name=app_name, + environment=env, + ) + ), ) conduit = Conduit(**conduit_mocks.params()) @@ -295,3 +315,25 @@ def test_conduit_domain_when_no_addon_config_parameter_exists(): with pytest.raises(ParameterNotFoundError): conduit.start(env, addon_name) conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + + +def test_conduit_domain_ecs_exec_agent_does_not_start(): + conduit_mocks = ConduitMocks( + app_name, + addon_type, + get_ecs_task_arns_fn=Mock( + return_value=["arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN"] + ), + ecs_exec_is_available_fn=Mock(side_effect=ECSAgentNotRunning()), + ) + conduit = Conduit(**conduit_mocks.params()) + ecs_client = conduit.application.environments[env].session.client("ecs") + + with pytest.raises(ECSAgentNotRunning): + conduit.start(env, addon_name) + + conduit.ecs_exec_is_available_fn.assert_called_once_with( + ecs_client, + "arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1", + ["arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN"], + ) diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index 22bd2d98f..fc48b63c6 100644 --- a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -303,7 +303,7 @@ def test_database_dump_handles_account_id_errors(is_dump): def test_database_copy_initialization_handles_app_name_errors(): mocks = DataCopyMocks() - 
mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError()) + mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError("bad-app")) with pytest.raises(SystemExit) as exc: DatabaseCopy("bad-app", "test-db", **mocks.params()) diff --git a/tests/platform_helper/providers/test_cloudformation.py b/tests/platform_helper/providers/test_cloudformation.py index 6d44869f9..0dab33905 100644 --- a/tests/platform_helper/providers/test_cloudformation.py +++ b/tests/platform_helper/providers/test_cloudformation.py @@ -4,16 +4,12 @@ import boto3 import pytest +from botocore.exceptions import WaiterError from cfn_tools import load_yaml from moto import mock_aws -from dbt_platform_helper.providers.cloudformation import ( - add_stack_delete_policy_to_task_role, -) -from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources -from dbt_platform_helper.providers.cloudformation import ( - wait_for_cloudformation_to_reach_status, -) +from dbt_platform_helper.exceptions import CloudFormationException +from dbt_platform_helper.providers.cloudformation import CloudFormation from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @@ -60,21 +56,17 @@ def test_update_conduit_stack_resources( iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - update_conduit_stack_resources( - cloudformation_client, - iam_client, - ssm_client, - mock_application.name, - env, - addon_type, - addon_name, - task_name, - parameter_name, - "read", + cloudformation = CloudFormation(cloudformation_client, iam_client, ssm_client) + + cloudformation.update_conduit_stack_resources( + mock_application.name, env, addon_type, addon_name, task_name, parameter_name, "read" ) template = boto3.client("cloudformation").get_template(StackName=f"task-{task_name}") + stack = 
boto3.client("cloudformation").describe_stacks(StackName=f"task-{task_name}") template_yml = load_yaml(template["TemplateBody"]) + + assert stack["Stacks"][0]["Parameters"][0]["ParameterValue"] == "does-not-matter" assert template_yml["Resources"]["LogGroup"]["DeletionPolicy"] == "Retain" assert template_yml["Resources"]["TaskNameParameter"]["Properties"]["Name"] == parameter_name assert ( @@ -97,7 +89,7 @@ def test_update_conduit_stack_resources( ) @patch("time.sleep", return_value=None) def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, mock_application): - """Test that, given app, env and addon name + """Test that, given app, env and addon name, add_stack_delete_policy_to_task_role adds a policy to the IAM role in a CloudFormation stack.""" @@ -118,7 +110,9 @@ def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, moc ], } - add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name) + cloudformation = CloudFormation(cloudformation_client, iam_client, None) + + cloudformation.add_stack_delete_policy_to_task_role(task_name) stack_resources = boto3.client("cloudformation").list_stack_resources(StackName=stack_name)[ "StackResourceSummaries" @@ -138,17 +132,41 @@ def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, moc assert policy_document == mock_policy -def test_wait_for_cloudformation_to_reach_status(): - +@mock_aws +def test_wait_for_cloudformation_with_no_success_raises_exception(): cloudformation_client = Mock() - mock_return = Mock() - mock_waiter = Mock(return_value=mock_return) - cloudformation_client.get_waiter = mock_waiter + waiter_mock = Mock() + cloudformation_client.get_waiter = Mock(return_value=waiter_mock) - wait_for_cloudformation_to_reach_status( - cloudformation_client, "stack_update_complete", "task-stack-name" + waiter_error = WaiterError( + "Waiter StackUpdatecomplete failed", + "Fail!!", + {"Stacks": [{"StackStatus": "ROLLBACK_IN_PROGRESS"}]}, ) 
- mock_waiter.assert_called() - mock_return.wait.assert_called_with( - StackName="task-stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} + waiter_mock.wait.side_effect = waiter_error + + cloudformation = CloudFormation(cloudformation_client, None, None) + + with pytest.raises( + CloudFormationException, + match="The CloudFormation stack 'stack-name' is not in a good state: ROLLBACK_IN_PROGRESS", + ): + cloudformation.wait_for_cloudformation_to_reach_status( + "stack_update_complete", "stack-name" + ) + + +@mock_aws +def test_wait_for_cloudformation_with_update_complete(): + cloudformation_client = Mock() + waiter_mock = Mock() + cloudformation_client.get_waiter = Mock(return_value=waiter_mock) + waiter_mock.wait.return_value = None + + cloudformation = CloudFormation(cloudformation_client, None, None) + + cloudformation.wait_for_cloudformation_to_reach_status("stack_update_complete", "stack-name") + + waiter_mock.wait.assert_called_with( + StackName="stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} ) diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index 671b45f14..dd55eb3c9 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -1,4 +1,3 @@ -import json from unittest.mock import Mock from unittest.mock import patch @@ -7,53 +6,29 @@ from botocore.exceptions import ClientError from moto import mock_aws -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot 
import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.secrets import SecretNotFoundError -from dbt_platform_helper.providers.secrets import ( - _normalise_secret_name as normalise_secret_name, -) -from dbt_platform_helper.providers.secrets import get_addon_type -from dbt_platform_helper.providers.secrets import get_parameter_name from tests.platform_helper.conftest import NoSuchEntityException -from tests.platform_helper.conftest import add_addon_config_parameter from tests.platform_helper.conftest import expected_connection_secret_name -from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name env = "development" -@pytest.mark.parametrize( - "test_string", - [ - ("app-rds-postgres", "APP_RDS_POSTGRES"), - ("APP-POSTGRES", "APP_POSTGRES"), - ("APP-OpenSearch", "APP_OPENSEARCH"), - ], -) -def test_normalise_secret_name(test_string): - """Test that given an addon name, normalise_secret_name produces the - expected result.""" - - assert normalise_secret_name(test_string[0]) == test_string[1] - - @mock_aws @patch( # Nested function within provider function - "dbt_platform_helper.providers.copilot.get_postgres_connection_data_updated_with_master_secret", + "dbt_platform_helper.providers.secrets.Secrets.get_postgres_connection_data_updated_with_master_secret", return_value="connection string", ) def test_create_postgres_admin_task(mock_update_parameter, mock_application): addon_name = "dummy-postgres" - master_secret_name = f"/copilot/{mock_application.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" + master_secret_name = ( + f"/copilot/{mock_application.name}/{env}/secrets/DUMMY_POSTGRES_RDS_MASTER_ARN" + ) ssm_client = mock_application.environments[env].session.client("ssm") secrets_manager_client = mock_application.environments[env].session.client("secretsmanager") @@ -107,7 +82,10 @@ def 
test_create_postgres_admin_task(mock_update_parameter, mock_application): ("opensearch", "custom-name-opensearch"), ], ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch( + "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", + return_value="test-arn", +) def test_create_redis_or_opensearch_addon_client_task( get_connection_secret_arn, access, @@ -141,10 +119,6 @@ def test_create_redis_or_opensearch_addon_client_task( access, ) - secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) - get_connection_secret_arn.assert_called_once_with( - ssm_client, secretsmanager_client, secret_name - ) mock_subprocess.call.assert_called() mock_subprocess.call.assert_called_once_with( f"copilot task run --app test-application --env {env} " @@ -165,7 +139,10 @@ def test_create_redis_or_opensearch_addon_client_task( "write", ], ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch( + "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", + return_value="test-arn", +) def test_create_postgres_addon_client_task( get_connection_secret_arn, access, @@ -252,7 +229,10 @@ def test_create_postgres_addon_client_task_admin( ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch( + "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", + return_value="test-arn", +) def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( get_connection_secret_arn, mock_application, @@ -304,11 +284,9 @@ def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") @patch("click.secho") def test_create_addon_client_task_abort_with_message_on_other_exceptions( mock_secho, - get_connection_secret_arn, 
mock_application, ): """Test that if an unexpected ClientError is throw when trying to get the @@ -353,7 +331,7 @@ def test_create_addon_client_task_abort_with_message_on_other_exceptions( ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn") +@patch("dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn") def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn): """Test that, given app, environment and secret name strings, create_addon_client_task raises a NoConnectionSecretError and does not call @@ -367,7 +345,9 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn ssm_client = mock_application.environments[env].session.client("ssm") secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") - get_connection_secret_arn.side_effect = SecretNotFoundError + get_connection_secret_arn.side_effect = SecretNotFoundError( + "/copilot/test-application/development/secrets/named-postgres" + ) with pytest.raises(SecretNotFoundError): create_addon_client_task( @@ -386,35 +366,6 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn mock_subprocess.call.assert_not_called() -@mock_aws -@pytest.mark.parametrize( - "access", - [ - "read", - "write", - "admin", - ], -) -@pytest.mark.parametrize( - "addon_type, addon_name", - [ - ("postgres", "custom-name-postgres"), - ("postgres", "custom-name-rds-postgres"), - ("redis", "custom-name-redis"), - ("opensearch", "custom-name-opensearch"), - ("s3", "custon-name-s3"), - ], -) -def test_get_parameter_name(access, addon_type, addon_name, mock_application): - """Test that get_parameter_name builds the correct parameter name given the - addon_name, addon_type and permission.""" - - parameter_name = get_parameter_name( - mock_application.name, "development", addon_type, addon_name, access - ) - assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, 
access) - - @pytest.mark.parametrize( "addon_type", ["postgres", "redis", "opensearch"], @@ -453,22 +404,6 @@ def test_connect_to_addon_client_task(addon_type, mock_application): ) -# Todo: Implement a test to cover the desired behaviour -# @patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) -# def test_connect_to_addon_client_task_waits_for_command_agent(addon_client_is_running, mock_application): -# task_name = mock_task_name("postgres") # Addon type for this test does not matter -# ecs_client = mock_application.environments[env].session.client("ecs") -# mock_subprocess = Mock() -# # We want this to throw InvalidParameterException the first time, then behave as normal -# -# connect_to_addon_client_task( -# ecs_client, mock_subprocess, mock_application.name, env, "test-arn", task_name -# ) -# -# # Assert "Unable to connect, execute command agent probably isn’t running yet" in output -# # If it doesn't bomb out with CreateTaskTimeoutError all is good - - @pytest.mark.parametrize( "addon_type", ["postgres", "redis", "opensearch"], @@ -485,7 +420,7 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( task_name = mock_task_name(addon_type) ecs_client = mock_application.environments[env].session.client("ecs") mock_subprocess = Mock() - addon_client_is_running = Mock(return_value=False) + get_ecs_task_arns = Mock(return_value=[]) with pytest.raises(CreateTaskTimeoutError): connect_to_addon_client_task( @@ -495,95 +430,9 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( env, "test-arn", task_name, - addon_client_is_running_fn=addon_client_is_running, + get_ecs_task_arns_fn=get_ecs_task_arns, ) - addon_client_is_running.assert_called_with(ecs_client, "test-arn", task_name) - assert addon_client_is_running.call_count == 15 + get_ecs_task_arns.assert_called_with(ecs_client, "test-arn", task_name) + assert get_ecs_task_arns.call_count == 15 mock_subprocess.call.assert_not_called() 
- - -@mock_aws -@pytest.mark.parametrize( - "addon_name, expected_type", - [ - ("custom-name-postgres", "postgres"), - ("custom-name-redis", "redis"), - ("custom-name-opensearch", "opensearch"), - ], -) -def test_get_addon_type(addon_name, expected_type, mock_application): - """Test that get_addon_type returns the expected addon type.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - - add_addon_config_parameter() - addon_type = get_addon_type(ssm_client, mock_application.name, env, addon_name) - - assert addon_type == expected_type - - -@mock_aws -def test_get_addon_type_with_not_found_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the addon is not - found in the config file.""" - - add_addon_config_parameter({"different-name": {"type": "redis"}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with pytest.raises(AddonNotFoundError): - get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") - - -@mock_aws -def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the addon config - parameter is not found.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", - Type="String", - Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), - ) - - with pytest.raises(ParameterNotFoundError): - get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") - - -@mock_aws -def test_get_addon_type_with_invalid_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an invalid addon type.""" - - add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) - ssm_client = 
mock_application.environments[env].session.client("ssm") - - with pytest.raises(InvalidAddonTypeError): - get_addon_type(ssm_client, mock_application.name, env, "invalid-extension") - - -@mock_aws -def test_get_addon_type_with_blank_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an empty addon type.""" - - add_addon_config_parameter(param_value={"blank-extension": {}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with pytest.raises(AddonTypeMissingFromConfigError): - get_addon_type(ssm_client, mock_application.name, env, "blank-extension") - - -@mock_aws -def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an empty addon type.""" - - add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with pytest.raises(AddonTypeMissingFromConfigError): - get_addon_type(ssm_client, mock_application.name, env, "addon-type-unspecified") diff --git a/tests/platform_helper/providers/test_ecs.py b/tests/platform_helper/providers/test_ecs.py index 2d7c96a55..1bb15b022 100644 --- a/tests/platform_helper/providers/test_ecs.py +++ b/tests/platform_helper/providers/test_ecs.py @@ -6,55 +6,66 @@ from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.providers.ecs import ecs_exec_is_available -from dbt_platform_helper.providers.ecs import get_cluster_arn -from dbt_platform_helper.providers.ecs import get_ecs_task_arns -from dbt_platform_helper.providers.ecs import get_or_create_task_name +from dbt_platform_helper.providers.ecs import ECS from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @mock_aws def 
test_get_cluster_arn(mocked_cluster, mock_application): - assert ( - get_cluster_arn( - mock_application.environments["development"].session.client("ecs"), - mock_application.name, - "development", - ) - == mocked_cluster["cluster"]["clusterArn"] - ) + ecs_client = mock_application.environments["development"].session.client("ecs") + ssm_client = mock_application.environments["development"].session.client("ssm") + application_name = mock_application.name + env = "development" + ecs_manager = ECS(ecs_client, ssm_client, application_name, env) + + cluster_arn = ecs_manager.get_cluster_arn() + + assert cluster_arn == mocked_cluster["cluster"]["clusterArn"] @mock_aws def test_get_cluster_arn_with_no_cluster_raises_error(mock_application): + ecs_client = mock_application.environments["development"].session.client("ecs") + ssm_client = mock_application.environments["development"].session.client("ssm") + application_name = mock_application.name + env = "does-not-exist" + + ecs_manager = ECS(ecs_client, ssm_client, application_name, env) + with pytest.raises(NoClusterError): - get_cluster_arn( - mock_application.environments["development"].session.client("ecs"), - mock_application.name, - "does-not-exist", - ) + ecs_manager.get_cluster_arn() +@mock_aws def test_get_ecs_task_arns_with_running_task( mock_cluster_client_task, mocked_cluster, mock_application ): - addon_type = "redis" mock_cluster_client_task(addon_type) mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] ecs_client = mock_application.environments["development"].session.client("ecs") - - assert get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) + ecs_manager = ECS( + ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + assert ecs_manager.get_ecs_task_arns(mocked_cluster_arn, mock_task_name(addon_type)) +@mock_aws def test_get_ecs_task_arns_with_no_running_task(mocked_cluster, mock_application): - 
addon_type = "opensearch" mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] ecs_client = mock_application.environments["development"].session.client("ecs") - - assert len(get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type))) is 0 + ecs_manager = ECS( + ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + assert len(ecs_manager.get_ecs_task_arns(mocked_cluster_arn, mock_task_name(addon_type))) == 0 @mock_aws @@ -65,8 +76,6 @@ def test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_applicatio ec2 = boto3.resource("ec2") vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16") subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18") - - # create unrelated task mocked_task_definition_arn = ecs_client.register_task_definition( family=f"other-task", requiresCompatibilities=["FARGATE"], @@ -91,43 +100,51 @@ def test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_applicatio } }, ) - - assert len(get_ecs_task_arns(ecs_client, cluster_arn, task_name)) is 0 + ecs_manager = ECS( + ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + assert len(ecs_manager.get_ecs_task_arns(cluster_arn, task_name)) == 0 +@mock_aws def test_ecs_exec_is_available(mock_cluster_client_task, mocked_cluster, mock_application): - - # use mock ecs_client as describe_tasks is overriden mocked_ecs_client = mock_cluster_client_task("postgres") mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - - ecs_exec_is_available( + ecs_manager = ECS( mocked_ecs_client, - mocked_cluster_arn, - ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"], + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + ecs_manager.ecs_exec_is_available( + mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] 
) @patch("time.sleep", return_value=None) -def test_test_ecs_exec_is_available_with_exec_not_running_raises_exception( +@mock_aws +def test_ecs_exec_is_available_with_exec_not_running_raises_exception( sleep, mock_cluster_client_task, mocked_cluster, mock_application ): - - # use mock ecs_client as describe_tasks is overriden mocked_ecs_client = mock_cluster_client_task("postgres", "PENDING") mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - + ecs_manager = ECS( + mocked_ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) with pytest.raises(ECSAgentNotRunning): - ecs_exec_is_available( - mocked_ecs_client, - mocked_cluster_arn, - ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"], + ecs_manager.ecs_exec_is_available( + mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] ) @mock_aws def test_get_or_create_task_name(mock_application): - addon_name = "app-postgres" parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) mock_application.environments["development"].session.client("ssm") @@ -137,23 +154,24 @@ def test_get_or_create_task_name(mock_application): Type="String", Value=mock_task_name(addon_name), ) - - task_name = get_or_create_task_name( - mock_ssm, mock_application.name, "development", addon_name, parameter_name + ecs_manager = ECS( + mock_application.environments["development"].session.client("ecs"), + mock_ssm, + mock_application.name, + "development", ) - + task_name = ecs_manager.get_or_create_task_name(addon_name, parameter_name) assert task_name == mock_task_name(addon_name) @mock_aws def test_get_or_create_task_name_appends_random_id(mock_application): - addon_name = "app-postgres" ssm_client = mock_application.environments["development"].session.client("ssm") parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - task_name = get_or_create_task_name( - ssm_client, 
mock_application.name, "development", addon_name, parameter_name - ) + ecs_manager = ECS(ssm_client, ssm_client, mock_application.name, "development") + + task_name = ecs_manager.get_or_create_task_name(addon_name, parameter_name) random_id = task_name.rsplit("-", 1)[1] assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] diff --git a/tests/platform_helper/providers/test_secrets.py b/tests/platform_helper/providers/test_secrets.py index cea50b10b..43f716398 100644 --- a/tests/platform_helper/providers/test_secrets.py +++ b/tests/platform_helper/providers/test_secrets.py @@ -1,16 +1,40 @@ +import json + import boto3 import pytest from moto import mock_aws -from dbt_platform_helper.providers.copilot import ( - get_postgres_connection_data_updated_with_master_secret, -) -from dbt_platform_helper.providers.secrets import SecretNotFoundError -from dbt_platform_helper.providers.secrets import get_connection_secret_arn +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.providers.secrets import Secrets +from tests.platform_helper.conftest import add_addon_config_parameter +from tests.platform_helper.conftest import mock_parameter_name env = "development" +@pytest.mark.parametrize( + "test_string", + [ + ("app-rds-postgres", "APP_RDS_POSTGRES"), + ("APP-POSTGRES", "APP_POSTGRES"), + ("APP-OpenSearch", "APP_OPENSEARCH"), + ], +) +def test_normalise_secret_name(test_string, mock_application): + """Test that given an addon name, normalise_secret_name produces the + expected result.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = 
mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + assert secrets_manager._normalise_secret_name(test_string[0]) == test_string[1] + + @mock_aws def test_get_connection_secret_arn_from_secrets_manager(mock_application): """Test that, given app, environment and secret name strings, @@ -26,7 +50,9 @@ def test_get_connection_secret_arn_from_secrets_manager(mock_application): ssm_client = mock_application.environments[env].session.client("ssm") secrets_client = mock_application.environments[env].session.client("secretsmanager") - arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + arn = secrets_manager.get_connection_secret_arn(secret_name) assert arn.startswith( "arn:aws:secretsmanager:eu-west-2:123456789012:secret:" @@ -49,7 +75,9 @@ def test_get_connection_secret_arn_from_parameter_store(mock_application): Type="SecureString", ) - arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + arn = secrets_manager.get_connection_secret_arn(secret_name) assert ( arn @@ -65,18 +93,19 @@ def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): ssm_client = mock_application.environments[env].session.client("ssm") secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(SecretNotFoundError): - get_connection_secret_arn(ssm_client, secrets_client, "POSTGRES") + with pytest.raises(SecretNotFoundError) as ex: + secrets_manager.get_connection_secret_arn("POSTGRES") @mock_aws -def test_update_postgres_parameter_with_master_secret(): +def test_update_postgres_parameter_with_master_secret(mock_application): session = 
boto3.session.Session() parameter_name = "test-parameter" ssm_client = session.client("ssm") - secretsmanager_client = session.client("secretsmanager") + session.client("secretsmanager") ssm_client.put_parameter( Name=parameter_name, Value='{"username": "read-only-user", "password": ">G12345", "host": "test.com", "port": 5432}', @@ -86,8 +115,14 @@ def test_update_postgres_parameter_with_master_secret(): Name="master-secret", SecretString='{"username": "postgres", "password": ">G6789"}' )["ARN"] - updated_parameter_value = get_postgres_connection_data_updated_with_master_secret( - ssm_client, secretsmanager_client, parameter_name, secret_arn + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + updated_parameter_value = ( + secrets_manager.get_postgres_connection_data_updated_with_master_secret( + parameter_name, secret_arn + ) ) assert updated_parameter_value == { @@ -96,3 +131,131 @@ def test_update_postgres_parameter_with_master_secret(): "host": "test.com", "port": 5432, } + + +@mock_aws +@pytest.mark.parametrize( + "addon_name, expected_type", + [ + ("custom-name-postgres", "postgres"), + ("custom-name-redis", "redis"), + ("custom-name-opensearch", "opensearch"), + ], +) +def test_get_addon_type(addon_name, expected_type, mock_application): + """Test that get_addon_type returns the expected addon type.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + add_addon_config_parameter() + addon_type = secrets_manager.get_addon_type(addon_name) + + assert addon_type == expected_type + + +@mock_aws +def test_get_addon_type_with_not_found_throws_exception(mock_application): + 
"""Test that get_addon_type raises the expected error when the addon is not + found in the config file.""" + + add_addon_config_parameter({"different-name": {"type": "redis"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(AddonNotFoundError): + secrets_manager.get_addon_type("custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon config + parameter is not found.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", + Type="String", + Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), + ) + + with pytest.raises(ParameterNotFoundError): + secrets_manager.get_addon_type("custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_invalid_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an invalid addon type.""" + + add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(InvalidAddonTypeError): + secrets_manager.get_addon_type("invalid-extension") + + +@mock_aws +def 
test_get_addon_type_with_blank_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"blank-extension": {}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(AddonTypeMissingFromConfigError): + secrets_manager.get_addon_type("blank-extension") + + +@mock_aws +def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(AddonTypeMissingFromConfigError): + secrets_manager.get_addon_type("addon-type-unspecified") + + +@mock_aws +@pytest.mark.parametrize( + "access", + [ + "read", + "write", + "admin", + ], +) +@pytest.mark.parametrize( + "addon_type, addon_name", + [ + ("postgres", "custom-name-postgres"), + ("postgres", "custom-name-rds-postgres"), + ("redis", "custom-name-redis"), + ("opensearch", "custom-name-opensearch"), + ("s3", "custon-name-s3"), + ], +) +def test_get_parameter_name(access, addon_type, addon_name, mock_application): + """Test that get_parameter_name builds the correct parameter name given the + addon_name, addon_type and permission.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = 
Secrets(ssm_client, secrets_client, mock_application.name, env) + parameter_name = secrets_manager.get_parameter_name(addon_type, addon_name, access) + assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) diff --git a/tests/platform_helper/test_command_codebase.py b/tests/platform_helper/test_command_codebase.py index 137b4c038..894b746f7 100644 --- a/tests/platform_helper/test_command_codebase.py +++ b/tests/platform_helper/test_command_codebase.py @@ -45,8 +45,6 @@ def test_aborts_when_not_in_a_codebase_repository(self, mock_click, mock_codebas result = CliRunner().invoke(prepare_command) - expected_message = "You are in the deploy repository; make sure you are in the application codebase repository." - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -72,8 +70,6 @@ def test_codebase_build_does_not_trigger_build_without_an_application( "ab1c23d", ], ) - expected_message = f"""The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -101,8 +97,7 @@ def test_codebase_build_aborts_with_a_nonexistent_commit_hash( mock_codebase_object_instance.build.assert_called_once_with( "test-application", "application", "nonexistent-commit-hash" ) - expected_message = f"""The commit hash "nonexistent-commit-hash" either does not exist or you need to run `git fetch`.""" - mock_click.assert_called_with(expected_message, fg="red") + assert result.exit_code == 1 @@ -156,8 +151,6 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository_or_image_tag mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "development", "application", "nonexistent-commit-hash" ) - expected_message = f"""The commit hash "nonexistent-commit-hash" has not been built into an image, try the `platform-helper 
codebase build` command first.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -186,8 +179,6 @@ def test_codebase_deploy_does_not_trigger_build_without_an_application( mock_codebase_object_instance.deploy.assert_called_once_with( "not-an-application", "dev", "application", "ab1c23d" ) - expected_message = f"""The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -216,8 +207,6 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_environment( mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "not-an-environment", "application", "ab1c23d" ) - expected_message = f"""The environment "not-an-environment" either does not exist or has not been deployed.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -246,10 +235,6 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_codebase( mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "test-environment", "not-a-codebase", "ab1c23d" ) - expected_message = ( - f"""The codebase "not-a-codebase" either does not exist or has not been deployed.""" - ) - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -273,8 +258,6 @@ def test_list_aborts_when_application_has_no_codebases(self, mock_click, mock_co result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) - expected_message = f"""No codebases found for application "test-application""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 
@patch("dbt_platform_helper.commands.codebase.Codebase") @@ -286,9 +269,6 @@ def test_aborts_when_application_does_not_exist(self, mock_click, mock_codebase_ result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) - app = "test-application" - expected_message = f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 diff --git a/tests/platform_helper/test_command_conduit.py b/tests/platform_helper/test_command_conduit.py index c5c9990b5..880486335 100644 --- a/tests/platform_helper/test_command_conduit.py +++ b/tests/platform_helper/test_command_conduit.py @@ -5,13 +5,7 @@ from click.testing import CliRunner from dbt_platform_helper.commands.conduit import conduit -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError -from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError @pytest.mark.parametrize( @@ -52,42 +46,6 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida mock_conduit_instance.start.assert_called_with("development", addon_name, "read") -@pytest.mark.parametrize( - "exception_type,exception_input_params,expected_message", - [ - ( - SecretNotFoundError, - {}, - """No secret called "" for "test-application" in "development" environment.""", - ), - (AddonNotFoundError, {}, """Addon "important-db" does not exist."""), - ( - CreateTaskTimeoutError, - {}, - """Client (important-db) ECS task 
has failed to start for "test-application" in "development" environment.""", - ), - ( - NoClusterError, - {}, - """No ECS cluster found for "test-application" in "development" environment.""", - ), - ( - ParameterNotFoundError, - {}, - """No parameter called "/copilot/applications/test-application/environments/development/addons". Try deploying the "test-application" "development" environment.""", - ), - ( - InvalidAddonTypeError, - {"addon_type": "fake-postgres"}, - """Addon type "fake-postgres" is not supported, we support: opensearch, postgres, redis.""", - ), - ( - AddonTypeMissingFromConfigError, - {}, - """The configuration for the addon important-db, is missconfigured and missing the addon type.""", - ), - ], -) @patch("dbt_platform_helper.commands.conduit.Conduit") @patch( "dbt_platform_helper.utils.versioning.running_as_installed_package", @@ -95,20 +53,15 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida ) @patch("dbt_platform_helper.commands.conduit.load_application") @patch("click.secho") -def test_start_conduit_exception_is_raised( +def test_start_conduit_with_exception_raised_exit_1( mock_click, mock_application, mock_conduit_object, validate_version, - exception_type, - exception_input_params, - expected_message, ): - """Test that given an app, env and addon name strings, the conduit command - calls start_conduit with app, env, addon type and addon name.""" mock_conduit_instance = mock_conduit_object.return_value - mock_conduit_instance.start.side_effect = exception_type(**exception_input_params) + mock_conduit_instance.start.side_effect = SecretNotFoundError(secret_name="test-secret") addon_name = "important-db" result = CliRunner().invoke( conduit, @@ -121,7 +74,7 @@ def test_start_conduit_exception_is_raised( ], ) - mock_click.assert_called_with(expected_message, fg="red") + mock_click.assert_called_with("""No secret called "test-secret".""", fg="red") assert result.exit_code == 1 diff --git 
a/tests/platform_helper/test_exceptions.py b/tests/platform_helper/test_exceptions.py new file mode 100644 index 000000000..7c7d7a8d3 --- /dev/null +++ b/tests/platform_helper/test_exceptions.py @@ -0,0 +1,110 @@ +import os + +import pytest + +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered +from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError +from dbt_platform_helper.exceptions import ApplicationNotFoundError +from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError +from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import ECSAgentNotRunning +from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError +from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError + + +@pytest.mark.parametrize( + "exception, exception_params, expected_message", + [ + ( + AddonNotFoundError, + {"addon_name": "test-addon"}, + """Addon "test-addon" does not exist.""", + ), + ( + AddonTypeMissingFromConfigError, + {"addon_name": "test-addon"}, + """The configuration for the addon test-addon, is misconfigured and missing the addon type.""", + ), + ( + ApplicationDeploymentNotTriggered, + {"codebase": "test-codebase"}, + """Your deployment for test-codebase was not triggered.""", + ), + ( + ApplicationEnvironmentNotFoundError, + {"environment": "development"}, + """The environment "development" either does not exist or has not been deployed.""", + ), + ( + ApplicationNotFoundError, + 
{"application_name": "test-application"}, + """The account "foo" does not contain the application "test-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", + ), + ( + CopilotCodebaseNotFoundError, + {"codebase": "test-codebase-exists"}, + """The codebase "test-codebase-exists" either does not exist or has not been deployed.""", + ), + ( + CreateTaskTimeoutError, + { + "addon_name": "test-addon", + "application_name": "test-application", + "environment": "environment", + }, + """Client (test-addon) ECS task has failed to start for "test-application" in "environment" environment.""", + ), + ( + InvalidAddonTypeError, + {"addon_type": "test-addon-type"}, + """Addon type "test-addon-type" is not supported, we support: opensearch, postgres, redis.""", + ), + ( + ImageNotFoundError, + {"commit": "test-commit-hash"}, + """The commit hash "test-commit-hash" has not been built into an image, try the `platform-helper codebase build` command first.""", + ), + ( + NoCopilotCodebasesFoundError, + {"application_name": "test-application"}, + """No codebases found for application "test-application".""", + ), + ( + NoClusterError, + {"application_name": "test-application", "environment": "environment"}, + """No ECS cluster found for "test-application" in "environment" environment.""", + ), + ( + NotInCodeBaseRepositoryError, + {}, + """You are in the deploy repository; make sure you are in the application codebase repository.""", + ), + ( + ParameterNotFoundError, + {"application_name": "test-application", "environment": "environment"}, + """No parameter called "/copilot/applications/test-application/environments/environment/addons". 
Try deploying the "test-application" "environment" environment.""", + ), + ( + SecretNotFoundError, + {"secret_name": "test-secret"}, + """No secret called "test-secret".""", + ), + ( + ECSAgentNotRunning, + {}, + """ECS exec agent never reached "RUNNING" status""", + ), + ], +) +def test_exception_message(exception, exception_params, expected_message): + os.environ["AWS_PROFILE"] = "foo" + + exception = exception(**exception_params) + assert str(exception) == expected_message From 331e8b89d60fec4e29a9ea4473ffa44cba8e92c7 Mon Sep 17 00:00:00 2001 From: Connor Hindle <69192234+DeveloperConnor@users.noreply.github.com> Date: Mon, 2 Dec 2024 16:52:04 +0000 Subject: [PATCH 21/38] revert: Improving provider structure and exception handling" (#670) --- dbt_platform_helper/commands/codebase.py | 89 ++++++- dbt_platform_helper/commands/conduit.py | 52 +++- dbt_platform_helper/domain/codebase.py | 12 +- dbt_platform_helper/domain/conduit.py | 43 +++- dbt_platform_helper/exceptions.py | 107 ++------- .../providers/cloudformation.py | 222 ++++++++---------- dbt_platform_helper/providers/copilot.py | 17 +- dbt_platform_helper/providers/ecs.py | 148 ++++++------ dbt_platform_helper/providers/secrets.py | 148 ++++++------ dbt_platform_helper/utils/application.py | 2 +- dbt_platform_helper/utils/aws.py | 4 +- tests/platform_helper/domain/test_codebase.py | 58 +++-- tests/platform_helper/domain/test_conduit.py | 52 +--- .../domain/test_database_copy.py | 2 +- .../providers/test_cloudformation.py | 78 +++--- .../platform_helper/providers/test_copilot.py | 201 ++++++++++++++-- tests/platform_helper/providers/test_ecs.py | 118 ++++------ .../platform_helper/providers/test_secrets.py | 189 +-------------- .../platform_helper/test_command_codebase.py | 22 +- tests/platform_helper/test_command_conduit.py | 55 ++++- tests/platform_helper/test_exceptions.py | 110 --------- 21 files changed, 824 insertions(+), 905 deletions(-) delete mode 100644 tests/platform_helper/test_exceptions.py 
diff --git a/dbt_platform_helper/commands/codebase.py b/dbt_platform_helper/commands/codebase.py index e082f5b1c..cdf4ee23c 100644 --- a/dbt_platform_helper/commands/codebase.py +++ b/dbt_platform_helper/commands/codebase.py @@ -1,8 +1,18 @@ +import json +import os + import click from dbt_platform_helper.domain.codebase import Codebase -from dbt_platform_helper.exceptions import PlatformException +from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered +from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError +from dbt_platform_helper.exceptions import ApplicationNotFoundError +from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError +from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError +from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError from dbt_platform_helper.utils.click import ClickDocOptGroup +from dbt_platform_helper.utils.git import CommitNotFoundError from dbt_platform_helper.utils.versioning import ( check_platform_helper_version_needs_update, ) @@ -19,8 +29,12 @@ def prepare(): """Sets up an application codebase for use within a DBT platform project.""" try: Codebase().prepare() - except PlatformException as err: - click.secho(str(err), fg="red") + except NotInCodeBaseRepositoryError: + # TODO: Set exception message in the exceptions and just output the message in the command code + click.secho( + "You are in the deploy repository; make sure you are in the application codebase repository.", + fg="red", + ) raise click.Abort @@ -36,8 +50,17 @@ def list(app, with_images): """List available codebases for the application.""" try: Codebase().list(app, with_images) - except PlatformException as err: - click.secho(str(err), fg="red") + except NoCopilotCodebasesFoundError: + click.secho( + f"""No codebases found for application "{app}""", + fg="red", + ) + raise click.Abort + except 
ApplicationNotFoundError: + click.secho( + f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", + fg="red", + ) raise click.Abort @@ -53,8 +76,23 @@ def build(app, codebase, commit): """Trigger a CodePipeline pipeline based build.""" try: Codebase().build(app, codebase, commit) - except PlatformException as err: - click.secho(str(err), fg="red") + except ApplicationNotFoundError: + click.secho( + f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", + fg="red", + ) + raise click.Abort + except CommitNotFoundError: + click.secho( + f'The commit hash "{commit}" either does not exist or you need to run `git fetch`.', + fg="red", + ) + raise click.Abort + except ApplicationDeploymentNotTriggered: + click.secho( + f"Your build for {codebase} was not triggered.", + fg="red", + ) raise click.Abort @@ -70,6 +108,39 @@ def build(app, codebase, commit): def deploy(app, env, codebase, commit): try: Codebase().deploy(app, env, codebase, commit) - except PlatformException as err: - click.secho(str(err), fg="red") + except ApplicationNotFoundError: + # TODO: Set exception message in the exceptions and just output the message in the command code + click.secho( + f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", + fg="red", + ) + raise click.Abort + except ApplicationEnvironmentNotFoundError: + click.secho( + f"""The environment "{env}" either does not exist or has not been deployed.""", + fg="red", + ) + raise click.Abort + except ( + CopilotCodebaseNotFoundError, + # TODO: Catch this error earlier and throw a more meaningful error, maybe it's CopilotCodebaseNotFoundError? 
+ json.JSONDecodeError, + ): + click.secho( + f"""The codebase "{codebase}" either does not exist or has not been deployed.""", + fg="red", + ) + raise click.Abort + except ImageNotFoundError: + click.secho( + f'The commit hash "{commit}" has not been built into an image, try the ' + "`platform-helper codebase build` command first.", + fg="red", + ) + raise click.Abort + except ApplicationDeploymentNotTriggered: + click.secho( + f"Your deployment for {codebase} was not triggered.", + fg="red", + ) raise click.Abort diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index 6f0707090..a2828841c 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -1,7 +1,14 @@ import click +from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.providers.secrets import SecretNotFoundError from dbt_platform_helper.utils.application import load_application from dbt_platform_helper.utils.click import ClickDocOptCommand from dbt_platform_helper.utils.versioning import ( @@ -29,6 +36,43 @@ def conduit(addon_name: str, app: str, env: str, access: str): try: Conduit(application).start(env, addon_name, access) - except AWSException as err: - click.secho(str(err), fg="red") - raise click.Abort + except NoClusterError: + # TODO: Set exception message in the exceptions and just output the message in the command code, should be able to catch all errors in one block + 
click.secho(f"""No ECS cluster found for "{app}" in "{env}" environment.""", fg="red") + exit(1) + except SecretNotFoundError as err: + click.secho( + f"""No secret called "{err}" for "{app}" in "{env}" environment.""", + fg="red", + ) + exit(1) + except CreateTaskTimeoutError: + click.secho( + f"""Client ({addon_name}) ECS task has failed to start for "{app}" in "{env}" environment.""", + fg="red", + ) + exit(1) + except ParameterNotFoundError: + click.secho( + f"""No parameter called "/copilot/applications/{app}/environments/{env}/addons". Try deploying the "{app}" "{env}" environment.""", + fg="red", + ) + exit(1) + except AddonNotFoundError: + click.secho( + f"""Addon "{addon_name}" does not exist.""", + fg="red", + ) + exit(1) + except InvalidAddonTypeError as err: + click.secho( + f"""Addon type "{err.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""", + fg="red", + ) + exit(1) + except AddonTypeMissingFromConfigError: + click.secho( + f"""The configuration for the addon {addon_name}, is missconfigured and missing the addon type.""", + fg="red", + ) + exit(1) diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index 053a25a60..eb1b807e2 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -11,6 +11,7 @@ from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError +from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.application import load_application @@ -66,7 +67,7 @@ def prepare(self): .removesuffix(".git") ) if repository.endswith("-deploy") or Path("./copilot").exists(): - raise NotInCodeBaseRepositoryError() + raise NotInCodeBaseRepositoryError 
builder_configuration_url = "https://raw.githubusercontent.com/uktrade/ci-image-builder/main/image_builder/configuration/builder_configuration.yml" builder_configuration_response = requests.get(builder_configuration_url) @@ -133,7 +134,7 @@ def build(self, app: str, codebase: str, commit: str): f"Your build has been triggered. Check your build progress in the AWS Console: {build_url}" ) - raise ApplicationDeploymentNotTriggered(codebase) + raise ApplicationDeploymentNotTriggered() def deploy(self, app, env, codebase, commit): """Trigger a CodePipeline pipeline based deployment.""" @@ -141,7 +142,7 @@ def deploy(self, app, env, codebase, commit): application = self.load_application_fn(app, default_session=session) if not application.environments.get(env): - raise ApplicationEnvironmentNotFoundError(env) + raise ApplicationEnvironmentNotFoundError() self.check_codebase_exists_fn(session, application, codebase) @@ -170,7 +171,7 @@ def deploy(self, app, env, codebase, commit): f"{build_url}", ) - raise ApplicationDeploymentNotTriggered(codebase) + raise ApplicationDeploymentNotTriggered() def list(self, app: str, with_images: bool): """List available codebases for the application.""" @@ -203,7 +204,8 @@ def __get_codebases(self, application, ssm_client): codebases = [json.loads(p["Value"]) for p in parameters] if not codebases: - return [] + # TODO Is this really an error? Or just no codebases so we could return an empty list? 
+ raise NoCopilotCodebasesFoundError return codebases def __start_build_with_confirmation( diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index 51a138f6b..349df2617 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -3,13 +3,25 @@ import click -from dbt_platform_helper.providers.cloudformation import CloudFormation +from dbt_platform_helper.exceptions import ECSAgentNotRunning +from dbt_platform_helper.providers.cloudformation import ( + add_stack_delete_policy_to_task_role, +) +from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources +from dbt_platform_helper.providers.cloudformation import ( + wait_for_cloudformation_to_reach_status, +) from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.ecs import ECS -from dbt_platform_helper.providers.secrets import Secrets +from dbt_platform_helper.providers.ecs import ecs_exec_is_available +from dbt_platform_helper.providers.ecs import get_cluster_arn +from dbt_platform_helper.providers.ecs import get_ecs_task_arns +from dbt_platform_helper.providers.ecs import get_or_create_task_name +from dbt_platform_helper.providers.secrets import get_addon_type +from dbt_platform_helper.providers.secrets import get_parameter_name from dbt_platform_helper.utils.application import Application +from dbt_platform_helper.utils.messages import abort_with_error class Conduit: @@ -18,18 +30,19 @@ def __init__( application: Application, echo_fn: Callable[[str], str] = click.secho, subprocess_fn: subprocess = subprocess, - get_ecs_task_arns_fn=ECS.get_ecs_task_arns, + get_ecs_task_arns_fn=get_ecs_task_arns, connect_to_addon_client_task_fn=connect_to_addon_client_task, 
create_addon_client_task_fn=create_addon_client_task, create_postgres_admin_task_fn=create_postgres_admin_task, - get_addon_type_fn=Secrets.get_addon_type, - ecs_exec_is_available_fn=ECS.ecs_exec_is_available, - get_cluster_arn_fn=ECS.get_cluster_arn, - get_parameter_name_fn=Secrets.get_parameter_name, - get_or_create_task_name_fn=ECS.get_or_create_task_name, - add_stack_delete_policy_to_task_role_fn=CloudFormation.add_stack_delete_policy_to_task_role, - update_conduit_stack_resources_fn=CloudFormation.update_conduit_stack_resources, - wait_for_cloudformation_to_reach_status_fn=CloudFormation.wait_for_cloudformation_to_reach_status, + get_addon_type_fn=get_addon_type, + ecs_exec_is_available_fn=ecs_exec_is_available, + get_cluster_arn_fn=get_cluster_arn, + get_parameter_name_fn=get_parameter_name, + get_or_create_task_name_fn=get_or_create_task_name, + add_stack_delete_policy_to_task_role_fn=add_stack_delete_policy_to_task_role, + update_conduit_stack_resources_fn=update_conduit_stack_resources, + wait_for_cloudformation_to_reach_status_fn=wait_for_cloudformation_to_reach_status, + abort_fn=abort_with_error, ): self.application = application @@ -47,6 +60,7 @@ def __init__( self.add_stack_delete_policy_to_task_role_fn = add_stack_delete_policy_to_task_role_fn self.update_conduit_stack_resources_fn = update_conduit_stack_resources_fn self.wait_for_cloudformation_to_reach_status_fn = wait_for_cloudformation_to_reach_status_fn + self.abort_fn = abort_fn def start(self, env: str, addon_name: str, access: str = "read"): clients = self._initialise_clients(env) @@ -92,7 +106,10 @@ def start(self, env: str, addon_name: str, access: str = "read"): self.echo_fn(f"Checking if exec is available for conduit task...") - self.ecs_exec_is_available_fn(clients["ecs"], cluster_arn, task_arn) + try: + self.ecs_exec_is_available_fn(clients["ecs"], cluster_arn, task_arn) + except ECSAgentNotRunning: + self.abort_fn('ECS exec agent never reached "RUNNING" status') 
self.echo_fn("Connecting to conduit task") self.connect_to_addon_client_task_fn( diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py index a0a28e551..2917d1059 100644 --- a/dbt_platform_helper/exceptions.py +++ b/dbt_platform_helper/exceptions.py @@ -1,38 +1,11 @@ -import os - -from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES - - class ValidationException(Exception): pass -class PlatformException(Exception): - pass - - -class AWSException(PlatformException): - pass - - -class ApplicationException(PlatformException): +class AWSException(Exception): pass -class CloudFormationException(AWSException): - def __init__(self, stack_name: str, current_status: str): - super().__init__( - f"The CloudFormation stack '{stack_name}' is not in a good state: {current_status}" - ) - - -class CommitNotFoundError: - def __init__(self, commit: str): - super().__init__( - f"""The commit hash "{commit}" either does not exist or you need to run `git fetch`.""" - ) - - class IncompatibleMajorVersion(ValidationException): def __init__(self, app_version: str, check_version: str): super().__init__() @@ -48,97 +21,61 @@ def __init__(self, app_version: str, check_version: str): class NoClusterError(AWSException): - def __init__(self, application_name: str, environment: str): - super().__init__( - f"""No ECS cluster found for "{application_name}" in "{environment}" environment.""" - ) + pass class CreateTaskTimeoutError(AWSException): - def __init__(self, addon_name: str, application_name: str, environment: str): - super().__init__( - f"""Client ({addon_name}) ECS task has failed to start for "{application_name}" in "{environment}" environment.""" - ) + pass class ParameterNotFoundError(AWSException): - def __init__(self, application_name: str, environment: str): - super().__init__( - f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". 
Try deploying the "{application_name}" "{environment}" environment.""" - ) + pass class AddonNotFoundError(AWSException): - def __init__(self, addon_name: str): - super().__init__(f"""Addon "{addon_name}" does not exist.""") + pass class InvalidAddonTypeError(AWSException): def __init__(self, addon_type): self.addon_type = addon_type - super().__init__( - f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""" - ) class AddonTypeMissingFromConfigError(AWSException): - def __init__(self, addon_name: str): - super().__init__( - f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type.""" - ) + pass -class CopilotCodebaseNotFoundError(PlatformException): - def __init__(self, codebase: str): - super().__init__( - f"""The codebase "{codebase}" either does not exist or has not been deployed.""" - ) +class CopilotCodebaseNotFoundError(Exception): + pass -class NotInCodeBaseRepositoryError(PlatformException): - def __init__(self): - super().__init__( - "You are in the deploy repository; make sure you are in the application codebase repository.", - ) +class NotInCodeBaseRepositoryError(Exception): + pass -class NoCopilotCodebasesFoundError(PlatformException): - def __init__(self, application_name: str): - super().__init__(f"""No codebases found for application "{application_name}".""") +class NoCopilotCodebasesFoundError(Exception): + pass -class ImageNotFoundError(PlatformException): - def __init__(self, commit: str): - super().__init__( - f"""The commit hash "{commit}" has not been built into an image, try the `platform-helper codebase build` command first.""" - ) +class ImageNotFoundError(Exception): + pass -class ApplicationDeploymentNotTriggered(PlatformException): - def __init__(self, codebase: str): - super().__init__(f"""Your deployment for {codebase} was not triggered.""") +class ApplicationDeploymentNotTriggered(Exception): + pass -class 
ApplicationNotFoundError(ApplicationException): - def __init__(self, application_name: str): - super().__init__( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{application_name}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" - ) +class ApplicationNotFoundError(Exception): + pass -class ApplicationEnvironmentNotFoundError(ApplicationException): - def __init__(self, environment: str): - super().__init__( - f"""The environment "{environment}" either does not exist or has not been deployed.""" - ) +class ApplicationEnvironmentNotFoundError(Exception): + pass class SecretNotFoundError(AWSException): - # application_name: str, environment: str, - def __init__(self, secret_name: str): - super().__init__(f"""No secret called "{secret_name}".""") + pass class ECSAgentNotRunning(AWSException): - def __init__(self): - super().__init__("""ECS exec agent never reached "RUNNING" status""") + pass diff --git a/dbt_platform_helper/providers/cloudformation.py b/dbt_platform_helper/providers/cloudformation.py index 141be9052..395a49c71 100644 --- a/dbt_platform_helper/providers/cloudformation.py +++ b/dbt_platform_helper/providers/cloudformation.py @@ -1,127 +1,105 @@ import json -import botocore from cfn_tools import dump_yaml from cfn_tools import load_yaml -from dbt_platform_helper.exceptions import CloudFormationException - - -class CloudFormation: - def __init__(self, cloudformation_client, iam_client, ssm_client): - self.cloudformation_client = cloudformation_client - self.iam_client = iam_client - self.ssm_client = ssm_client - - def add_stack_delete_policy_to_task_role(self, task_name: str): - stack_name = f"task-{task_name}" - stack_resources = self.cloudformation_client.list_stack_resources(StackName=stack_name)[ - "StackResourceSummaries" - ] - - for resource in stack_resources: - if resource["LogicalResourceId"] == "DefaultTaskRole": - task_role_name = resource["PhysicalResourceId"] - 
self.iam_client.put_role_policy( - RoleName=task_role_name, - PolicyName="DeleteCloudFormationStack", - PolicyDocument=json.dumps( - { - "Version": "2012-10-17", - "Statement": [ - { - "Action": ["cloudformation:DeleteStack"], - "Effect": "Allow", - "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", - }, - ], - }, - ), - ) - - def update_conduit_stack_resources( - self, - application_name: str, - env: str, - addon_type: str, - addon_name: str, - task_name: str, - parameter_name: str, - access: str, - ): - conduit_stack_name = f"task-{task_name}" - template = self.cloudformation_client.get_template(StackName=conduit_stack_name) - template_yml = load_yaml(template["TemplateBody"]) - - template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" - - template_yml["Resources"]["TaskNameParameter"] = load_yaml( - f""" - Type: AWS::SSM::Parameter - Properties: - Name: {parameter_name} - Type: String - Value: {task_name} - """ - ) - - log_filter_role_arn = self.iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")[ - "Role" - ]["Arn"] - - destination_log_group_arns = json.loads( - self.ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"][ - "Value" - ] - ) - - destination_arn = destination_log_group_arns["dev"] - if env.lower() in ("prod", "production"): - destination_arn = destination_log_group_arns["prod"] - - template_yml["Resources"]["SubscriptionFilter"] = load_yaml( - f""" - Type: AWS::Logs::SubscriptionFilter - DeletionPolicy: Retain - Properties: - RoleArn: {log_filter_role_arn} - LogGroupName: /copilot/{task_name} - FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} - FilterPattern: '' - DestinationArn: {destination_arn} - """ - ) - - params = [] - # TODO moto bug https://uktrade.atlassian.net/browse/DBTP-1582 - if "Parameters" in template_yml: - for param in template_yml["Parameters"]: - params.append({"ParameterKey": param, "UsePreviousValue": 
True}) - - self.cloudformation_client.update_stack( - StackName=conduit_stack_name, - TemplateBody=dump_yaml(template_yml), - Parameters=params, - Capabilities=["CAPABILITY_IAM"], - ) - - return conduit_stack_name - - def wait_for_cloudformation_to_reach_status(self, stack_status, stack_name): - waiter = self.cloudformation_client.get_waiter(stack_status) - - try: - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 20}) - except botocore.exceptions.WaiterError as err: - current_status = err.last_response.get("Stacks", [{}])[0].get("StackStatus", "") - - if current_status in [ - "ROLLBACK_IN_PROGRESS", - "UPDATE_ROLLBACK_IN_PROGRESS", - "ROLLBACK_FAILED", - ]: - raise CloudFormationException(stack_name, current_status) - else: - raise CloudFormationException( - stack_name, f"Error while waiting for stack status: {str(err)}" - ) + +def add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name: str): + + stack_name = f"task-{task_name}" + stack_resources = cloudformation_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + for resource in stack_resources: + if resource["LogicalResourceId"] == "DefaultTaskRole": + task_role_name = resource["PhysicalResourceId"] + iam_client.put_role_policy( + RoleName=task_role_name, + PolicyName="DeleteCloudFormationStack", + PolicyDocument=json.dumps( + { + "Version": "2012-10-17", + "Statement": [ + { + "Action": ["cloudformation:DeleteStack"], + "Effect": "Allow", + "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", + }, + ], + }, + ), + ) + + +def update_conduit_stack_resources( + cloudformation_client, + iam_client, + ssm_client, + application_name: str, + env: str, + addon_type: str, + addon_name: str, + task_name: str, + parameter_name: str, + access: str, +): + + conduit_stack_name = f"task-{task_name}" + template = cloudformation_client.get_template(StackName=conduit_stack_name) + template_yml = load_yaml(template["TemplateBody"]) + 
template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" + template_yml["Resources"]["TaskNameParameter"] = load_yaml( + f""" + Type: AWS::SSM::Parameter + Properties: + Name: {parameter_name} + Type: String + Value: {task_name} + """ + ) + + log_filter_role_arn = iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")["Role"]["Arn"] + + destination_log_group_arns = json.loads( + ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"]["Value"] + ) + + destination_arn = destination_log_group_arns["dev"] + if env.lower() in ("prod", "production"): + destination_arn = destination_log_group_arns["prod"] + + template_yml["Resources"]["SubscriptionFilter"] = load_yaml( + f""" + Type: AWS::Logs::SubscriptionFilter + DeletionPolicy: Retain + Properties: + RoleArn: {log_filter_role_arn} + LogGroupName: /copilot/{task_name} + FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} + FilterPattern: '' + DestinationArn: {destination_arn} + """ + ) + + params = [] + if "Parameters" in template_yml: + for param in template_yml["Parameters"]: + # TODO testing missed in codecov, update test to assert on method call below with params including ExistingParameter from cloudformation template. + params.append({"ParameterKey": param, "UsePreviousValue": True}) + + cloudformation_client.update_stack( + StackName=conduit_stack_name, + TemplateBody=dump_yaml(template_yml), + Parameters=params, + Capabilities=["CAPABILITY_IAM"], + ) + + return conduit_stack_name + + +# TODO Catch errors and raise a more human friendly Exception is the CloudFormation stack goes into a "unhappy" state, e.g. ROLLBACK_IN_PROGRESS. 
Currently we get things like botocore.exceptions.WaiterError: Waiter StackUpdateComplete failed: Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" we matched expected path: "UPDATE_ROLLBACK_COMPLETE" at least once +def wait_for_cloudformation_to_reach_status(cloudformation_client, stack_status, stack_name): + + waiter = cloudformation_client.get_waiter(stack_status) + waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 20}) diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index 7954f1555..61c1a055d 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -5,8 +5,11 @@ from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.providers.ecs import ECS -from dbt_platform_helper.providers.secrets import Secrets +from dbt_platform_helper.providers.ecs import get_ecs_task_arns +from dbt_platform_helper.providers.secrets import get_connection_secret_arn +from dbt_platform_helper.providers.secrets import ( + get_postgres_connection_data_updated_with_master_secret, +) from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.messages import abort_with_error @@ -66,7 +69,7 @@ def create_addon_client_task( f"--task-group-name {task_name} " f"{execution_role}" f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " - f"--secrets CONNECTION_SECRET={Secrets.get_connection_secret_arn(ssm_client,secrets_manager_client, secret_name)} " + f"--secrets CONNECTION_SECRET={get_connection_secret_arn(ssm_client,secrets_manager_client, secret_name)} " "--platform-os linux " "--platform-arch arm64", shell=True, @@ -92,7 +95,7 @@ def create_postgres_admin_task( "Parameter" ]["Value"] connection_string = json.dumps( - Secrets.get_postgres_connection_data_updated_with_master_secret( + 
get_postgres_connection_data_updated_with_master_secret( ssm_client, secrets_manager_client, read_only_secret_name, master_secret_arn ) ) @@ -115,13 +118,13 @@ def connect_to_addon_client_task( env, cluster_arn, task_name, - get_ecs_task_arns_fn=ECS.get_ecs_task_arns, + addon_client_is_running_fn=get_ecs_task_arns, ): running = False tries = 0 while tries < 15 and not running: tries += 1 - if get_ecs_task_arns_fn(ecs_client, cluster_arn, task_name): + if addon_client_is_running_fn(ecs_client, cluster_arn, task_name): subprocess.call( "copilot task exec " f"--app {application_name} --env {env} " @@ -134,7 +137,7 @@ def connect_to_addon_client_task( time.sleep(1) if not running: - raise CreateTaskTimeoutError(task_name, application_name, env) + raise CreateTaskTimeoutError def _normalise_secret_name(addon_name: str) -> str: diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 7ddf0a180..2878e91ac 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -7,81 +7,73 @@ from dbt_platform_helper.exceptions import NoClusterError -class ECS: - def __init__(self, ecs_client, ssm_client, application_name: str, env: str): - self.ecs_client = ecs_client - self.ssm_client = ssm_client - self.application_name = application_name - self.env = env - - def get_cluster_arn(self) -> str: - """Returns the ARN of the ECS cluster for the given application and - environment.""" - for cluster_arn in self.ecs_client.list_clusters()["clusterArns"]: - tags_response = self.ecs_client.list_tags_for_resource(resourceArn=cluster_arn) - tags = tags_response["tags"] - - app_key_found = False - env_key_found = False - cluster_key_found = False - - for tag in tags: - if tag["key"] == "copilot-application" and tag["value"] == self.application_name: - app_key_found = True - if tag["key"] == "copilot-environment" and tag["value"] == self.env: - env_key_found = True - if tag["key"] == "aws:cloudformation:logical-id" and 
tag["value"] == "Cluster": - cluster_key_found = True - - if app_key_found and env_key_found and cluster_key_found: - return cluster_arn - - raise NoClusterError(self.application_name, self.env) - - def get_or_create_task_name(self, addon_name: str, parameter_name: str) -> str: - """Fetches the task name from SSM or creates a new one if not found.""" - try: - return self.ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] - except self.ssm_client.exceptions.ParameterNotFound: - random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) - return f"conduit-{self.application_name}-{self.env}-{addon_name}-{random_id}" - - def get_ecs_task_arns(self, cluster_arn: str, task_name: str): - """Gets the ECS task ARNs for a given task name and cluster ARN.""" - tasks = self.ecs_client.list_tasks( - cluster=cluster_arn, - desiredStatus="RUNNING", - family=f"copilot-{task_name}", - ) - - if not tasks["taskArns"]: - return [] - - return tasks["taskArns"] - - def ecs_exec_is_available(self, cluster_arn: str, task_arns: List[str]): - """ - Checks if the ExecuteCommandAgent is running on the specified ECS task. - - Waits for up to 25 attempts, then raises ECSAgentNotRunning if still not - running. 
- """ - current_attempts = 0 - execute_command_agent_status = "" - - while execute_command_agent_status != "RUNNING" and current_attempts < 25: - current_attempts += 1 - - task_details = self.ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) - - managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] - execute_command_agent_status = [ - agent["lastStatus"] - for agent in managed_agents - if agent["name"] == "ExecuteCommandAgent" - ][0] - if execute_command_agent_status != "RUNNING": - time.sleep(1) - - if execute_command_agent_status != "RUNNING": - raise ECSAgentNotRunning +# Todo: Refactor to a class, review, then perhaps do the others +def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: + for cluster_arn in ecs_client.list_clusters()["clusterArns"]: + tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) + tags = tags_response["tags"] + + app_key_found = False + env_key_found = False + cluster_key_found = False + + for tag in tags: + if tag["key"] == "copilot-application" and tag["value"] == application_name: + app_key_found = True + if tag["key"] == "copilot-environment" and tag["value"] == env: + env_key_found = True + if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": + cluster_key_found = True + + if app_key_found and env_key_found and cluster_key_found: + return cluster_arn + + raise NoClusterError + + +def get_or_create_task_name( + ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str +) -> str: + try: + return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] + except ssm_client.exceptions.ParameterNotFound: + random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) + return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" + + +def get_ecs_task_arns(ecs_client, cluster_arn: str, task_name: str): + + tasks = ecs_client.list_tasks( + cluster=cluster_arn, + 
desiredStatus="RUNNING", + family=f"copilot-{task_name}", + ) + + if not tasks["taskArns"]: + return [] + + return tasks["taskArns"] + + +def ecs_exec_is_available(ecs_client, cluster_arn: str, task_arns: List[str]): + + current_attemps = 0 + execute_command_agent_status = "" + + while execute_command_agent_status != "RUNNING" and current_attemps < 25: + + current_attemps += 1 + + task_details = ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) + + managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] + execute_command_agent_status = [ + agent["lastStatus"] + for agent in managed_agents + if agent["name"] == "ExecuteCommandAgent" + ][0] + + time.sleep(1) + + if execute_command_agent_status != "RUNNING": + raise ECSAgentNotRunning diff --git a/dbt_platform_helper/providers/secrets.py b/dbt_platform_helper/providers/secrets.py index c875f7dfc..feeaf0ae1 100644 --- a/dbt_platform_helper/providers/secrets.py +++ b/dbt_platform_helper/providers/secrets.py @@ -9,77 +9,77 @@ from dbt_platform_helper.exceptions import SecretNotFoundError -class Secrets: - def __init__(self, ssm_client, secrets_manager_client, application_name, env): - self.ssm_client = ssm_client - self.secrets_manager_client = secrets_manager_client - self.application_name = application_name - self.env = env - - def get_postgres_connection_data_updated_with_master_secret(self, parameter_name, secret_arn): - response = self.ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) - parameter_value = response["Parameter"]["Value"] - - parameter_data = json.loads(parameter_value) - - secret_response = self.secrets_manager_client.get_secret_value(SecretId=secret_arn) - secret_value = json.loads(secret_response["SecretString"]) - - parameter_data["username"] = urllib.parse.quote(secret_value["username"]) - parameter_data["password"] = urllib.parse.quote(secret_value["password"]) - - return parameter_data - - def get_connection_secret_arn(self, secret_name: str) -> 
str: - try: - return self.ssm_client.get_parameter(Name=secret_name, WithDecryption=False)[ - "Parameter" - ]["ARN"] - except self.ssm_client.exceptions.ParameterNotFound: - pass - - try: - return self.secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] - except self.secrets_manager_client.exceptions.ResourceNotFoundException: - pass - - raise SecretNotFoundError(secret_name) - - def get_addon_type(self, addon_name: str) -> str: - addon_type = None - try: - addon_config = json.loads( - self.ssm_client.get_parameter( - Name=f"/copilot/applications/{self.application_name}/environments/{self.env}/addons" - )["Parameter"]["Value"] - ) - except self.ssm_client.exceptions.ParameterNotFound: - raise ParameterNotFoundError(self.application_name, self.env) - - if addon_name not in addon_config.keys(): - raise AddonNotFoundError(addon_name) - - for name, config in addon_config.items(): - if name == addon_name: - if not config.get("type"): - raise AddonTypeMissingFromConfigError(addon_name) - addon_type = config["type"] - - if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: - raise InvalidAddonTypeError(addon_type) - - if "postgres" in addon_type: - addon_type = "postgres" - - return addon_type - - def get_parameter_name(self, addon_type: str, addon_name: str, access: str) -> str: - if addon_type == "postgres": - return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}_{access.upper()}" - elif addon_type == "redis" or addon_type == "opensearch": - return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}_ENDPOINT" - else: - return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}" - - def _normalise_secret_name(self, addon_name: str) -> str: - return addon_name.replace("-", "_").upper() +def get_postgres_connection_data_updated_with_master_secret( + ssm_client, secrets_manager_client, parameter_name, secret_arn 
+): + response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) + parameter_value = response["Parameter"]["Value"] + + parameter_data = json.loads(parameter_value) + + secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn) + secret_value = json.loads(secret_response["SecretString"]) + + parameter_data["username"] = urllib.parse.quote(secret_value["username"]) + parameter_data["password"] = urllib.parse.quote(secret_value["password"]) + + return parameter_data + + +def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str: + + try: + return ssm_client.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"] + except ssm_client.exceptions.ParameterNotFound: + pass + + try: + return secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] + except secrets_manager_client.exceptions.ResourceNotFoundException: + pass + + raise SecretNotFoundError(secret_name) + + +def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str: + addon_type = None + try: + addon_config = json.loads( + ssm_client.get_parameter( + Name=f"/copilot/applications/{application_name}/environments/{env}/addons" + )["Parameter"]["Value"] + ) + except ssm_client.exceptions.ParameterNotFound: + raise ParameterNotFoundError + + if addon_name not in addon_config.keys(): + raise AddonNotFoundError + + for name, config in addon_config.items(): + if name == addon_name: + if not config.get("type"): + raise AddonTypeMissingFromConfigError() + addon_type = config["type"] + + if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: + raise InvalidAddonTypeError(addon_type) + + if "postgres" in addon_type: + addon_type = "postgres" + + return addon_type + + +def get_parameter_name( + application_name: str, env: str, addon_type: str, addon_name: str, access: str +) -> str: + if addon_type == "postgres": + return 
f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_{access.upper()}" + elif addon_type == "redis" or addon_type == "opensearch": + return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_ENDPOINT" + else: + return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}" + + +def _normalise_secret_name(addon_name: str) -> str: + return addon_name.replace("-", "_").upper() diff --git a/dbt_platform_helper/utils/application.py b/dbt_platform_helper/utils/application.py index fba00bdab..420689df5 100644 --- a/dbt_platform_helper/utils/application.py +++ b/dbt_platform_helper/utils/application.py @@ -80,7 +80,7 @@ def load_application(app: str = None, default_session: Session = None) -> Applic WithDecryption=False, ) except ssm_client.exceptions.ParameterNotFound: - raise ApplicationNotFoundError(app) + raise ApplicationNotFoundError path = f"/copilot/applications/{application.name}/environments" secrets = get_ssm_secrets(app, None, current_session, path) diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index e7814afa6..e59c2f511 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -499,7 +499,7 @@ def check_codebase_exists(session: Session, application, codebase: str): ssm_client.exceptions.ParameterNotFound, json.JSONDecodeError, ): - raise CopilotCodebaseNotFoundError(codebase) + raise CopilotCodebaseNotFoundError def check_image_exists(session, application, codebase, commit): @@ -513,7 +513,7 @@ def check_image_exists(session, application, codebase, commit): ecr_client.exceptions.RepositoryNotFoundException, ecr_client.exceptions.ImageNotFoundException, ): - raise ImageNotFoundError(commit) + raise ImageNotFoundError def get_build_url_from_arn(build_arn: str) -> str: diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index 2ea4d9c5f..b05f3dcf2 100644 --- 
a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -20,6 +20,7 @@ from dbt_platform_helper.exceptions import ApplicationNotFoundError from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError from dbt_platform_helper.utils.application import Environment from dbt_platform_helper.utils.git import CommitNotFoundError from tests.platform_helper.conftest import EXPECTED_FILES_DIR @@ -146,15 +147,22 @@ def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_direc os.chdir(tmp_path) Path(tmp_path / "copilot").mkdir() - mocks.subprocess.return_value.stdout = mock_suprocess_fixture() + mocks.subprocess.return_value.stderr = mock_suprocess_fixture() - with pytest.raises(NotInCodeBaseRepositoryError): - codebase.prepare() + codebase.prepare() + + mocks.echo_fn.assert_has_calls( + [ + call( + "You are in the deploy repository; make sure you are in the application codebase repository.", + ), + ] + ) def test_codebase_build_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application_fn.side_effect = ApplicationNotFoundError() codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError): @@ -178,7 +186,7 @@ def test_codebase_build_commit_not_found(): codebase.build("not-an-application", "application", "ab1c23d") -def test_codebase_prepare_raises_not_in_codebase_exception(tmp_path): +def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_directory(tmp_path): mocks = CodebaseMocks() mocks.load_application_fn.side_effect = SystemExit(1) @@ -275,9 +283,7 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl def test_codebase_deploy_exception_with_a_nonexistent_codebase(): 
- mocks = CodebaseMocks( - check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) - ) + mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError())) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -291,9 +297,7 @@ def test_codebase_deploy_exception_with_a_nonexistent_codebase(): def test_check_codebase_exists_returns_error_when_no_json(): - mocks = CodebaseMocks( - check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) - ) + mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError)) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -307,9 +311,7 @@ def test_check_codebase_exists_returns_error_when_no_json(): def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): - mocks = CodebaseMocks( - check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) - ) + mocks = CodebaseMocks(check_image_exists_fn=Mock(side_effect=ImageNotFoundError)) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -324,9 +326,7 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): def test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): - mocks = CodebaseMocks( - check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) - ) + mocks = CodebaseMocks(check_image_exists_fn=Mock(side_effect=ImageNotFoundError)) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -374,11 +374,20 @@ def test_codebase_deploy_does_not_trigger_build_without_confirmation(): def test_codebase_deploy_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application_fn.side_effect = ApplicationNotFoundError() codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: 
codebase.deploy("not-an-application", "dev", "application", "ab1c23d") + # TODO This assert can probably go now we are catching the errors and outputting them at the command layer + mocks.echo_fn.assert_has_calls( + [ + call( + """The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", + fg="red", + ), + ] + ) def test_codebase_deploy_does_not_trigger_build_with_missing_environment(mock_application): @@ -414,15 +423,15 @@ def test_codebase_deploy_does_not_trigger_deployment_without_confirmation(): def test_codebase_list_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application_fn.side_effect = ApplicationNotFoundError() codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: codebase.list("not-an-application", True) -def test_codebase_list_returns_empty_when_no_codebases(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock()) +def test_codebase_list_raises_exception_when_no_codebases(): + mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=NoCopilotCodebasesFoundError())) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -430,10 +439,9 @@ def test_codebase_list_returns_empty_when_no_codebases(): "Parameter": {"Value": json.dumps({"name": "application"})}, } - codebase = Codebase(**mocks.params()) - codebase.list("test-application", True) - - mocks.echo_fn.assert_has_calls([]) + with pytest.raises(NoCopilotCodebasesFoundError): + codebase = Codebase(**mocks.params()) + codebase.list("test-application", True) def test_lists_codebases_with_multiple_pages_of_images(): diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 2d499dd47..385566bd7 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ 
b/tests/platform_helper/domain/test_conduit.py @@ -7,7 +7,6 @@ from dbt_platform_helper.domain.conduit import Conduit from dbt_platform_helper.exceptions import AddonNotFoundError from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import InvalidAddonTypeError from dbt_platform_helper.exceptions import NoClusterError from dbt_platform_helper.exceptions import ParameterNotFoundError @@ -193,11 +192,7 @@ def test_conduit_with_task_already_running(): def test_conduit_domain_when_no_cluster_exists(): conduit_mocks = ConduitMocks( - app_name, - addon_type, - get_cluster_arn_fn=Mock( - side_effect=NoClusterError(application_name=app_name, environment=env) - ), + app_name, addon_type, get_cluster_arn_fn=Mock(side_effect=NoClusterError()) ) conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") @@ -214,9 +209,7 @@ def test_conduit_domain_when_no_connection_secret_exists(): app_name, addon_type, get_ecs_task_arns_fn=Mock(return_value=False), - create_addon_client_task_fn=Mock( - side_effect=SecretNotFoundError(f"/copilot/{app_name}/{env}/secrets/{addon_name}") - ), + create_addon_client_task_fn=Mock(side_effect=SecretNotFoundError()), ) conduit = Conduit(**conduit_mocks.params()) @@ -237,13 +230,7 @@ def test_conduit_domain_when_client_task_fails_to_start(): conduit_mocks = ConduitMocks( app_name, addon_type, - connect_to_addon_client_task_fn=Mock( - side_effect=CreateTaskTimeoutError( - addon_name=addon_name, - application_name=app_name, - environment=env, - ) - ), + connect_to_addon_client_task_fn=Mock(side_effect=CreateTaskTimeoutError()), ) conduit = Conduit(**conduit_mocks.params()) ecs_client = conduit.application.environments[env].session.client("ecs") @@ -285,7 +272,7 @@ def test_conduit_domain_when_addon_type_is_invalid(): def test_conduit_domain_when_addon_does_not_exist(): addon_name = 
"addon_doesnt_exist" conduit_mocks = ConduitMocks( - app_name, addon_type, get_addon_type_fn=Mock(side_effect=AddonNotFoundError(addon_name)) + app_name, addon_type, get_addon_type_fn=Mock(side_effect=AddonNotFoundError()) ) conduit = Conduit(**conduit_mocks.params()) @@ -299,14 +286,7 @@ def test_conduit_domain_when_addon_does_not_exist(): def test_conduit_domain_when_no_addon_config_parameter_exists(): addon_name = "parameter_doesnt_exist" conduit_mocks = ConduitMocks( - app_name, - addon_type, - get_addon_type_fn=Mock( - side_effect=ParameterNotFoundError( - application_name=app_name, - environment=env, - ) - ), + app_name, addon_type, get_addon_type_fn=Mock(side_effect=ParameterNotFoundError()) ) conduit = Conduit(**conduit_mocks.params()) @@ -315,25 +295,3 @@ def test_conduit_domain_when_no_addon_config_parameter_exists(): with pytest.raises(ParameterNotFoundError): conduit.start(env, addon_name) conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) - - -def test_conduit_domain_ecs_exec_agent_does_not_start(): - conduit_mocks = ConduitMocks( - app_name, - addon_type, - get_ecs_task_arns_fn=Mock( - return_value=["arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN"] - ), - ecs_exec_is_available_fn=Mock(side_effect=ECSAgentNotRunning()), - ) - conduit = Conduit(**conduit_mocks.params()) - ecs_client = conduit.application.environments[env].session.client("ecs") - - with pytest.raises(ECSAgentNotRunning): - conduit.start(env, addon_name) - - conduit.ecs_exec_is_available_fn.assert_called_once_with( - ecs_client, - "arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1", - ["arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN"], - ) diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index fc48b63c6..22bd2d98f 100644 --- a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -303,7 +303,7 @@ def 
test_database_dump_handles_account_id_errors(is_dump): def test_database_copy_initialization_handles_app_name_errors(): mocks = DataCopyMocks() - mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError("bad-app")) + mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError()) with pytest.raises(SystemExit) as exc: DatabaseCopy("bad-app", "test-db", **mocks.params()) diff --git a/tests/platform_helper/providers/test_cloudformation.py b/tests/platform_helper/providers/test_cloudformation.py index 0dab33905..6d44869f9 100644 --- a/tests/platform_helper/providers/test_cloudformation.py +++ b/tests/platform_helper/providers/test_cloudformation.py @@ -4,12 +4,16 @@ import boto3 import pytest -from botocore.exceptions import WaiterError from cfn_tools import load_yaml from moto import mock_aws -from dbt_platform_helper.exceptions import CloudFormationException -from dbt_platform_helper.providers.cloudformation import CloudFormation +from dbt_platform_helper.providers.cloudformation import ( + add_stack_delete_policy_to_task_role, +) +from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources +from dbt_platform_helper.providers.cloudformation import ( + wait_for_cloudformation_to_reach_status, +) from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @@ -56,17 +60,21 @@ def test_update_conduit_stack_resources( iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - cloudformation = CloudFormation(cloudformation_client, iam_client, ssm_client) - - cloudformation.update_conduit_stack_resources( - mock_application.name, env, addon_type, addon_name, task_name, parameter_name, "read" + update_conduit_stack_resources( + cloudformation_client, + iam_client, + ssm_client, + mock_application.name, + env, + addon_type, + addon_name, + task_name, + parameter_name, + "read", ) 
template = boto3.client("cloudformation").get_template(StackName=f"task-{task_name}") - stack = boto3.client("cloudformation").describe_stacks(StackName=f"task-{task_name}") template_yml = load_yaml(template["TemplateBody"]) - - assert stack["Stacks"][0]["Parameters"][0]["ParameterValue"] == "does-not-matter" assert template_yml["Resources"]["LogGroup"]["DeletionPolicy"] == "Retain" assert template_yml["Resources"]["TaskNameParameter"]["Properties"]["Name"] == parameter_name assert ( @@ -89,7 +97,7 @@ def test_update_conduit_stack_resources( ) @patch("time.sleep", return_value=None) def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, mock_application): - """Test that, given app, env and addon name, + """Test that, given app, env and addon name add_stack_delete_policy_to_task_role adds a policy to the IAM role in a CloudFormation stack.""" @@ -110,9 +118,7 @@ def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, moc ], } - cloudformation = CloudFormation(cloudformation_client, iam_client, None) - - cloudformation.add_stack_delete_policy_to_task_role(task_name) + add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name) stack_resources = boto3.client("cloudformation").list_stack_resources(StackName=stack_name)[ "StackResourceSummaries" @@ -132,41 +138,17 @@ def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, moc assert policy_document == mock_policy -@mock_aws -def test_wait_for_cloudformation_with_no_success_raises_exception(): - cloudformation_client = Mock() - waiter_mock = Mock() - cloudformation_client.get_waiter = Mock(return_value=waiter_mock) - - waiter_error = WaiterError( - "Waiter StackUpdatecomplete failed", - "Fail!!", - {"Stacks": [{"StackStatus": "ROLLBACK_IN_PROGRESS"}]}, - ) - waiter_mock.wait.side_effect = waiter_error - - cloudformation = CloudFormation(cloudformation_client, None, None) - - with pytest.raises( - CloudFormationException, - match="The 
CloudFormation stack 'stack-name' is not in a good state: ROLLBACK_IN_PROGRESS", - ): - cloudformation.wait_for_cloudformation_to_reach_status( - "stack_update_complete", "stack-name" - ) - +def test_wait_for_cloudformation_to_reach_status(): -@mock_aws -def test_wait_for_cloudformation_with_update_complete(): cloudformation_client = Mock() - waiter_mock = Mock() - cloudformation_client.get_waiter = Mock(return_value=waiter_mock) - waiter_mock.wait.return_value = None - - cloudformation = CloudFormation(cloudformation_client, None, None) + mock_return = Mock() + mock_waiter = Mock(return_value=mock_return) + cloudformation_client.get_waiter = mock_waiter - cloudformation.wait_for_cloudformation_to_reach_status("stack_update_complete", "stack-name") - - waiter_mock.wait.assert_called_with( - StackName="stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} + wait_for_cloudformation_to_reach_status( + cloudformation_client, "stack_update_complete", "task-stack-name" + ) + mock_waiter.assert_called() + mock_return.wait.assert_called_with( + StackName="task-stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} ) diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index dd55eb3c9..671b45f14 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -1,3 +1,4 @@ +import json from unittest.mock import Mock from unittest.mock import patch @@ -6,29 +7,53 @@ from botocore.exceptions import ClientError from moto import mock_aws -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import ParameterNotFoundError from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError from 
dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task +from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.providers.secrets import ( + _normalise_secret_name as normalise_secret_name, +) +from dbt_platform_helper.providers.secrets import get_addon_type +from dbt_platform_helper.providers.secrets import get_parameter_name from tests.platform_helper.conftest import NoSuchEntityException +from tests.platform_helper.conftest import add_addon_config_parameter from tests.platform_helper.conftest import expected_connection_secret_name +from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name env = "development" +@pytest.mark.parametrize( + "test_string", + [ + ("app-rds-postgres", "APP_RDS_POSTGRES"), + ("APP-POSTGRES", "APP_POSTGRES"), + ("APP-OpenSearch", "APP_OPENSEARCH"), + ], +) +def test_normalise_secret_name(test_string): + """Test that given an addon name, normalise_secret_name produces the + expected result.""" + + assert normalise_secret_name(test_string[0]) == test_string[1] + + @mock_aws @patch( # Nested function within provider function - "dbt_platform_helper.providers.secrets.Secrets.get_postgres_connection_data_updated_with_master_secret", + "dbt_platform_helper.providers.copilot.get_postgres_connection_data_updated_with_master_secret", return_value="connection string", ) def test_create_postgres_admin_task(mock_update_parameter, mock_application): addon_name = "dummy-postgres" - master_secret_name = ( - f"/copilot/{mock_application.name}/{env}/secrets/DUMMY_POSTGRES_RDS_MASTER_ARN" - ) + master_secret_name = f"/copilot/{mock_application.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" ssm_client = mock_application.environments[env].session.client("ssm") 
secrets_manager_client = mock_application.environments[env].session.client("secretsmanager") @@ -82,10 +107,7 @@ def test_create_postgres_admin_task(mock_update_parameter, mock_application): ("opensearch", "custom-name-opensearch"), ], ) -@patch( - "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", - return_value="test-arn", -) +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") def test_create_redis_or_opensearch_addon_client_task( get_connection_secret_arn, access, @@ -119,6 +141,10 @@ def test_create_redis_or_opensearch_addon_client_task( access, ) + secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) + get_connection_secret_arn.assert_called_once_with( + ssm_client, secretsmanager_client, secret_name + ) mock_subprocess.call.assert_called() mock_subprocess.call.assert_called_once_with( f"copilot task run --app test-application --env {env} " @@ -139,10 +165,7 @@ def test_create_redis_or_opensearch_addon_client_task( "write", ], ) -@patch( - "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", - return_value="test-arn", -) +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") def test_create_postgres_addon_client_task( get_connection_secret_arn, access, @@ -229,10 +252,7 @@ def test_create_postgres_addon_client_task_admin( ) -@patch( - "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", - return_value="test-arn", -) +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( get_connection_secret_arn, mock_application, @@ -284,9 +304,11 @@ def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( ) +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") @patch("click.secho") 
def test_create_addon_client_task_abort_with_message_on_other_exceptions( mock_secho, + get_connection_secret_arn, mock_application, ): """Test that if an unexpected ClientError is throw when trying to get the @@ -331,7 +353,7 @@ def test_create_addon_client_task_abort_with_message_on_other_exceptions( ) -@patch("dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn") +@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn") def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn): """Test that, given app, environment and secret name strings, create_addon_client_task raises a NoConnectionSecretError and does not call @@ -345,9 +367,7 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn ssm_client = mock_application.environments[env].session.client("ssm") secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") - get_connection_secret_arn.side_effect = SecretNotFoundError( - "/copilot/test-application/development/secrets/named-postgres" - ) + get_connection_secret_arn.side_effect = SecretNotFoundError with pytest.raises(SecretNotFoundError): create_addon_client_task( @@ -366,6 +386,35 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn mock_subprocess.call.assert_not_called() +@mock_aws +@pytest.mark.parametrize( + "access", + [ + "read", + "write", + "admin", + ], +) +@pytest.mark.parametrize( + "addon_type, addon_name", + [ + ("postgres", "custom-name-postgres"), + ("postgres", "custom-name-rds-postgres"), + ("redis", "custom-name-redis"), + ("opensearch", "custom-name-opensearch"), + ("s3", "custon-name-s3"), + ], +) +def test_get_parameter_name(access, addon_type, addon_name, mock_application): + """Test that get_parameter_name builds the correct parameter name given the + addon_name, addon_type and permission.""" + + parameter_name = get_parameter_name( + mock_application.name, "development", addon_type, 
addon_name, access + ) + assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) + + @pytest.mark.parametrize( "addon_type", ["postgres", "redis", "opensearch"], @@ -404,6 +453,22 @@ def test_connect_to_addon_client_task(addon_type, mock_application): ) +# Todo: Implement a test to cover the desired behaviour +# @patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) +# def test_connect_to_addon_client_task_waits_for_command_agent(addon_client_is_running, mock_application): +# task_name = mock_task_name("postgres") # Addon type for this test does not matter +# ecs_client = mock_application.environments[env].session.client("ecs") +# mock_subprocess = Mock() +# # We want this to throw InvalidParameterException the first time, then behave as normal +# +# connect_to_addon_client_task( +# ecs_client, mock_subprocess, mock_application.name, env, "test-arn", task_name +# ) +# +# # Assert "Unable to connect, execute command agent probably isn’t running yet" in output +# # If it doesn't bomb out with CreateTaskTimeoutError all is good + + @pytest.mark.parametrize( "addon_type", ["postgres", "redis", "opensearch"], @@ -420,7 +485,7 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( task_name = mock_task_name(addon_type) ecs_client = mock_application.environments[env].session.client("ecs") mock_subprocess = Mock() - get_ecs_task_arns = Mock(return_value=[]) + addon_client_is_running = Mock(return_value=False) with pytest.raises(CreateTaskTimeoutError): connect_to_addon_client_task( @@ -430,9 +495,95 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( env, "test-arn", task_name, - get_ecs_task_arns_fn=get_ecs_task_arns, + addon_client_is_running_fn=addon_client_is_running, ) - get_ecs_task_arns.assert_called_with(ecs_client, "test-arn", task_name) - assert get_ecs_task_arns.call_count == 15 + addon_client_is_running.assert_called_with(ecs_client, 
"test-arn", task_name) + assert addon_client_is_running.call_count == 15 mock_subprocess.call.assert_not_called() + + +@mock_aws +@pytest.mark.parametrize( + "addon_name, expected_type", + [ + ("custom-name-postgres", "postgres"), + ("custom-name-redis", "redis"), + ("custom-name-opensearch", "opensearch"), + ], +) +def test_get_addon_type(addon_name, expected_type, mock_application): + """Test that get_addon_type returns the expected addon type.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + + add_addon_config_parameter() + addon_type = get_addon_type(ssm_client, mock_application.name, env, addon_name) + + assert addon_type == expected_type + + +@mock_aws +def test_get_addon_type_with_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon is not + found in the config file.""" + + add_addon_config_parameter({"different-name": {"type": "redis"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(AddonNotFoundError): + get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon config + parameter is not found.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", + Type="String", + Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), + ) + + with pytest.raises(ParameterNotFoundError): + get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_invalid_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an invalid addon 
type.""" + + add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(InvalidAddonTypeError): + get_addon_type(ssm_client, mock_application.name, env, "invalid-extension") + + +@mock_aws +def test_get_addon_type_with_blank_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"blank-extension": {}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(AddonTypeMissingFromConfigError): + get_addon_type(ssm_client, mock_application.name, env, "blank-extension") + + +@mock_aws +def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) + ssm_client = mock_application.environments[env].session.client("ssm") + + with pytest.raises(AddonTypeMissingFromConfigError): + get_addon_type(ssm_client, mock_application.name, env, "addon-type-unspecified") diff --git a/tests/platform_helper/providers/test_ecs.py b/tests/platform_helper/providers/test_ecs.py index 1bb15b022..2d7c96a55 100644 --- a/tests/platform_helper/providers/test_ecs.py +++ b/tests/platform_helper/providers/test_ecs.py @@ -6,66 +6,55 @@ from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.providers.ecs import ECS +from dbt_platform_helper.providers.ecs import ecs_exec_is_available +from dbt_platform_helper.providers.ecs import get_cluster_arn +from dbt_platform_helper.providers.ecs import get_ecs_task_arns +from dbt_platform_helper.providers.ecs import get_or_create_task_name from tests.platform_helper.conftest 
import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @mock_aws def test_get_cluster_arn(mocked_cluster, mock_application): - ecs_client = mock_application.environments["development"].session.client("ecs") - ssm_client = mock_application.environments["development"].session.client("ssm") - application_name = mock_application.name - env = "development" - ecs_manager = ECS(ecs_client, ssm_client, application_name, env) - - cluster_arn = ecs_manager.get_cluster_arn() - - assert cluster_arn == mocked_cluster["cluster"]["clusterArn"] + assert ( + get_cluster_arn( + mock_application.environments["development"].session.client("ecs"), + mock_application.name, + "development", + ) + == mocked_cluster["cluster"]["clusterArn"] + ) @mock_aws def test_get_cluster_arn_with_no_cluster_raises_error(mock_application): - ecs_client = mock_application.environments["development"].session.client("ecs") - ssm_client = mock_application.environments["development"].session.client("ssm") - application_name = mock_application.name - env = "does-not-exist" - - ecs_manager = ECS(ecs_client, ssm_client, application_name, env) - with pytest.raises(NoClusterError): - ecs_manager.get_cluster_arn() + get_cluster_arn( + mock_application.environments["development"].session.client("ecs"), + mock_application.name, + "does-not-exist", + ) -@mock_aws def test_get_ecs_task_arns_with_running_task( mock_cluster_client_task, mocked_cluster, mock_application ): + addon_type = "redis" mock_cluster_client_task(addon_type) mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] ecs_client = mock_application.environments["development"].session.client("ecs") - ecs_manager = ECS( - ecs_client, - mock_application.environments["development"].session.client("ssm"), - mock_application.name, - "development", - ) - assert ecs_manager.get_ecs_task_arns(mocked_cluster_arn, mock_task_name(addon_type)) + + assert get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) 
-@mock_aws
 def test_get_ecs_task_arns_with_no_running_task(mocked_cluster, mock_application):
+    addon_type = "opensearch"
     mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"]
     ecs_client = mock_application.environments["development"].session.client("ecs")
-    ecs_manager = ECS(
-        ecs_client,
-        mock_application.environments["development"].session.client("ssm"),
-        mock_application.name,
-        "development",
-    )
-    assert len(ecs_manager.get_ecs_task_arns(mocked_cluster_arn, mock_task_name(addon_type))) == 0
+
+    assert len(get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type))) == 0
 
 
 @mock_aws
@@ -76,6 +65,8 @@ def test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_applicatio
     ec2 = boto3.resource("ec2")
     vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
     subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18")
+
+    # create unrelated task
     mocked_task_definition_arn = ecs_client.register_task_definition(
         family=f"other-task",
         requiresCompatibilities=["FARGATE"],
@@ -100,51 +91,43 @@ def test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_applicatio
             }
         },
     )
-    ecs_manager = ECS(
-        ecs_client,
-        mock_application.environments["development"].session.client("ssm"),
-        mock_application.name,
-        "development",
-    )
-    assert len(ecs_manager.get_ecs_task_arns(cluster_arn, task_name)) == 0
+
+    assert len(get_ecs_task_arns(ecs_client, cluster_arn, task_name)) == 0
 
 
-@mock_aws
 def test_ecs_exec_is_available(mock_cluster_client_task, mocked_cluster, mock_application):
+
+    # use mock ecs_client as describe_tasks is overridden
     mocked_ecs_client = mock_cluster_client_task("postgres")
     mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"]
-    ecs_manager = ECS(
+
+    ecs_exec_is_available(
         mocked_ecs_client,
-        mock_application.environments["development"].session.client("ssm"),
-        mock_application.name,
-        "development",
-    )
-    ecs_exec_is_available(
-        mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"]
+        mocked_cluster_arn,
+        ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"],
     )
 
 
 @patch("time.sleep", return_value=None)
-@mock_aws
-def test_ecs_exec_is_available_with_exec_not_running_raises_exception(
+def test_ecs_exec_is_available_with_exec_not_running_raises_exception(
     sleep, mock_cluster_client_task, mocked_cluster, mock_application
 ):
+
+    # use mock ecs_client as describe_tasks is overridden
     mocked_ecs_client = mock_cluster_client_task("postgres", "PENDING")
     mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"]
-    ecs_manager = ECS(
-        mocked_ecs_client,
-        mock_application.environments["development"].session.client("ssm"),
-        mock_application.name,
-        "development",
-    )
+
     with pytest.raises(ECSAgentNotRunning):
-        ecs_manager.ecs_exec_is_available(
-            mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"]
+        ecs_exec_is_available(
+            mocked_ecs_client,
+            mocked_cluster_arn,
+            ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"],
         )
 
 
 @mock_aws
 def test_get_or_create_task_name(mock_application):
+
     addon_name = "app-postgres"
     parameter_name = mock_parameter_name(mock_application, "postgres", addon_name)
     mock_application.environments["development"].session.client("ssm")
@@ -154,24 +137,23 @@ def test_get_or_create_task_name(mock_application):
         Type="String",
         Value=mock_task_name(addon_name),
     )
-    ecs_manager = ECS(
-        mock_application.environments["development"].session.client("ecs"),
-        mock_ssm,
-        mock_application.name,
-        "development",
+
+    task_name = get_or_create_task_name(
+        mock_ssm, mock_application.name, "development", addon_name, parameter_name
     )
-    task_name = ecs_manager.get_or_create_task_name(addon_name, parameter_name)
+
     assert task_name == mock_task_name(addon_name)
 
 
 @mock_aws
 def test_get_or_create_task_name_appends_random_id(mock_application):
+
     addon_name = "app-postgres"
     ssm_client =
mock_application.environments["development"].session.client("ssm") parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - ecs_manager = ECS(ssm_client, ssm_client, mock_application.name, "development") - - task_name = ecs_manager.get_or_create_task_name(addon_name, parameter_name) + task_name = get_or_create_task_name( + ssm_client, mock_application.name, "development", addon_name, parameter_name + ) random_id = task_name.rsplit("-", 1)[1] assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] diff --git a/tests/platform_helper/providers/test_secrets.py b/tests/platform_helper/providers/test_secrets.py index 43f716398..cea50b10b 100644 --- a/tests/platform_helper/providers/test_secrets.py +++ b/tests/platform_helper/providers/test_secrets.py @@ -1,38 +1,14 @@ -import json - import boto3 import pytest from moto import mock_aws -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.exceptions import SecretNotFoundError -from dbt_platform_helper.providers.secrets import Secrets -from tests.platform_helper.conftest import add_addon_config_parameter -from tests.platform_helper.conftest import mock_parameter_name - -env = "development" - - -@pytest.mark.parametrize( - "test_string", - [ - ("app-rds-postgres", "APP_RDS_POSTGRES"), - ("APP-POSTGRES", "APP_POSTGRES"), - ("APP-OpenSearch", "APP_OPENSEARCH"), - ], +from dbt_platform_helper.providers.copilot import ( + get_postgres_connection_data_updated_with_master_secret, ) -def test_normalise_secret_name(test_string, mock_application): - """Test that given an addon name, normalise_secret_name produces the - expected result.""" +from dbt_platform_helper.providers.secrets import SecretNotFoundError +from 
dbt_platform_helper.providers.secrets import get_connection_secret_arn - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - assert secrets_manager._normalise_secret_name(test_string[0]) == test_string[1] +env = "development" @mock_aws @@ -50,9 +26,7 @@ def test_get_connection_secret_arn_from_secrets_manager(mock_application): ssm_client = mock_application.environments[env].session.client("ssm") secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - arn = secrets_manager.get_connection_secret_arn(secret_name) + arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) assert arn.startswith( "arn:aws:secretsmanager:eu-west-2:123456789012:secret:" @@ -75,9 +49,7 @@ def test_get_connection_secret_arn_from_parameter_store(mock_application): Type="SecureString", ) - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - arn = secrets_manager.get_connection_secret_arn(secret_name) + arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) assert ( arn @@ -93,19 +65,18 @@ def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): ssm_client = mock_application.environments[env].session.client("ssm") secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(SecretNotFoundError) as ex: - secrets_manager.get_connection_secret_arn("POSTGRES") + with pytest.raises(SecretNotFoundError): + get_connection_secret_arn(ssm_client, secrets_client, "POSTGRES") @mock_aws -def test_update_postgres_parameter_with_master_secret(mock_application): +def 
test_update_postgres_parameter_with_master_secret(): session = boto3.session.Session() parameter_name = "test-parameter" ssm_client = session.client("ssm") - session.client("secretsmanager") + secretsmanager_client = session.client("secretsmanager") ssm_client.put_parameter( Name=parameter_name, Value='{"username": "read-only-user", "password": ">G12345", "host": "test.com", "port": 5432}', @@ -115,14 +86,8 @@ def test_update_postgres_parameter_with_master_secret(mock_application): Name="master-secret", SecretString='{"username": "postgres", "password": ">G6789"}' )["ARN"] - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - updated_parameter_value = ( - secrets_manager.get_postgres_connection_data_updated_with_master_secret( - parameter_name, secret_arn - ) + updated_parameter_value = get_postgres_connection_data_updated_with_master_secret( + ssm_client, secretsmanager_client, parameter_name, secret_arn ) assert updated_parameter_value == { @@ -131,131 +96,3 @@ def test_update_postgres_parameter_with_master_secret(mock_application): "host": "test.com", "port": 5432, } - - -@mock_aws -@pytest.mark.parametrize( - "addon_name, expected_type", - [ - ("custom-name-postgres", "postgres"), - ("custom-name-redis", "redis"), - ("custom-name-opensearch", "opensearch"), - ], -) -def test_get_addon_type(addon_name, expected_type, mock_application): - """Test that get_addon_type returns the expected addon type.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - add_addon_config_parameter() - addon_type = secrets_manager.get_addon_type(addon_name) - - assert addon_type == expected_type - - 
-@mock_aws -def test_get_addon_type_with_not_found_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the addon is not - found in the config file.""" - - add_addon_config_parameter({"different-name": {"type": "redis"}}) - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - with pytest.raises(AddonNotFoundError): - secrets_manager.get_addon_type("custom-name-postgres") - - -@mock_aws -def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the addon config - parameter is not found.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", - Type="String", - Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), - ) - - with pytest.raises(ParameterNotFoundError): - secrets_manager.get_addon_type("custom-name-postgres") - - -@mock_aws -def test_get_addon_type_with_invalid_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an invalid addon type.""" - - add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - with pytest.raises(InvalidAddonTypeError): - 
secrets_manager.get_addon_type("invalid-extension") - - -@mock_aws -def test_get_addon_type_with_blank_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an empty addon type.""" - - add_addon_config_parameter(param_value={"blank-extension": {}}) - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - with pytest.raises(AddonTypeMissingFromConfigError): - secrets_manager.get_addon_type("blank-extension") - - -@mock_aws -def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an empty addon type.""" - - add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - - with pytest.raises(AddonTypeMissingFromConfigError): - secrets_manager.get_addon_type("addon-type-unspecified") - - -@mock_aws -@pytest.mark.parametrize( - "access", - [ - "read", - "write", - "admin", - ], -) -@pytest.mark.parametrize( - "addon_type, addon_name", - [ - ("postgres", "custom-name-postgres"), - ("postgres", "custom-name-rds-postgres"), - ("redis", "custom-name-redis"), - ("opensearch", "custom-name-opensearch"), - ("s3", "custon-name-s3"), - ], -) -def test_get_parameter_name(access, addon_type, addon_name, mock_application): - """Test that get_parameter_name builds the correct parameter name given the - addon_name, addon_type and permission.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - secrets_client = 
mock_application.environments[env].session.client("secretsmanager") - secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - parameter_name = secrets_manager.get_parameter_name(addon_type, addon_name, access) - assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) diff --git a/tests/platform_helper/test_command_codebase.py b/tests/platform_helper/test_command_codebase.py index 894b746f7..137b4c038 100644 --- a/tests/platform_helper/test_command_codebase.py +++ b/tests/platform_helper/test_command_codebase.py @@ -45,6 +45,8 @@ def test_aborts_when_not_in_a_codebase_repository(self, mock_click, mock_codebas result = CliRunner().invoke(prepare_command) + expected_message = "You are in the deploy repository; make sure you are in the application codebase repository." + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -70,6 +72,8 @@ def test_codebase_build_does_not_trigger_build_without_an_application( "ab1c23d", ], ) + expected_message = f"""The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -97,7 +101,8 @@ def test_codebase_build_aborts_with_a_nonexistent_commit_hash( mock_codebase_object_instance.build.assert_called_once_with( "test-application", "application", "nonexistent-commit-hash" ) - + expected_message = f"""The commit hash "nonexistent-commit-hash" either does not exist or you need to run `git fetch`.""" + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -151,6 +156,8 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository_or_image_tag mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "development", "application", "nonexistent-commit-hash" ) + expected_message = f"""The commit hash 
"nonexistent-commit-hash" has not been built into an image, try the `platform-helper codebase build` command first.""" + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -179,6 +186,8 @@ def test_codebase_deploy_does_not_trigger_build_without_an_application( mock_codebase_object_instance.deploy.assert_called_once_with( "not-an-application", "dev", "application", "ab1c23d" ) + expected_message = f"""The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -207,6 +216,8 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_environment( mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "not-an-environment", "application", "ab1c23d" ) + expected_message = f"""The environment "not-an-environment" either does not exist or has not been deployed.""" + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -235,6 +246,10 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_codebase( mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "test-environment", "not-a-codebase", "ab1c23d" ) + expected_message = ( + f"""The codebase "not-a-codebase" either does not exist or has not been deployed.""" + ) + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -258,6 +273,8 @@ def test_list_aborts_when_application_has_no_codebases(self, mock_click, mock_co result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) + expected_message = f"""No codebases found for application "test-application""" + 
mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -269,6 +286,9 @@ def test_aborts_when_application_does_not_exist(self, mock_click, mock_codebase_ result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) + app = "test-application" + expected_message = f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" + mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 diff --git a/tests/platform_helper/test_command_conduit.py b/tests/platform_helper/test_command_conduit.py index 880486335..c5c9990b5 100644 --- a/tests/platform_helper/test_command_conduit.py +++ b/tests/platform_helper/test_command_conduit.py @@ -5,7 +5,13 @@ from click.testing import CliRunner from dbt_platform_helper.commands.conduit import conduit -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError +from dbt_platform_helper.providers.secrets import SecretNotFoundError @pytest.mark.parametrize( @@ -46,6 +52,42 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida mock_conduit_instance.start.assert_called_with("development", addon_name, "read") +@pytest.mark.parametrize( + "exception_type,exception_input_params,expected_message", + [ + ( + SecretNotFoundError, + {}, + """No secret called "" for "test-application" in "development" environment.""", + ), + (AddonNotFoundError, {}, """Addon "important-db" does 
not exist."""),
+        (
+            CreateTaskTimeoutError,
+            {},
+            """Client (important-db) ECS task has failed to start for "test-application" in "development" environment.""",
+        ),
+        (
+            NoClusterError,
+            {},
+            """No ECS cluster found for "test-application" in "development" environment.""",
+        ),
+        (
+            ParameterNotFoundError,
+            {},
+            """No parameter called "/copilot/applications/test-application/environments/development/addons". Try deploying the "test-application" "development" environment.""",
+        ),
+        (
+            InvalidAddonTypeError,
+            {"addon_type": "fake-postgres"},
+            """Addon type "fake-postgres" is not supported, we support: opensearch, postgres, redis.""",
+        ),
+        (
+            AddonTypeMissingFromConfigError,
+            {},
+            """The configuration for the addon important-db, is missconfigured and missing the addon type.""",
+        ),
+    ],
+)
 @patch("dbt_platform_helper.commands.conduit.Conduit")
 @patch(
     "dbt_platform_helper.utils.versioning.running_as_installed_package",
@@ -53,15 +95,20 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida
 )
 @patch("dbt_platform_helper.commands.conduit.load_application")
 @patch("click.secho")
-def test_start_conduit_with_exception_raised_exit_1(
+def test_start_conduit_exception_is_raised(
    mock_click,
    mock_application,
    mock_conduit_object,
    validate_version,
+    exception_type,
+    exception_input_params,
+    expected_message,
 ):
+    """Test that a platform exception raised by Conduit.start makes the conduit
+    command report that exception's message via click.secho and exit with 1."""
    mock_conduit_instance = mock_conduit_object.return_value
-    mock_conduit_instance.start.side_effect = SecretNotFoundError(secret_name="test-secret")
+    mock_conduit_instance.start.side_effect = exception_type(**exception_input_params)
    addon_name = "important-db"
    result = CliRunner().invoke(
        conduit,
        [
            addon_name,
            "--app",
            "test-application",
            "--env",
            "development",
        ],
    )
 
-    mock_click.assert_called_with("""No secret called "test-secret".""", fg="red")
+    
mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 diff --git a/tests/platform_helper/test_exceptions.py b/tests/platform_helper/test_exceptions.py deleted file mode 100644 index 7c7d7a8d3..000000000 --- a/tests/platform_helper/test_exceptions.py +++ /dev/null @@ -1,110 +0,0 @@ -import os - -import pytest - -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError -from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.exceptions import SecretNotFoundError - - -@pytest.mark.parametrize( - "exception, exception_params, expected_message", - [ - ( - AddonNotFoundError, - {"addon_name": "test-addon"}, - """Addon "test-addon" does not exist.""", - ), - ( - AddonTypeMissingFromConfigError, - {"addon_name": "test-addon"}, - """The configuration for the addon test-addon, is misconfigured and missing the addon type.""", - ), - ( - ApplicationDeploymentNotTriggered, - {"codebase": "test-codebase"}, - """Your deployment for test-codebase was not triggered.""", - ), - ( - ApplicationEnvironmentNotFoundError, - {"environment": "development"}, - """The environment "development" 
either does not exist or has not been deployed.""", - ), - ( - ApplicationNotFoundError, - {"application_name": "test-application"}, - """The account "foo" does not contain the application "test-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - ), - ( - CopilotCodebaseNotFoundError, - {"codebase": "test-codebase-exists"}, - """The codebase "test-codebase-exists" either does not exist or has not been deployed.""", - ), - ( - CreateTaskTimeoutError, - { - "addon_name": "test-addon", - "application_name": "test-application", - "environment": "environment", - }, - """Client (test-addon) ECS task has failed to start for "test-application" in "environment" environment.""", - ), - ( - InvalidAddonTypeError, - {"addon_type": "test-addon-type"}, - """Addon type "test-addon-type" is not supported, we support: opensearch, postgres, redis.""", - ), - ( - ImageNotFoundError, - {"commit": "test-commit-hash"}, - """The commit hash "test-commit-hash" has not been built into an image, try the `platform-helper codebase build` command first.""", - ), - ( - NoCopilotCodebasesFoundError, - {"application_name": "test-application"}, - """No codebases found for application "test-application".""", - ), - ( - NoClusterError, - {"application_name": "test-application", "environment": "environment"}, - """No ECS cluster found for "test-application" in "environment" environment.""", - ), - ( - NotInCodeBaseRepositoryError, - {}, - """You are in the deploy repository; make sure you are in the application codebase repository.""", - ), - ( - ParameterNotFoundError, - {"application_name": "test-application", "environment": "environment"}, - """No parameter called "/copilot/applications/test-application/environments/environment/addons". 
Try deploying the "test-application" "environment" environment.""", - ), - ( - SecretNotFoundError, - {"secret_name": "test-secret"}, - """No secret called "test-secret".""", - ), - ( - ECSAgentNotRunning, - {}, - """ECS exec agent never reached "RUNNING" status""", - ), - ], -) -def test_exception_message(exception, exception_params, expected_message): - os.environ["AWS_PROFILE"] = "foo" - - exception = exception(**exception_params) - assert str(exception) == expected_message From 7d35599533b55f15fb08801c50ce538a8a32b847 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Tue, 3 Dec 2024 16:09:05 +0000 Subject: [PATCH 22/38] feat: DBTP-1299 - Cross account database copy (#657) --- dbt_platform_helper/domain/database_copy.py | 23 ++- dbt_platform_helper/exceptions.py | 4 + dbt_platform_helper/utils/aws.py | 22 +++ dbt_platform_helper/utils/validation.py | 41 ++++- images/tools/database-copy/entrypoint.sh | 15 +- tests/platform_helper/conftest.py | 11 +- .../domain/test_database_copy.py | 58 ++++-- tests/platform_helper/utils/test_aws.py | 19 ++ .../platform_helper/utils/test_validation.py | 169 ++++++++++++++---- 9 files changed, 283 insertions(+), 79 deletions(-) diff --git a/dbt_platform_helper/domain/database_copy.py b/dbt_platform_helper/domain/database_copy.py index a84cbc806..3aef077f5 100644 --- a/dbt_platform_helper/domain/database_copy.py +++ b/dbt_platform_helper/domain/database_copy.py @@ -15,6 +15,8 @@ from dbt_platform_helper.utils.aws import Vpc from dbt_platform_helper.utils.aws import get_connection_string from dbt_platform_helper.utils.aws import get_vpc_info_by_name +from dbt_platform_helper.utils.aws import wait_for_log_group_to_exist +from dbt_platform_helper.utils.files import apply_environment_defaults from dbt_platform_helper.utils.messages import abort_with_error from dbt_platform_helper.utils.validation import load_and_validate_platform_config @@ -59,7 +61,7 @@ def __init__( except ApplicationNotFoundError: abort_fn(f"No such application 
'{app}'.") - def _execute_operation(self, is_dump: bool, env: str, vpc_name: str): + def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str): vpc_name = self.enrich_vpc_name(env, vpc_name) environments = self.application.environments @@ -87,7 +89,7 @@ def _execute_operation(self, is_dump: bool, env: str, vpc_name: str): try: task_arn = self.run_database_copy_task( - env_session, env, vpc_config, is_dump, db_connection_string + env_session, env, vpc_config, is_dump, db_connection_string, to_env ) except Exception as exc: self.abort_fn(f"{exc} (Account id: {self.account_id(env)})") @@ -111,7 +113,8 @@ def enrich_vpc_name(self, env, vpc_name): "You must either be in a deploy repo, or provide the vpc name option." ) config = load_and_validate_platform_config() - vpc_name = config.get("environments", {}).get(env, {}).get("vpc") + env_config = apply_environment_defaults(config)["environments"] + vpc_name = env_config.get(env, {}).get("vpc") return vpc_name def run_database_copy_task( @@ -121,12 +124,14 @@ def run_database_copy_task( vpc_config: Vpc, is_dump: bool, db_connection_string: str, + to_env: str, ) -> str: client = session.client("ecs") action = "dump" if is_dump else "load" env_vars = [ {"name": "DATA_COPY_OPERATION", "value": action.upper()}, {"name": "DB_CONNECTION_STRING", "value": db_connection_string}, + {"name": "TO_ENVIRONMENT", "value": to_env}, ] if not is_dump: env_vars.append({"name": "ECS_CLUSTER", "value": f"{self.app}-{env}"}) @@ -156,12 +161,12 @@ def run_database_copy_task( return response.get("tasks", [{}])[0].get("taskArn") - def dump(self, env: str, vpc_name: str): - self._execute_operation(True, env, vpc_name) + def dump(self, env: str, vpc_name: str, to_env: str): + self._execute_operation(True, env, vpc_name, to_env) def load(self, env: str, vpc_name: str): if self.is_confirmed_ready_to_load(env): - self._execute_operation(False, env, vpc_name) + self._execute_operation(False, env, vpc_name, to_env=env) def copy( 
self, @@ -176,7 +181,7 @@ def copy( to_vpc = self.enrich_vpc_name(to_env, to_vpc) if not no_maintenance_page: self.maintenance_page_provider.activate(self.app, to_env, services, template, to_vpc) - self.dump(from_env, from_vpc) + self.dump(from_env, from_vpc, to_env) self.load(to_env, to_vpc) if not no_maintenance_page: self.maintenance_page_provider.deactivate(self.app, to_env) @@ -196,7 +201,9 @@ def tail_logs(self, is_dump: bool, env: str): log_group_arn = f"arn:aws:logs:eu-west-2:{self.account_id(env)}:log-group:{log_group_name}" self.echo_fn(f"Tailing {log_group_name} logs", fg="yellow") session = self.application.environments[env].session - response = session.client("logs").start_live_tail(logGroupIdentifiers=[log_group_arn]) + log_client = session.client("logs") + wait_for_log_group_to_exist(log_client, log_group_name) + response = log_client.start_live_tail(logGroupIdentifiers=[log_group_arn]) stopped = False for data in response["responseStream"]: diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py index 2917d1059..edf4f057f 100644 --- a/dbt_platform_helper/exceptions.py +++ b/dbt_platform_helper/exceptions.py @@ -79,3 +79,7 @@ class SecretNotFoundError(AWSException): class ECSAgentNotRunning(AWSException): pass + + +class ResourceNotFoundException(AWSException): + pass diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index e59c2f511..ac3239a0e 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -1,5 +1,6 @@ import json import os +import time import urllib.parse from configparser import ConfigParser from pathlib import Path @@ -15,6 +16,7 @@ from dbt_platform_helper.exceptions import AWSException from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.exceptions import ResourceNotFoundException from dbt_platform_helper.exceptions import ValidationException 
from dbt_platform_helper.utils.files import cache_refresh_required from dbt_platform_helper.utils.files import read_supported_versions_from_cache @@ -555,3 +557,23 @@ def list_latest_images(ecr_client, ecr_repository_name, codebase_repository, ech ) except StopIteration: continue + + +def wait_for_log_group_to_exist(log_client, log_group_name, attempts=30): + current_attempts = 0 + log_group_exists = False + + while not log_group_exists and current_attempts < attempts: + current_attempts += 1 + + log_group_response = log_client.describe_log_groups(logGroupNamePrefix=log_group_name) + log_groups = log_group_response.get("logGroups", []) + + for group in log_groups: + if group["logGroupName"] == log_group_name: + log_group_exists = True + + time.sleep(1) + + if not log_group_exists: + raise ResourceNotFoundException diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 809c3acda..cbe06e3b1 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -210,7 +210,12 @@ def is_between(value): }, ) -DATABASE_COPY = {"from": ENV_NAME, "to": ENV_NAME} +DATABASE_COPY = { + "from": ENV_NAME, + "to": ENV_NAME, + Optional("from_account"): str, + Optional("to_account"): str, +} POSTGRES_DEFINITION = { "type": "postgres", @@ -638,6 +643,9 @@ def validate_database_copy_section(config): from_env = section["from"] to_env = section["to"] + from_account = _get_env_deploy_account_info(config, from_env, "id") + to_account = _get_env_deploy_account_info(config, to_env, "id") + if from_env == to_env: errors.append( f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'." @@ -658,10 +666,33 @@ def validate_database_copy_section(config): f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'." 
) + if from_account != to_account: + if "from_account" not in section: + errors.append( + f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'from_account' parameter must be present." + ) + elif section["from_account"] != from_account: + errors.append( + f"Incorrect value for 'from_account' for environment '{from_env}'" + ) + + if "to_account" not in section: + errors.append( + f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'to_account' parameter must be present." + ) + elif section["to_account"] != to_account: + errors.append(f"Incorrect value for 'to_account' for environment '{to_env}'") + if errors: abort_with_error("\n".join(errors)) +def _get_env_deploy_account_info(config, env, key): + return ( + config.get("environments", {}).get(env, {}).get("accounts", {}).get("deploy", {}).get(key) + ) + + def _validate_environment_pipelines(config): bad_pipelines = {} for pipeline_name, pipeline in config.get("environment_pipelines", {}).items(): @@ -669,13 +700,7 @@ def _validate_environment_pipelines(config): pipeline_account = pipeline.get("account", None) if pipeline_account: for env in pipeline.get("environments", {}).keys(): - env_account = ( - config.get("environments", {}) - .get(env, {}) - .get("accounts", {}) - .get("deploy", {}) - .get("name") - ) + env_account = _get_env_deploy_account_info(config, env, "name") if not env_account == pipeline_account: bad_envs.append(env) if bad_envs: diff --git a/images/tools/database-copy/entrypoint.sh b/images/tools/database-copy/entrypoint.sh index 71b4bd6df..4dd56c8a3 100644 --- a/images/tools/database-copy/entrypoint.sh +++ b/images/tools/database-copy/entrypoint.sh @@ -2,13 +2,14 @@ clean_up(){ echo "Cleaning up dump file" - rm data_dump.sql + rm "data_dump_${TO_ENVIRONMENT}.sql" echo "Removing dump file from S3" - aws s3 rm s3://${S3_BUCKET_NAME}/data_dump.sql + aws s3 rm s3://${S3_BUCKET_NAME}/"data_dump_${TO_ENVIRONMENT}.sql" + exit_code=$? 
if [ ${exit_code} -ne 0 ] then echo "Aborting data load: Clean up failed" - exit $exit_code + exit $exit_code fi } @@ -26,7 +27,7 @@ handle_errors(){ if [ "${DATA_COPY_OPERATION:-DUMP}" != "LOAD" ] then echo "Starting data dump" - pg_dump --no-owner --no-acl --format c "${DB_CONNECTION_STRING}" > data_dump.sql + pg_dump --no-owner --no-acl --format c "${DB_CONNECTION_STRING}" > "data_dump_${TO_ENVIRONMENT}.sql" exit_code=$? if [ ${exit_code} -ne 0 ] @@ -35,7 +36,7 @@ then exit $exit_code fi - aws s3 cp data_dump.sql s3://${S3_BUCKET_NAME}/ + aws s3 cp "data_dump_${TO_ENVIRONMENT}.sql" s3://${S3_BUCKET_NAME}/ exit_code=$? if [ ${exit_code} -ne 0 ] @@ -49,7 +50,7 @@ else echo "Starting data load" echo "Copying data dump from S3" - aws s3 cp s3://${S3_BUCKET_NAME}/data_dump.sql data_dump.sql + aws s3 cp s3://${S3_BUCKET_NAME}/"data_dump_${TO_ENVIRONMENT}.sql" "data_dump_${TO_ENVIRONMENT}.sql" handle_errors $? "Copy failed" @@ -81,7 +82,7 @@ else handle_errors $? "Clear down failed" echo "Restoring data from dump file" - pg_restore --format c --dbname "${DB_CONNECTION_STRING}" data_dump.sql + pg_restore --format c --dbname "${DB_CONNECTION_STRING}" "data_dump_${TO_ENVIRONMENT}.sql" handle_errors $? 
"Restore failed" for service in ${SERVICES} diff --git a/tests/platform_helper/conftest.py b/tests/platform_helper/conftest.py index 783fb7dc1..95a0d23b6 100644 --- a/tests/platform_helper/conftest.py +++ b/tests/platform_helper/conftest.py @@ -430,6 +430,15 @@ def valid_platform_config(): versions: terraform-platform-modules: 1.2.3 staging: + hotfix: + accounts: + deploy: + name: "prod-acc" + id: "9999999999" + dns: + name: "non-prod-dns-acc" + id: "6677889900" + vpc: hotfix-vpc prod: accounts: deploy: @@ -466,7 +475,7 @@ def valid_platform_config(): deletion_policy: Retain database_copy: - from: prod - to: staging + to: hotfix test-app-opensearch: type: opensearch diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index 22bd2d98f..4bb2b6853 100644 --- a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -58,7 +58,7 @@ def test_run_database_copy_task(is_dump, exp_operation): mock_client.run_task.return_value = {"tasks": [{"taskArn": "arn:aws:ecs:test-task-arn"}]} actual_task_arn = db_copy.run_database_copy_task( - mock_session, "test-env", vpc, is_dump, db_connection_string + mock_session, "test-env", vpc, is_dump, db_connection_string, "test-env" ) assert actual_task_arn == "arn:aws:ecs:test-task-arn" @@ -67,6 +67,7 @@ def test_run_database_copy_task(is_dump, exp_operation): expected_env_vars = [ {"name": "DATA_COPY_OPERATION", "value": exp_operation.upper()}, {"name": "DB_CONNECTION_STRING", "value": "connection_string"}, + {"name": "TO_ENVIRONMENT", "value": "test-env"}, ] if not is_dump: expected_env_vars.append( @@ -116,7 +117,7 @@ def test_database_dump(): db_copy.enrich_vpc_name = Mock() db_copy.enrich_vpc_name.return_value = "test-vpc-override" - db_copy.dump(env, vpc_name) + db_copy.dump(env, vpc_name, "test-env") mocks.load_application_fn.assert_called_once() mocks.vpc_config_fn.assert_called_once_with( @@ -126,11 +127,7 @@ def 
test_database_dump(): mocks.environment.session, app, env, "test-app-test-env-test-db" ) mock_run_database_copy_task.assert_called_once_with( - mocks.environment.session, - env, - mocks.vpc, - True, - "test-db-connection-string", + mocks.environment.session, env, mocks.vpc, True, "test-db-connection-string", "test-env" ) mocks.input_fn.assert_not_called() mocks.echo_fn.assert_has_calls( @@ -173,11 +170,7 @@ def test_database_load_with_response_of_yes(): ) mock_run_database_copy_task.assert_called_once_with( - mocks.environment.session, - env, - mocks.vpc, - False, - "test-db-connection-string", + mocks.environment.session, env, mocks.vpc, False, "test-db-connection-string", "test-env" ) mocks.input_fn.assert_called_once_with( @@ -237,7 +230,7 @@ def test_database_dump_handles_vpc_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("test-env", "bad-vpc-name") + db_copy.dump("test-env", "bad-vpc-name", "test-env") else: db_copy.load("test-env", "bad-vpc-name") @@ -254,7 +247,7 @@ def test_database_dump_handles_db_name_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("test-env", "vpc-name") + db_copy.dump("test-env", "vpc-name", "test-env") else: db_copy.load("test-env", "vpc-name") @@ -272,7 +265,7 @@ def test_database_dump_handles_env_name_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("bad-env", "vpc-name") + db_copy.dump("bad-env", "vpc-name", "test-env") else: db_copy.load("bad-env", "vpc-name") @@ -293,7 +286,7 @@ def test_database_dump_handles_account_id_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("test-env", "vpc-name") + db_copy.dump("test-env", "vpc-name", "test-env") else: db_copy.load("test-env", "vpc-name") @@ -372,7 +365,7 @@ def test_copy_command(services, template): mocks.maintenance_page_provider.activate.assert_called_once_with( "test-app", "test-to-env", services, template, "test-vpc-override" ) - 
db_copy.dump.assert_called_once_with("test-from-env", "test-from-vpc") + db_copy.dump.assert_called_once_with("test-from-env", "test-from-vpc", "test-to-env") db_copy.load.assert_called_once_with("test-to-env", "test-vpc-override") mocks.maintenance_page_provider.deactivate.assert_called_once_with("test-app", "test-to-env") @@ -418,6 +411,10 @@ def test_tail_logs(is_dump): ] } + mocks.client.describe_log_groups.return_value = { + "logGroups": [{"logGroupName": f"/ecs/test-app-test-env-test-db-{action}"}] + } + db_copy = DatabaseCopy("test-app", "test-db", **mocks.params()) db_copy.tail_logs(is_dump, "test-env") @@ -458,6 +455,10 @@ def test_tail_logs_exits_with_error_if_task_aborts(is_dump): ] } + mocks.client.describe_log_groups.return_value = { + "logGroups": [{"logGroupName": f"/ecs/test-app-test-env-test-db-{action}"}] + } + db_copy = DatabaseCopy("test-app", "test-db", **mocks.params()) with pytest.raises(SystemExit) as exc: @@ -518,7 +519,7 @@ def test_database_dump_with_no_vpc_works_in_deploy_repo(fs, is_dump): db_copy.tail_logs = Mock() if is_dump: - db_copy.dump(env, None) + db_copy.dump(env, None, "test-env") else: db_copy.load(env, None) @@ -544,7 +545,7 @@ def test_database_dump_with_no_vpc_fails_if_not_in_deploy_repo(fs, is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump(env, None) + db_copy.dump(env, None, "test-env") else: db_copy.load(env, None) @@ -588,3 +589,22 @@ def test_enrich_vpc_name_enriches_vpc_name_from_platform_config(fs): vpc_name = db_copy.enrich_vpc_name("test-env", None) assert vpc_name == "test-env-vpc" + + +def test_enrich_vpc_name_enriches_vpc_name_from_environment_defaults(fs): + # fakefs used here to ensure the platform-config.yml isn't picked up from the filesystem + fs.create_file( + PLATFORM_CONFIG_FILE, + contents=yaml.dump( + { + "application": "test-app", + "environments": {"*": {"vpc": "test-env-vpc"}, "test-env": {}}, + } + ), + ) + mocks = DataCopyMocks() + db_copy = DatabaseCopy("test-app", 
"test-db", **mocks.params()) + + vpc_name = db_copy.enrich_vpc_name("test-env", None) + + assert vpc_name == "test-env-vpc" diff --git a/tests/platform_helper/utils/test_aws.py b/tests/platform_helper/utils/test_aws.py index 6ccbc7281..add894d01 100644 --- a/tests/platform_helper/utils/test_aws.py +++ b/tests/platform_helper/utils/test_aws.py @@ -12,6 +12,7 @@ from dbt_platform_helper.exceptions import AWSException from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError +from dbt_platform_helper.exceptions import ResourceNotFoundException from dbt_platform_helper.exceptions import ValidationException from dbt_platform_helper.utils.aws import NoProfileForAccountIdError from dbt_platform_helper.utils.aws import Vpc @@ -31,6 +32,7 @@ from dbt_platform_helper.utils.aws import get_supported_redis_versions from dbt_platform_helper.utils.aws import get_vpc_info_by_name from dbt_platform_helper.utils.aws import set_ssm_param +from dbt_platform_helper.utils.aws import wait_for_log_group_to_exist from tests.platform_helper.conftest import mock_aws_client from tests.platform_helper.conftest import mock_codestar_connections_boto_client from tests.platform_helper.conftest import mock_ecr_public_repositories_boto_client @@ -995,3 +997,20 @@ def test_get_vpc_info_by_name_failure_no_matching_security_groups(): get_vpc_info_by_name(mock_session, "my_app", "my_env", "my_vpc") assert "No matching security groups found in vpc 'my_vpc'" in str(ex) + + +def test_wait_for_log_group_to_exist_success(): + log_group_name = "/ecs/test-log-group" + mock_client = Mock() + mock_client.describe_log_groups.return_value = {"logGroups": [{"logGroupName": log_group_name}]} + + wait_for_log_group_to_exist(mock_client, log_group_name) + + +def test_wait_for_log_group_to_exist_fails_when_log_group_not_found(): + log_group_name = "/ecs/test-log-group" + mock_client = Mock() + mock_client.describe_log_groups.return_value = {"logGroups": [{"logGroupName": log_group_name}]} + + with 
pytest.raises(ResourceNotFoundException): + wait_for_log_group_to_exist(mock_client, "not_found", 1) diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 6de93e31a..ab6f954d8 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -1,4 +1,3 @@ -import os import re from pathlib import Path from unittest.mock import patch @@ -583,21 +582,17 @@ def test_lint_yaml_for_duplicate_keys_fails_when_duplicate_keys_provided( """ Path(PLATFORM_CONFIG_FILE).write_text(invalid_platform_config) + expected_error = f'duplication of key "{duplicate_key}"' linting_failures = lint_yaml_for_duplicate_keys(PLATFORM_CONFIG_FILE) - assert linting_failures == [f'\tLine 100: duplication of key "{duplicate_key}"'] + assert expected_error in linting_failures[0] with pytest.raises(SystemExit) as excinfo: load_and_validate_platform_config(PLATFORM_CONFIG_FILE) captured = capsys.readouterr() - expected_error_message = ( - "Duplicate keys found in platform-config:" - + os.linesep - + f'\tLine 100: duplication of key "{duplicate_key}"' - ) - assert expected_error_message in captured.err + assert expected_error in captured.err assert excinfo.value.code == 1 @@ -740,6 +735,54 @@ def test_config_file_check_fails_for_unsupported_files_exist( assert expected_message in console_message +@pytest.mark.parametrize( + "database_copy_section", + [ + None, + [{"from": "dev", "to": "test"}], + [{"from": "test", "to": "dev"}], + [ + { + "from": "prod", + "to": "test", + "from_account": "9999999999", + "to_account": "1122334455", + } + ], + [ + { + "from": "dev", + "to": "test", + "from_account": "9999999999", + "to_account": "9999999999", + } + ], + ], +) +def test_validate_database_copy_section_success_cases(database_copy_section): + config = { + "application": "test-app", + "environments": { + "dev": {"accounts": {"deploy": {"id": "1122334455"}}}, + "test": {"accounts": {"deploy": {"id": 
"1122334455"}}}, + "prod": {"accounts": {"deploy": {"id": "9999999999"}}}, + }, + "extensions": { + "our-postgres": { + "type": "postgres", + "version": 7, + } + }, + } + + if database_copy_section: + config["extensions"]["our-postgres"]["database_copy"] = database_copy_section + + validate_database_copy_section(config) + + # Should get here fine if the config is valid. + + @pytest.mark.parametrize( "files, expected_messages", [ @@ -767,34 +810,6 @@ def test_config_file_check_warns_if_deprecated_files_exist( assert expected_message in console_message -@pytest.mark.parametrize( - "database_copy_section", - [ - None, - [{"from": "dev", "to": "test"}], - [{"from": "test", "to": "dev"}, {"from": "prod", "to": "test"}], - ], -) -def test_validate_database_copy_section_success_cases(database_copy_section): - config = { - "application": "test-app", - "environments": {"dev": {}, "test": {}, "prod": {}}, - "extensions": { - "our-postgres": { - "type": "postgres", - "version": 7, - } - }, - } - - if database_copy_section: - config["extensions"]["our-postgres"]["database_copy"] = database_copy_section - - validate_database_copy_section(config) - - # Should get here fine if the config is valid. - - @pytest.mark.parametrize( "database_copy_section, expected_parameters", [ @@ -941,6 +956,88 @@ def test_validate_database_copy_multi_postgres_failures(capfd): ) +def test_validate_database_copy_fails_if_cross_account_with_no_from_account(capfd): + config = { + "application": "test-app", + "environments": { + "dev": {"accounts": {"deploy": {"id": "1122334455"}}}, + "prod": {"accounts": {"deploy": {"id": "9999999999"}}}, + }, + "extensions": { + "our-postgres": { + "type": "postgres", + "version": 7, + "database_copy": [{"from": "prod", "to": "dev"}], + } + }, + } + + with pytest.raises(SystemExit): + validate_database_copy_section(config) + + console_message = capfd.readouterr().err + + msg = f"Environments 'prod' and 'dev' are in different AWS accounts. 
The 'from_account' parameter must be present." + assert msg in console_message + + +def test_validate_database_copy_fails_if_cross_account_with_no_to_account(capfd): + config = { + "application": "test-app", + "environments": { + "dev": {"accounts": {"deploy": {"id": "1122334455"}}}, + "prod": {"accounts": {"deploy": {"id": "9999999999"}}}, + }, + "extensions": { + "our-postgres": { + "type": "postgres", + "version": 7, + "database_copy": [{"from": "prod", "to": "dev", "from_account": "9999999999"}], + } + }, + } + + with pytest.raises(SystemExit): + validate_database_copy_section(config) + + console_message = capfd.readouterr().err + + msg = f"Environments 'prod' and 'dev' are in different AWS accounts. The 'to_account' parameter must be present." + assert msg in console_message + + +def test_validate_database_copy_fails_if_cross_account_with_incorrect_account_ids(capfd): + config = { + "application": "test-app", + "environments": { + "dev": {"accounts": {"deploy": {"id": "1122334455"}}}, + "prod": {"accounts": {"deploy": {"id": "9999999999"}}}, + }, + "extensions": { + "our-postgres": { + "type": "postgres", + "version": 7, + "database_copy": [ + { + "from": "prod", + "to": "dev", + "from_account": "000000000", + "to_account": "1111111111", + } + ], + } + }, + } + + with pytest.raises(SystemExit): + validate_database_copy_section(config) + + console_message = capfd.readouterr().err + + msg = f"Incorrect value for 'from_account' for environment 'prod'" + assert msg in console_message + + @pytest.mark.parametrize( "config, expected_response", [ From 4355182e50d902a091e7c55a4093fea3dbabed76 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Tue, 3 Dec 2024 17:48:28 +0000 Subject: [PATCH 23/38] refactor: DBTP-1520 Improving provider structure and exception handling (2nd pass) (#672) Co-authored-by: Connor Hindle Co-authored-by: chiaramapellimt Co-authored-by: Chiara <95863059+chiaramapellimt@users.noreply.github.com> --- 
dbt_platform_helper/commands/codebase.py | 89 +------ dbt_platform_helper/commands/conduit.py | 70 ++---- dbt_platform_helper/commands/secrets.py | 2 +- dbt_platform_helper/domain/codebase.py | 12 +- dbt_platform_helper/domain/conduit.py | 91 ++----- dbt_platform_helper/exceptions.py | 106 ++++++-- .../providers/cloudformation.py | 222 +++++++++-------- dbt_platform_helper/providers/copilot.py | 44 +++- dbt_platform_helper/providers/ecs.py | 148 +++++------ dbt_platform_helper/providers/secrets.py | 148 +++++------ dbt_platform_helper/utils/application.py | 2 +- dbt_platform_helper/utils/aws.py | 5 +- tests/platform_helper/domain/test_codebase.py | 58 ++--- tests/platform_helper/domain/test_conduit.py | 231 +++++++++--------- .../domain/test_database_copy.py | 2 +- .../providers/test_cloudformation.py | 78 +++--- .../platform_helper/providers/test_copilot.py | 211 ++-------------- tests/platform_helper/providers/test_ecs.py | 118 +++++---- .../platform_helper/providers/test_secrets.py | 189 +++++++++++++- .../platform_helper/test_command_codebase.py | 22 +- tests/platform_helper/test_command_conduit.py | 55 +---- tests/platform_helper/test_exceptions.py | 110 +++++++++ 22 files changed, 1036 insertions(+), 977 deletions(-) create mode 100644 tests/platform_helper/test_exceptions.py diff --git a/dbt_platform_helper/commands/codebase.py b/dbt_platform_helper/commands/codebase.py index cdf4ee23c..e082f5b1c 100644 --- a/dbt_platform_helper/commands/codebase.py +++ b/dbt_platform_helper/commands/codebase.py @@ -1,18 +1,8 @@ -import json -import os - import click from dbt_platform_helper.domain.codebase import Codebase -from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import 
ImageNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError -from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError +from dbt_platform_helper.exceptions import PlatformException from dbt_platform_helper.utils.click import ClickDocOptGroup -from dbt_platform_helper.utils.git import CommitNotFoundError from dbt_platform_helper.utils.versioning import ( check_platform_helper_version_needs_update, ) @@ -29,12 +19,8 @@ def prepare(): """Sets up an application codebase for use within a DBT platform project.""" try: Codebase().prepare() - except NotInCodeBaseRepositoryError: - # TODO: Set exception message in the exceptions and just output the message in the command code - click.secho( - "You are in the deploy repository; make sure you are in the application codebase repository.", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort @@ -50,17 +36,8 @@ def list(app, with_images): """List available codebases for the application.""" try: Codebase().list(app, with_images) - except NoCopilotCodebasesFoundError: - click.secho( - f"""No codebases found for application "{app}""", - fg="red", - ) - raise click.Abort - except ApplicationNotFoundError: - click.secho( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort @@ -76,23 +53,8 @@ def build(app, codebase, commit): """Trigger a CodePipeline pipeline based build.""" try: Codebase().build(app, codebase, commit) - except ApplicationNotFoundError: - click.secho( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ) - raise click.Abort - except CommitNotFoundError: - click.secho( - f'The 
commit hash "{commit}" either does not exist or you need to run `git fetch`.', - fg="red", - ) - raise click.Abort - except ApplicationDeploymentNotTriggered: - click.secho( - f"Your build for {codebase} was not triggered.", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort @@ -108,39 +70,6 @@ def build(app, codebase, commit): def deploy(app, env, codebase, commit): try: Codebase().deploy(app, env, codebase, commit) - except ApplicationNotFoundError: - # TODO: Set exception message in the exceptions and just output the message in the command code - click.secho( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ) - raise click.Abort - except ApplicationEnvironmentNotFoundError: - click.secho( - f"""The environment "{env}" either does not exist or has not been deployed.""", - fg="red", - ) - raise click.Abort - except ( - CopilotCodebaseNotFoundError, - # TODO: Catch this error earlier and throw a more meaningful error, maybe it's CopilotCodebaseNotFoundError? 
- json.JSONDecodeError, - ): - click.secho( - f"""The codebase "{codebase}" either does not exist or has not been deployed.""", - fg="red", - ) - raise click.Abort - except ImageNotFoundError: - click.secho( - f'The commit hash "{commit}" has not been built into an image, try the ' - "`platform-helper codebase build` command first.", - fg="red", - ) - raise click.Abort - except ApplicationDeploymentNotTriggered: - click.secho( - f"Your deployment for {codebase} was not triggered.", - fg="red", - ) + except PlatformException as err: + click.secho(str(err), fg="red") raise click.Abort diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index a2828841c..8d3051bb6 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -1,14 +1,10 @@ import click -from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.providers.cloudformation import CloudFormation +from dbt_platform_helper.providers.ecs import ECS +from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import load_application from dbt_platform_helper.utils.click import ClickDocOptCommand from dbt_platform_helper.utils.versioning import ( @@ -35,44 +31,28 @@ def conduit(addon_name: str, app: str, env: str, access: str): application = load_application(app) try: - 
Conduit(application).start(env, addon_name, access) - except NoClusterError: - # TODO: Set exception message in the exceptions and just output the message in the command code, should be able to catch all errors in one block - click.secho(f"""No ECS cluster found for "{app}" in "{env}" environment.""", fg="red") - exit(1) - except SecretNotFoundError as err: - click.secho( - f"""No secret called "{err}" for "{app}" in "{env}" environment.""", - fg="red", + secrets_provider: Secrets = Secrets( + application.environments[env].session.client("ssm"), + application.environments[env].session.client("secretsmanager"), + application.name, + env, ) - exit(1) - except CreateTaskTimeoutError: - click.secho( - f"""Client ({addon_name}) ECS task has failed to start for "{app}" in "{env}" environment.""", - fg="red", + cloudformation_provider: CloudFormation = CloudFormation( + application.environments[env].session.client("cloudformation"), + application.environments[env].session.client("iam"), + application.environments[env].session.client("ssm"), ) - exit(1) - except ParameterNotFoundError: - click.secho( - f"""No parameter called "/copilot/applications/{app}/environments/{env}/addons". 
Try deploying the "{app}" "{env}" environment.""", - fg="red", - ) - exit(1) - except AddonNotFoundError: - click.secho( - f"""Addon "{addon_name}" does not exist.""", - fg="red", - ) - exit(1) - except InvalidAddonTypeError as err: - click.secho( - f"""Addon type "{err.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""", - fg="red", + + ecs_provider: ECS = ECS( + application.environments[env].session.client("ecs"), + application.environments[env].session.client("ssm"), + application.name, + env, ) - exit(1) - except AddonTypeMissingFromConfigError: - click.secho( - f"""The configuration for the addon {addon_name}, is missconfigured and missing the addon type.""", - fg="red", + + Conduit(application, secrets_provider, cloudformation_provider, ecs_provider).start( + env, addon_name, access ) - exit(1) + except AWSException as err: + click.secho(str(err), fg="red") + raise click.Abort diff --git a/dbt_platform_helper/commands/secrets.py b/dbt_platform_helper/commands/secrets.py index f237d19fc..b67b7e182 100755 --- a/dbt_platform_helper/commands/secrets.py +++ b/dbt_platform_helper/commands/secrets.py @@ -102,7 +102,6 @@ def list(app, env): params = dict(Path=path, Recursive=False, WithDecryption=True, MaxResults=10) secrets = [] - # TODO: refactor shared code with get_ssm_secret_names - Check if this is still valid while True: response = client.get_parameters_by_path(**params) @@ -114,6 +113,7 @@ def list(app, env): else: break + # Todo: When we refactor this, the above could probably just use dbt_platform_helper.utils.aws.get_ssm_secret_names so we would end up with print("\n".join(get_ssm_secret_names(app, env))) print("\n".join(sorted(secrets))) diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index eb1b807e2..053a25a60 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -11,7 +11,6 @@ from dbt_platform_helper.exceptions import 
ApplicationDeploymentNotTriggered from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.application import load_application @@ -67,7 +66,7 @@ def prepare(self): .removesuffix(".git") ) if repository.endswith("-deploy") or Path("./copilot").exists(): - raise NotInCodeBaseRepositoryError + raise NotInCodeBaseRepositoryError() builder_configuration_url = "https://raw.githubusercontent.com/uktrade/ci-image-builder/main/image_builder/configuration/builder_configuration.yml" builder_configuration_response = requests.get(builder_configuration_url) @@ -134,7 +133,7 @@ def build(self, app: str, codebase: str, commit: str): f"Your build has been triggered. Check your build progress in the AWS Console: {build_url}" ) - raise ApplicationDeploymentNotTriggered() + raise ApplicationDeploymentNotTriggered(codebase) def deploy(self, app, env, codebase, commit): """Trigger a CodePipeline pipeline based deployment.""" @@ -142,7 +141,7 @@ def deploy(self, app, env, codebase, commit): application = self.load_application_fn(app, default_session=session) if not application.environments.get(env): - raise ApplicationEnvironmentNotFoundError() + raise ApplicationEnvironmentNotFoundError(env) self.check_codebase_exists_fn(session, application, codebase) @@ -171,7 +170,7 @@ def deploy(self, app, env, codebase, commit): f"{build_url}", ) - raise ApplicationDeploymentNotTriggered() + raise ApplicationDeploymentNotTriggered(codebase) def list(self, app: str, with_images: bool): """List available codebases for the application.""" @@ -204,8 +203,7 @@ def __get_codebases(self, application, ssm_client): codebases = [json.loads(p["Value"]) for p in parameters] if not codebases: - # TODO Is this really an error? 
Or just no codebases so we could return an empty list? - raise NoCopilotCodebasesFoundError + return [] return codebases def __start_build_with_confirmation( diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index 349df2617..3ac798c29 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -3,74 +3,48 @@ import click -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.providers.cloudformation import ( - add_stack_delete_policy_to_task_role, -) -from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources -from dbt_platform_helper.providers.cloudformation import ( - wait_for_cloudformation_to_reach_status, -) +from dbt_platform_helper.providers.cloudformation import CloudFormation from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.ecs import ecs_exec_is_available -from dbt_platform_helper.providers.ecs import get_cluster_arn -from dbt_platform_helper.providers.ecs import get_ecs_task_arns -from dbt_platform_helper.providers.ecs import get_or_create_task_name -from dbt_platform_helper.providers.secrets import get_addon_type -from dbt_platform_helper.providers.secrets import get_parameter_name +from dbt_platform_helper.providers.ecs import ECS +from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import Application -from dbt_platform_helper.utils.messages import abort_with_error class Conduit: def __init__( self, application: Application, + secrets_provider: Secrets, + cloudformation_provider: CloudFormation, + ecs_provider: ECS, echo_fn: Callable[[str], str] = click.secho, subprocess_fn: subprocess = subprocess, - get_ecs_task_arns_fn=get_ecs_task_arns, 
connect_to_addon_client_task_fn=connect_to_addon_client_task, create_addon_client_task_fn=create_addon_client_task, create_postgres_admin_task_fn=create_postgres_admin_task, - get_addon_type_fn=get_addon_type, - ecs_exec_is_available_fn=ecs_exec_is_available, - get_cluster_arn_fn=get_cluster_arn, - get_parameter_name_fn=get_parameter_name, - get_or_create_task_name_fn=get_or_create_task_name, - add_stack_delete_policy_to_task_role_fn=add_stack_delete_policy_to_task_role, - update_conduit_stack_resources_fn=update_conduit_stack_resources, - wait_for_cloudformation_to_reach_status_fn=wait_for_cloudformation_to_reach_status, - abort_fn=abort_with_error, ): self.application = application + self.secrets_provider = secrets_provider + self.cloudformation_provider = cloudformation_provider + self.ecs_provider = ecs_provider self.subprocess_fn = subprocess_fn self.echo_fn = echo_fn - self.get_ecs_task_arns_fn = get_ecs_task_arns_fn self.connect_to_addon_client_task_fn = connect_to_addon_client_task_fn self.create_addon_client_task_fn = create_addon_client_task_fn self.create_postgres_admin_task = create_postgres_admin_task_fn - self.get_addon_type_fn = get_addon_type_fn - self.ecs_exec_is_available_fn = ecs_exec_is_available_fn - self.get_cluster_arn_fn = get_cluster_arn_fn - self.get_parameter_name_fn = get_parameter_name_fn - self.get_or_create_task_name_fn = get_or_create_task_name_fn - self.add_stack_delete_policy_to_task_role_fn = add_stack_delete_policy_to_task_role_fn - self.update_conduit_stack_resources_fn = update_conduit_stack_resources_fn - self.wait_for_cloudformation_to_reach_status_fn = wait_for_cloudformation_to_reach_status_fn - self.abort_fn = abort_fn def start(self, env: str, addon_name: str, access: str = "read"): clients = self._initialise_clients(env) addon_type, cluster_arn, parameter_name, task_name = self._get_addon_details( - env, addon_name, access + addon_name, access ) self.echo_fn(f"Checking if a conduit task is already running for 
{addon_type}") - task_arn = self.get_ecs_task_arns_fn(clients["ecs"], cluster_arn, task_name) - if not task_arn: + task_arns = self.ecs_provider.get_ecs_task_arns(cluster_arn, task_name) + if not task_arns: self.echo_fn("Creating conduit task") self.create_addon_client_task_fn( clients["iam"], @@ -87,9 +61,6 @@ def start(self, env: str, addon_name: str, access: str = "read"): self.echo_fn("Updating conduit task") self._update_stack_resources( - clients["cloudformation"], - clients["iam"], - clients["ssm"], self.application.name, env, addon_type, @@ -99,17 +70,14 @@ def start(self, env: str, addon_name: str, access: str = "read"): access, ) - task_arn = self.get_ecs_task_arns_fn(clients["ecs"], cluster_arn, task_name) + task_arns = self.ecs_provider.get_ecs_task_arns(cluster_arn, task_name) else: self.echo_fn("Conduit task already running") self.echo_fn(f"Checking if exec is available for conduit task...") - try: - self.ecs_exec_is_available_fn(clients["ecs"], cluster_arn, task_arn) - except ECSAgentNotRunning: - self.abort_fn('ECS exec agent never reached "RUNNING" status') + self.ecs_provider.ecs_exec_is_available(cluster_arn, task_arns) self.echo_fn("Connecting to conduit task") self.connect_to_addon_client_task_fn( @@ -125,26 +93,16 @@ def _initialise_clients(self, env): "secrets_manager": self.application.environments[env].session.client("secretsmanager"), } - def _get_addon_details(self, env, addon_name, access): - ssm_client = self.application.environments[env].session.client("ssm") - ecs_client = self.application.environments[env].session.client("ecs") - - addon_type = self.get_addon_type_fn(ssm_client, self.application.name, env, addon_name) - cluster_arn = self.get_cluster_arn_fn(ecs_client, self.application.name, env) - parameter_name = self.get_parameter_name_fn( - self.application.name, env, addon_type, addon_name, access - ) - task_name = self.get_or_create_task_name_fn( - ssm_client, self.application.name, env, addon_name, parameter_name - ) + def 
_get_addon_details(self, addon_name, access): + addon_type = self.secrets_provider.get_addon_type(addon_name) + cluster_arn = self.ecs_provider.get_cluster_arn() + parameter_name = self.secrets_provider.get_parameter_name(addon_type, addon_name, access) + task_name = self.ecs_provider.get_or_create_task_name(addon_name, parameter_name) return addon_type, cluster_arn, parameter_name, task_name def _update_stack_resources( self, - cloudformation_client, - iam_client, - ssm_client, app_name, env, addon_type, @@ -153,11 +111,8 @@ def _update_stack_resources( parameter_name, access, ): - self.add_stack_delete_policy_to_task_role_fn(cloudformation_client, iam_client, task_name) - stack_name = self.update_conduit_stack_resources_fn( - cloudformation_client, - iam_client, - ssm_client, + self.cloudformation_provider.add_stack_delete_policy_to_task_role(task_name) + stack_name = self.cloudformation_provider.update_conduit_stack_resources( app_name, env, addon_type, @@ -167,6 +122,6 @@ def _update_stack_resources( access, ) self.echo_fn("Waiting for conduit task update to complete...") - self.wait_for_cloudformation_to_reach_status_fn( - cloudformation_client, "stack_update_complete", stack_name + self.cloudformation_provider.wait_for_cloudformation_to_reach_status( + "stack_update_complete", stack_name ) diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py index edf4f057f..e5eab9e57 100644 --- a/dbt_platform_helper/exceptions.py +++ b/dbt_platform_helper/exceptions.py @@ -1,11 +1,38 @@ +import os + +from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES + + class ValidationException(Exception): pass -class AWSException(Exception): +class PlatformException(Exception): + pass + + +class AWSException(PlatformException): + pass + + +class ApplicationException(PlatformException): pass +class CloudFormationException(AWSException): + def __init__(self, stack_name: str, current_status: str): + super().__init__( + f"The CloudFormation stack 
'{stack_name}' is not in a good state: {current_status}"
+        )
+
+
+class CommitNotFoundError(PlatformException):
+    def __init__(self, commit: str):
+        super().__init__(
+            f"""The commit hash "{commit}" either does not exist or you need to run `git fetch`."""
+        )
+
+
 class IncompatibleMajorVersion(ValidationException):
     def __init__(self, app_version: str, check_version: str):
         super().__init__()
@@ -21,64 +48,99 @@ def __init__(self, app_version: str, check_version: str):
 
 
 class NoClusterError(AWSException):
-    pass
+    def __init__(self, application_name: str, environment: str):
+        super().__init__(
+            f"""No ECS cluster found for "{application_name}" in "{environment}" environment."""
+        )
 
 
 class CreateTaskTimeoutError(AWSException):
-    pass
+    def __init__(self, addon_name: str, application_name: str, environment: str):
+        super().__init__(
+            f"""Client ({addon_name}) ECS task has failed to start for "{application_name}" in "{environment}" environment."""
+        )
 
 
 class ParameterNotFoundError(AWSException):
-    pass
+    def __init__(self, application_name: str, environment: str):
+        super().__init__(
+            f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". 
Try deploying the "{application_name}" "{environment}" environment.""" + ) class AddonNotFoundError(AWSException): - pass + def __init__(self, addon_name: str): + super().__init__(f"""Addon "{addon_name}" does not exist.""") class InvalidAddonTypeError(AWSException): def __init__(self, addon_type): self.addon_type = addon_type + super().__init__( + f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""" + ) class AddonTypeMissingFromConfigError(AWSException): - pass + def __init__(self, addon_name: str): + super().__init__( + f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type.""" + ) -class CopilotCodebaseNotFoundError(Exception): - pass +class CopilotCodebaseNotFoundError(PlatformException): + def __init__(self, codebase: str): + super().__init__( + f"""The codebase "{codebase}" either does not exist or has not been deployed.""" + ) -class NotInCodeBaseRepositoryError(Exception): - pass +class NotInCodeBaseRepositoryError(PlatformException): + def __init__(self): + super().__init__( + "You are in the deploy repository; make sure you are in the application codebase repository.", + ) -class NoCopilotCodebasesFoundError(Exception): - pass +class NoCopilotCodebasesFoundError(PlatformException): + def __init__(self, application_name: str): + super().__init__(f"""No codebases found for application "{application_name}".""") -class ImageNotFoundError(Exception): - pass +class ImageNotFoundError(PlatformException): + def __init__(self, commit: str): + super().__init__( + f"""The commit hash "{commit}" has not been built into an image, try the `platform-helper codebase build` command first.""" + ) -class ApplicationDeploymentNotTriggered(Exception): - pass +class ApplicationDeploymentNotTriggered(PlatformException): + def __init__(self, codebase: str): + super().__init__(f"""Your deployment for {codebase} was not triggered.""") -class ApplicationNotFoundError(Exception): - pass +class 
ApplicationNotFoundError(ApplicationException): + def __init__(self, application_name: str): + super().__init__( + f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{application_name}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" + ) -class ApplicationEnvironmentNotFoundError(Exception): - pass +class ApplicationEnvironmentNotFoundError(ApplicationException): + def __init__(self, environment: str): + super().__init__( + f"""The environment "{environment}" either does not exist or has not been deployed.""" + ) class SecretNotFoundError(AWSException): - pass + def __init__(self, secret_name: str): + super().__init__(f"""No secret called "{secret_name}".""") class ECSAgentNotRunning(AWSException): - pass + def __init__(self): + super().__init__("""ECS exec agent never reached "RUNNING" status""") class ResourceNotFoundException(AWSException): diff --git a/dbt_platform_helper/providers/cloudformation.py b/dbt_platform_helper/providers/cloudformation.py index 395a49c71..4cc45fc24 100644 --- a/dbt_platform_helper/providers/cloudformation.py +++ b/dbt_platform_helper/providers/cloudformation.py @@ -1,105 +1,127 @@ import json +import botocore from cfn_tools import dump_yaml from cfn_tools import load_yaml - -def add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name: str): - - stack_name = f"task-{task_name}" - stack_resources = cloudformation_client.list_stack_resources(StackName=stack_name)[ - "StackResourceSummaries" - ] - - for resource in stack_resources: - if resource["LogicalResourceId"] == "DefaultTaskRole": - task_role_name = resource["PhysicalResourceId"] - iam_client.put_role_policy( - RoleName=task_role_name, - PolicyName="DeleteCloudFormationStack", - PolicyDocument=json.dumps( - { - "Version": "2012-10-17", - "Statement": [ - { - "Action": ["cloudformation:DeleteStack"], - "Effect": "Allow", - "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", - }, - ], - 
}, - ), - ) - - -def update_conduit_stack_resources( - cloudformation_client, - iam_client, - ssm_client, - application_name: str, - env: str, - addon_type: str, - addon_name: str, - task_name: str, - parameter_name: str, - access: str, -): - - conduit_stack_name = f"task-{task_name}" - template = cloudformation_client.get_template(StackName=conduit_stack_name) - template_yml = load_yaml(template["TemplateBody"]) - template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" - template_yml["Resources"]["TaskNameParameter"] = load_yaml( - f""" - Type: AWS::SSM::Parameter - Properties: - Name: {parameter_name} - Type: String - Value: {task_name} - """ - ) - - log_filter_role_arn = iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")["Role"]["Arn"] - - destination_log_group_arns = json.loads( - ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"]["Value"] - ) - - destination_arn = destination_log_group_arns["dev"] - if env.lower() in ("prod", "production"): - destination_arn = destination_log_group_arns["prod"] - - template_yml["Resources"]["SubscriptionFilter"] = load_yaml( - f""" - Type: AWS::Logs::SubscriptionFilter - DeletionPolicy: Retain - Properties: - RoleArn: {log_filter_role_arn} - LogGroupName: /copilot/{task_name} - FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} - FilterPattern: '' - DestinationArn: {destination_arn} - """ - ) - - params = [] - if "Parameters" in template_yml: - for param in template_yml["Parameters"]: - # TODO testing missed in codecov, update test to assert on method call below with params including ExistingParameter from cloudformation template. 
- params.append({"ParameterKey": param, "UsePreviousValue": True}) - - cloudformation_client.update_stack( - StackName=conduit_stack_name, - TemplateBody=dump_yaml(template_yml), - Parameters=params, - Capabilities=["CAPABILITY_IAM"], - ) - - return conduit_stack_name - - -# TODO Catch errors and raise a more human friendly Exception is the CloudFormation stack goes into a "unhappy" state, e.g. ROLLBACK_IN_PROGRESS. Currently we get things like botocore.exceptions.WaiterError: Waiter StackUpdateComplete failed: Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" we matched expected path: "UPDATE_ROLLBACK_COMPLETE" at least once -def wait_for_cloudformation_to_reach_status(cloudformation_client, stack_status, stack_name): - - waiter = cloudformation_client.get_waiter(stack_status) - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 20}) +from dbt_platform_helper.exceptions import CloudFormationException + + +class CloudFormation: + def __init__(self, cloudformation_client, iam_client, ssm_client): + self.cloudformation_client = cloudformation_client + self.iam_client = iam_client + self.ssm_client = ssm_client + + def add_stack_delete_policy_to_task_role(self, task_name: str): + stack_name = f"task-{task_name}" + stack_resources = self.cloudformation_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + for resource in stack_resources: + if resource["LogicalResourceId"] == "DefaultTaskRole": + task_role_name = resource["PhysicalResourceId"] + self.iam_client.put_role_policy( + RoleName=task_role_name, + PolicyName="DeleteCloudFormationStack", + PolicyDocument=json.dumps( + { + "Version": "2012-10-17", + "Statement": [ + { + "Action": ["cloudformation:DeleteStack"], + "Effect": "Allow", + "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*", + }, + ], + }, + ), + ) + + def update_conduit_stack_resources( + self, + application_name: str, + env: str, + addon_type: str, + 
addon_name: str, + task_name: str, + parameter_name: str, + access: str, + ): + conduit_stack_name = f"task-{task_name}" + template = self.cloudformation_client.get_template(StackName=conduit_stack_name) + template_yml = load_yaml(template["TemplateBody"]) + + template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain" + + template_yml["Resources"]["TaskNameParameter"] = load_yaml( + f""" + Type: AWS::SSM::Parameter + Properties: + Name: {parameter_name} + Type: String + Value: {task_name} + """ + ) + + log_filter_role_arn = self.iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")[ + "Role" + ]["Arn"] + + destination_log_group_arns = json.loads( + self.ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"][ + "Value" + ] + ) + + destination_arn = destination_log_group_arns["dev"] + if env.lower() in ("prod", "production"): + destination_arn = destination_log_group_arns["prod"] + + template_yml["Resources"]["SubscriptionFilter"] = load_yaml( + f""" + Type: AWS::Logs::SubscriptionFilter + DeletionPolicy: Retain + Properties: + RoleArn: {log_filter_role_arn} + LogGroupName: /copilot/{task_name} + FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access} + FilterPattern: '' + DestinationArn: {destination_arn} + """ + ) + + params = [] + # TODO Currently not covered by tests - see https://uktrade.atlassian.net/browse/DBTP-1582 + if "Parameters" in template_yml: + for param in template_yml["Parameters"]: + params.append({"ParameterKey": param, "UsePreviousValue": True}) + + self.cloudformation_client.update_stack( + StackName=conduit_stack_name, + TemplateBody=dump_yaml(template_yml), + Parameters=params, + Capabilities=["CAPABILITY_IAM"], + ) + + return conduit_stack_name + + def wait_for_cloudformation_to_reach_status(self, stack_status, stack_name): + waiter = self.cloudformation_client.get_waiter(stack_status) + + try: + waiter.wait(StackName=stack_name, 
WaiterConfig={"Delay": 5, "MaxAttempts": 20}) + except botocore.exceptions.WaiterError as err: + current_status = err.last_response.get("Stacks", [{}])[0].get("StackStatus", "") + + if current_status in [ + "ROLLBACK_IN_PROGRESS", + "UPDATE_ROLLBACK_IN_PROGRESS", + "ROLLBACK_FAILED", + ]: + raise CloudFormationException(stack_name, current_status) + else: + raise CloudFormationException( + stack_name, f"Error while waiting for stack status: {str(err)}" + ) diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index 61c1a055d..503a82f10 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -5,11 +5,7 @@ from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.providers.ecs import get_ecs_task_arns -from dbt_platform_helper.providers.secrets import get_connection_secret_arn -from dbt_platform_helper.providers.secrets import ( - get_postgres_connection_data_updated_with_master_secret, -) +from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.messages import abort_with_error @@ -59,7 +55,7 @@ def create_addon_client_task( # We cannot check for botocore.errorfactory.NoSuchEntityException as botocore generates that class on the fly as part of errorfactory. # factory. Checking the error code is the recommended way of handling these exceptions. 
if ex.response.get("Error", {}).get("Code", None) != "NoSuchEntity": - # TODO Raise an exception to be caught at the command layer + # TODO When we are refactoring this, raise an exception to be caught at the command layer abort_with_error( f"cannot obtain Role {role_name}: {ex.response.get('Error', {}).get('Message', '')}" ) @@ -69,7 +65,7 @@ def create_addon_client_task( f"--task-group-name {task_name} " f"{execution_role}" f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} " - f"--secrets CONNECTION_SECRET={get_connection_secret_arn(ssm_client,secrets_manager_client, secret_name)} " + f"--secrets CONNECTION_SECRET={_get_secrets_provider(application, env).get_connection_secret_arn(secret_name)} " "--platform-os linux " "--platform-arch arm64", shell=True, @@ -95,8 +91,8 @@ def create_postgres_admin_task( "Parameter" ]["Value"] connection_string = json.dumps( - get_postgres_connection_data_updated_with_master_secret( - ssm_client, secrets_manager_client, read_only_secret_name, master_secret_arn + _get_secrets_provider(app, env).get_postgres_connection_data_updated_with_master_secret( + read_only_secret_name, master_secret_arn ) ) @@ -111,6 +107,19 @@ def create_postgres_admin_task( ) +def _temp_until_refactor_get_ecs_task_arns(ecs_client, cluster_arn: str, task_name: str): + tasks = ecs_client.list_tasks( + cluster=cluster_arn, + desiredStatus="RUNNING", + family=f"copilot-{task_name}", + ) + + if not tasks["taskArns"]: + return [] + + return tasks["taskArns"] + + def connect_to_addon_client_task( ecs_client, subprocess, @@ -118,13 +127,14 @@ def connect_to_addon_client_task( env, cluster_arn, task_name, - addon_client_is_running_fn=get_ecs_task_arns, + get_ecs_task_arns_fn=_temp_until_refactor_get_ecs_task_arns, ): running = False tries = 0 while tries < 15 and not running: tries += 1 - if addon_client_is_running_fn(ecs_client, cluster_arn, task_name): + # Todo: Use from ECS provider when we refactor this + if get_ecs_task_arns_fn(ecs_client, cluster_arn, 
task_name): subprocess.call( "copilot task exec " f"--app {application_name} --env {env} " @@ -137,8 +147,18 @@ def connect_to_addon_client_task( time.sleep(1) if not running: - raise CreateTaskTimeoutError + raise CreateTaskTimeoutError(task_name, application_name, env) def _normalise_secret_name(addon_name: str) -> str: return addon_name.replace("-", "_").upper() + + +def _get_secrets_provider(application: Application, env: str) -> Secrets: + # Todo: We instantiate the secrets provider here to avoid rabbit holing, but something better probably possible when we are refactoring this area + return Secrets( + application.environments[env].session.client("ssm"), + application.environments[env].session.client("secretsmanager"), + application.name, + env, + ) diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 2878e91ac..7ddf0a180 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -7,73 +7,81 @@ from dbt_platform_helper.exceptions import NoClusterError -# Todo: Refactor to a class, review, then perhaps do the others -def get_cluster_arn(ecs_client, application_name: str, env: str) -> str: - for cluster_arn in ecs_client.list_clusters()["clusterArns"]: - tags_response = ecs_client.list_tags_for_resource(resourceArn=cluster_arn) - tags = tags_response["tags"] - - app_key_found = False - env_key_found = False - cluster_key_found = False - - for tag in tags: - if tag["key"] == "copilot-application" and tag["value"] == application_name: - app_key_found = True - if tag["key"] == "copilot-environment" and tag["value"] == env: - env_key_found = True - if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": - cluster_key_found = True - - if app_key_found and env_key_found and cluster_key_found: - return cluster_arn - - raise NoClusterError - - -def get_or_create_task_name( - ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str -) -> str: - 
try: - return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] - except ssm_client.exceptions.ParameterNotFound: - random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) - return f"conduit-{application_name}-{env}-{addon_name}-{random_id}" - - -def get_ecs_task_arns(ecs_client, cluster_arn: str, task_name: str): - - tasks = ecs_client.list_tasks( - cluster=cluster_arn, - desiredStatus="RUNNING", - family=f"copilot-{task_name}", - ) - - if not tasks["taskArns"]: - return [] - - return tasks["taskArns"] - - -def ecs_exec_is_available(ecs_client, cluster_arn: str, task_arns: List[str]): - - current_attemps = 0 - execute_command_agent_status = "" - - while execute_command_agent_status != "RUNNING" and current_attemps < 25: - - current_attemps += 1 - - task_details = ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) - - managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] - execute_command_agent_status = [ - agent["lastStatus"] - for agent in managed_agents - if agent["name"] == "ExecuteCommandAgent" - ][0] - - time.sleep(1) - - if execute_command_agent_status != "RUNNING": - raise ECSAgentNotRunning +class ECS: + def __init__(self, ecs_client, ssm_client, application_name: str, env: str): + self.ecs_client = ecs_client + self.ssm_client = ssm_client + self.application_name = application_name + self.env = env + + def get_cluster_arn(self) -> str: + """Returns the ARN of the ECS cluster for the given application and + environment.""" + for cluster_arn in self.ecs_client.list_clusters()["clusterArns"]: + tags_response = self.ecs_client.list_tags_for_resource(resourceArn=cluster_arn) + tags = tags_response["tags"] + + app_key_found = False + env_key_found = False + cluster_key_found = False + + for tag in tags: + if tag["key"] == "copilot-application" and tag["value"] == self.application_name: + app_key_found = True + if tag["key"] == "copilot-environment" and tag["value"] == self.env: + 
env_key_found = True + if tag["key"] == "aws:cloudformation:logical-id" and tag["value"] == "Cluster": + cluster_key_found = True + + if app_key_found and env_key_found and cluster_key_found: + return cluster_arn + + raise NoClusterError(self.application_name, self.env) + + def get_or_create_task_name(self, addon_name: str, parameter_name: str) -> str: + """Fetches the task name from SSM or creates a new one if not found.""" + try: + return self.ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"] + except self.ssm_client.exceptions.ParameterNotFound: + random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12)) + return f"conduit-{self.application_name}-{self.env}-{addon_name}-{random_id}" + + def get_ecs_task_arns(self, cluster_arn: str, task_name: str): + """Gets the ECS task ARNs for a given task name and cluster ARN.""" + tasks = self.ecs_client.list_tasks( + cluster=cluster_arn, + desiredStatus="RUNNING", + family=f"copilot-{task_name}", + ) + + if not tasks["taskArns"]: + return [] + + return tasks["taskArns"] + + def ecs_exec_is_available(self, cluster_arn: str, task_arns: List[str]): + """ + Checks if the ExecuteCommandAgent is running on the specified ECS task. + + Waits for up to 25 attempts, then raises ECSAgentNotRunning if still not + running. 
+ """ + current_attempts = 0 + execute_command_agent_status = "" + + while execute_command_agent_status != "RUNNING" and current_attempts < 25: + current_attempts += 1 + + task_details = self.ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns) + + managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"] + execute_command_agent_status = [ + agent["lastStatus"] + for agent in managed_agents + if agent["name"] == "ExecuteCommandAgent" + ][0] + if execute_command_agent_status != "RUNNING": + time.sleep(1) + + if execute_command_agent_status != "RUNNING": + raise ECSAgentNotRunning diff --git a/dbt_platform_helper/providers/secrets.py b/dbt_platform_helper/providers/secrets.py index feeaf0ae1..c875f7dfc 100644 --- a/dbt_platform_helper/providers/secrets.py +++ b/dbt_platform_helper/providers/secrets.py @@ -9,77 +9,77 @@ from dbt_platform_helper.exceptions import SecretNotFoundError -def get_postgres_connection_data_updated_with_master_secret( - ssm_client, secrets_manager_client, parameter_name, secret_arn -): - response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) - parameter_value = response["Parameter"]["Value"] - - parameter_data = json.loads(parameter_value) - - secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn) - secret_value = json.loads(secret_response["SecretString"]) - - parameter_data["username"] = urllib.parse.quote(secret_value["username"]) - parameter_data["password"] = urllib.parse.quote(secret_value["password"]) - - return parameter_data - - -def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str: - - try: - return ssm_client.get_parameter(Name=secret_name, WithDecryption=False)["Parameter"]["ARN"] - except ssm_client.exceptions.ParameterNotFound: - pass - - try: - return secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] - except secrets_manager_client.exceptions.ResourceNotFoundException: - pass - - raise 
SecretNotFoundError(secret_name) - - -def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str: - addon_type = None - try: - addon_config = json.loads( - ssm_client.get_parameter( - Name=f"/copilot/applications/{application_name}/environments/{env}/addons" - )["Parameter"]["Value"] - ) - except ssm_client.exceptions.ParameterNotFound: - raise ParameterNotFoundError - - if addon_name not in addon_config.keys(): - raise AddonNotFoundError - - for name, config in addon_config.items(): - if name == addon_name: - if not config.get("type"): - raise AddonTypeMissingFromConfigError() - addon_type = config["type"] - - if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: - raise InvalidAddonTypeError(addon_type) - - if "postgres" in addon_type: - addon_type = "postgres" - - return addon_type - - -def get_parameter_name( - application_name: str, env: str, addon_type: str, addon_name: str, access: str -) -> str: - if addon_type == "postgres": - return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_{access.upper()}" - elif addon_type == "redis" or addon_type == "opensearch": - return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_ENDPOINT" - else: - return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}" - - -def _normalise_secret_name(addon_name: str) -> str: - return addon_name.replace("-", "_").upper() +class Secrets: + def __init__(self, ssm_client, secrets_manager_client, application_name, env): + self.ssm_client = ssm_client + self.secrets_manager_client = secrets_manager_client + self.application_name = application_name + self.env = env + + def get_postgres_connection_data_updated_with_master_secret(self, parameter_name, secret_arn): + response = self.ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) + parameter_value = response["Parameter"]["Value"] + + parameter_data = json.loads(parameter_value) + + 
secret_response = self.secrets_manager_client.get_secret_value(SecretId=secret_arn) + secret_value = json.loads(secret_response["SecretString"]) + + parameter_data["username"] = urllib.parse.quote(secret_value["username"]) + parameter_data["password"] = urllib.parse.quote(secret_value["password"]) + + return parameter_data + + def get_connection_secret_arn(self, secret_name: str) -> str: + try: + return self.ssm_client.get_parameter(Name=secret_name, WithDecryption=False)[ + "Parameter" + ]["ARN"] + except self.ssm_client.exceptions.ParameterNotFound: + pass + + try: + return self.secrets_manager_client.describe_secret(SecretId=secret_name)["ARN"] + except self.secrets_manager_client.exceptions.ResourceNotFoundException: + pass + + raise SecretNotFoundError(secret_name) + + def get_addon_type(self, addon_name: str) -> str: + addon_type = None + try: + addon_config = json.loads( + self.ssm_client.get_parameter( + Name=f"/copilot/applications/{self.application_name}/environments/{self.env}/addons" + )["Parameter"]["Value"] + ) + except self.ssm_client.exceptions.ParameterNotFound: + raise ParameterNotFoundError(self.application_name, self.env) + + if addon_name not in addon_config.keys(): + raise AddonNotFoundError(addon_name) + + for name, config in addon_config.items(): + if name == addon_name: + if not config.get("type"): + raise AddonTypeMissingFromConfigError(addon_name) + addon_type = config["type"] + + if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: + raise InvalidAddonTypeError(addon_type) + + if "postgres" in addon_type: + addon_type = "postgres" + + return addon_type + + def get_parameter_name(self, addon_type: str, addon_name: str, access: str) -> str: + if addon_type == "postgres": + return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}_{access.upper()}" + elif addon_type == "redis" or addon_type == "opensearch": + return 
f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}_ENDPOINT" + else: + return f"/copilot/{self.application_name}/{self.env}/conduits/{self._normalise_secret_name(addon_name)}" + + def _normalise_secret_name(self, addon_name: str) -> str: + return addon_name.replace("-", "_").upper() diff --git a/dbt_platform_helper/utils/application.py b/dbt_platform_helper/utils/application.py index 420689df5..fba00bdab 100644 --- a/dbt_platform_helper/utils/application.py +++ b/dbt_platform_helper/utils/application.py @@ -80,7 +80,7 @@ def load_application(app: str = None, default_session: Session = None) -> Applic WithDecryption=False, ) except ssm_client.exceptions.ParameterNotFound: - raise ApplicationNotFoundError + raise ApplicationNotFoundError(app) path = f"/copilot/applications/{application.name}/environments" secrets = get_ssm_secrets(app, None, current_session, path) diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index ac3239a0e..1faa3b1a9 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -342,6 +342,7 @@ def get_load_balancer_configuration( def get_postgres_connection_data_updated_with_master_secret(session, parameter_name, secret_arn): + # Todo: This is pretty much the same as dbt_platform_helper.providers.secrets.Secrets.get_postgres_connection_data_updated_with_master_secret ssm_client = session.client("ssm") secrets_manager_client = session.client("secretsmanager") response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) @@ -501,7 +502,7 @@ def check_codebase_exists(session: Session, application, codebase: str): ssm_client.exceptions.ParameterNotFound, json.JSONDecodeError, ): - raise CopilotCodebaseNotFoundError + raise CopilotCodebaseNotFoundError(codebase) def check_image_exists(session, application, codebase, commit): @@ -515,7 +516,7 @@ def check_image_exists(session, application, codebase, commit): 
ecr_client.exceptions.RepositoryNotFoundException, ecr_client.exceptions.ImageNotFoundException, ): - raise ImageNotFoundError + raise ImageNotFoundError(commit) def get_build_url_from_arn(build_arn: str) -> str: diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index b05f3dcf2..2ea4d9c5f 100644 --- a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -20,7 +20,6 @@ from dbt_platform_helper.exceptions import ApplicationNotFoundError from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError from dbt_platform_helper.utils.application import Environment from dbt_platform_helper.utils.git import CommitNotFoundError from tests.platform_helper.conftest import EXPECTED_FILES_DIR @@ -147,22 +146,15 @@ def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_direc os.chdir(tmp_path) Path(tmp_path / "copilot").mkdir() - mocks.subprocess.return_value.stderr = mock_suprocess_fixture() + mocks.subprocess.return_value.stdout = mock_suprocess_fixture() - codebase.prepare() - - mocks.echo_fn.assert_has_calls( - [ - call( - "You are in the deploy repository; make sure you are in the application codebase repository.", - ), - ] - ) + with pytest.raises(NotInCodeBaseRepositoryError): + codebase.prepare() def test_codebase_build_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError() + mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError): @@ -186,7 +178,7 @@ def test_codebase_build_commit_not_found(): codebase.build("not-an-application", "application", "ab1c23d") -def 
test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_directory(tmp_path): +def test_codebase_prepare_raises_not_in_codebase_exception(tmp_path): mocks = CodebaseMocks() mocks.load_application_fn.side_effect = SystemExit(1) @@ -283,7 +275,9 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl def test_codebase_deploy_exception_with_a_nonexistent_codebase(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError())) + mocks = CodebaseMocks( + check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -297,7 +291,9 @@ def test_codebase_deploy_exception_with_a_nonexistent_codebase(): def test_check_codebase_exists_returns_error_when_no_json(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError)) + mocks = CodebaseMocks( + check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -311,7 +307,9 @@ def test_check_codebase_exists_returns_error_when_no_json(): def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): - mocks = CodebaseMocks(check_image_exists_fn=Mock(side_effect=ImageNotFoundError)) + mocks = CodebaseMocks( + check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -326,7 +324,9 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): def test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): - mocks = CodebaseMocks(check_image_exists_fn=Mock(side_effect=ImageNotFoundError)) + mocks = CodebaseMocks( + check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + ) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -374,20 +374,11 @@ def 
test_codebase_deploy_does_not_trigger_build_without_confirmation(): def test_codebase_deploy_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError() + mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: codebase.deploy("not-an-application", "dev", "application", "ab1c23d") - # TODO This assert can probably go now we are catching the errors and outputting them at the command layer - mocks.echo_fn.assert_has_calls( - [ - call( - """The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", - fg="red", - ), - ] - ) def test_codebase_deploy_does_not_trigger_build_with_missing_environment(mock_application): @@ -423,15 +414,15 @@ def test_codebase_deploy_does_not_trigger_deployment_without_confirmation(): def test_codebase_list_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError() + mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: codebase.list("not-an-application", True) -def test_codebase_list_raises_exception_when_no_codebases(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock(side_effect=NoCopilotCodebasesFoundError())) +def test_codebase_list_returns_empty_when_no_codebases(): + mocks = CodebaseMocks(check_codebase_exists_fn=Mock()) client = mock_aws_client(mocks.get_aws_session_or_abort_fn) @@ -439,9 +430,10 @@ def test_codebase_list_raises_exception_when_no_codebases(): "Parameter": {"Value": json.dumps({"name": "application"})}, } - with pytest.raises(NoCopilotCodebasesFoundError): - codebase = Codebase(**mocks.params()) - 
codebase.list("test-application", True) + codebase = Codebase(**mocks.params()) + codebase.list("test-application", True) + + mocks.echo_fn.assert_has_calls([]) def test_lists_codebases_with_multiple_pages_of_images(): diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 385566bd7..52e67ac14 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -1,4 +1,3 @@ -from unittest.mock import MagicMock from unittest.mock import Mock from unittest.mock import call @@ -7,6 +6,7 @@ from dbt_platform_helper.domain.conduit import Conduit from dbt_platform_helper.exceptions import AddonNotFoundError from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import InvalidAddonTypeError from dbt_platform_helper.exceptions import NoClusterError from dbt_platform_helper.exceptions import ParameterNotFoundError @@ -18,64 +18,39 @@ addon_name = "important-db" addon_type = "postgres" env = "development" -cluster_name = "arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1" +cluster_arn = "arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1" task_name = "task_name" addon_name = "custom-name-rds-postgres" class ConduitMocks: - def __init__(self, app_name="test-application", addon_type="postgres", *args, **kwargs): + def __init__(self, app_name="test-application", *args, **kwargs): session = Mock() sessions = {"000000000": session} dummy_application = Application(app_name) dummy_application.environments = {env: Environment(env, "000000000", sessions)} - self.add_stack_delete_policy_to_task_role_fn = kwargs.get( - "add_stack_delete_policy_to_task_role_fn", Mock() - ) - self.get_ecs_task_arns_fn = kwargs.get("get_ecs_task_arns_fn", Mock(return_value=[])) self.application = dummy_application - self.ecs_exec_is_available_fn = kwargs.get("ecs_exec_is_available_fn", 
Mock()) + self.secrets_provider = kwargs.get("secrets_provider", Mock()) + self.cloudformation_provider = kwargs.get("cloudformation_provider", Mock()) + self.ecs_provider = kwargs.get("ecs_provider", Mock()) self.connect_to_addon_client_task_fn = kwargs.get("connect_to_addon_client_task_fn", Mock()) self.create_addon_client_task_fn = kwargs.get("create_addon_client_task_fn", Mock()) self.create_postgres_admin_task_fn = kwargs.get("create_postgres_admin_task_fn", Mock()) self.echo_fn = kwargs.get("echo_fn", Mock()) - self.get_addon_type_fn = kwargs.get("get_addon_type_fn", Mock(return_value=addon_type)) - self.get_cluster_arn_fn = kwargs.get( - "get_cluster_arn_fn", - Mock(return_value="arn:aws:ecs:eu-west-2:123456789012:cluster/MyECSCluster1"), - ) - self.get_or_create_task_name_fn = kwargs.get( - "get_or_create_task_name_fn", Mock(return_value="task_name") - ) - self.get_parameter_name_fn = kwargs.get( - "get_parameter_name", Mock(return_value="parameter_name") - ) self.subprocess = kwargs.get("subprocess", Mock(return_value="task_name")) - self.update_conduit_stack_resources_fn = kwargs.get( - "update_conduit_stack_resources_fn", Mock(return_value=f"task-{task_name}") - ) - self.wait_for_cloudformation_to_reach_status_fn = kwargs.get( - "wait_for_cloudformation_to_reach_status_fn", Mock() - ) def params(self): return { - "add_stack_delete_policy_to_task_role_fn": self.add_stack_delete_policy_to_task_role_fn, - "get_ecs_task_arns_fn": self.get_ecs_task_arns_fn, "application": self.application, - "ecs_exec_is_available_fn": self.ecs_exec_is_available_fn, + "secrets_provider": self.secrets_provider, + "cloudformation_provider": self.cloudformation_provider, + "ecs_provider": self.ecs_provider, "connect_to_addon_client_task_fn": self.connect_to_addon_client_task_fn, "create_addon_client_task_fn": self.create_addon_client_task_fn, "create_postgres_admin_task_fn": self.create_postgres_admin_task_fn, "echo_fn": self.echo_fn, - "get_addon_type_fn": 
self.get_addon_type_fn, - "get_cluster_arn_fn": self.get_cluster_arn_fn, - "get_or_create_task_name_fn": self.get_or_create_task_name_fn, - "get_parameter_name_fn": self.get_parameter_name_fn, "subprocess_fn": self.subprocess, - "update_conduit_stack_resources_fn": self.update_conduit_stack_resources_fn, - "wait_for_cloudformation_to_reach_status_fn": self.wait_for_cloudformation_to_reach_status_fn, } @@ -90,34 +65,39 @@ def params(self): ) def test_conduit(app_name, addon_type, addon_name, access): conduit_mocks = ConduitMocks(app_name, addon_type) + conduit_mocks.cloudformation_provider.update_conduit_stack_resources.return_value = ( + f"task-{task_name}" + ) + conduit_mocks.ecs_provider.get_cluster_arn.return_value = cluster_arn + conduit_mocks.ecs_provider.get_or_create_task_name.return_value = task_name + conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [] + conduit_mocks.secrets_provider.get_parameter_name.return_value = "parameter_name" + conduit_mocks.secrets_provider.get_addon_type.return_value = addon_type conduit = Conduit(**conduit_mocks.params()) + + # Todo: Should be able to lose these during future refactorings ecs_client = conduit.application.environments[env].session.client("ecs") ssm_client = conduit.application.environments[env].session.client("ssm") - cloudformation_client = conduit.application.environments[env].session.client("cloudformation") iam_client = conduit.application.environments[env].session.client("iam") secretsmanager_client = conduit.application.environments[env].session.client("secretsmanager") conduit.start(env, addon_name, access) - conduit.get_ecs_task_arns_fn.assert_has_calls( - [call(ecs_client, cluster_name, task_name), call(ecs_client, cluster_name, task_name)] + conduit.ecs_provider.get_ecs_task_arns.assert_has_calls( + [call(cluster_arn, task_name), call(cluster_arn, task_name)] ) conduit.connect_to_addon_client_task_fn.assert_called_once_with( - ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, 
task_name + ecs_client, conduit.subprocess_fn, app_name, env, cluster_arn, task_name ) - conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) - conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) - conduit.get_or_create_task_name_fn.assert_called_once_with( - ssm_client, app_name, env, addon_name, "parameter_name" + conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) + conduit.ecs_provider.get_cluster_arn.assert_called_once() + conduit.ecs_provider.get_or_create_task_name.assert_called_once_with( + addon_name, "parameter_name" ) - - conduit.add_stack_delete_policy_to_task_role_fn.assert_called_once_with( - cloudformation_client, iam_client, task_name + conduit.cloudformation_provider.add_stack_delete_policy_to_task_role.assert_called_once_with( + task_name ) - conduit.update_conduit_stack_resources_fn.assert_called_once_with( - cloudformation_client, - iam_client, - ssm_client, + conduit.cloudformation_provider.update_conduit_stack_resources.assert_called_once_with( app_name, env, addon_type, @@ -126,8 +106,8 @@ def test_conduit(app_name, addon_type, addon_name, access): "parameter_name", access, ) - conduit.wait_for_cloudformation_to_reach_status_fn.assert_called_once_with( - cloudformation_client, "stack_update_complete", f"task-{task_name}" + conduit.cloudformation_provider.wait_for_cloudformation_to_reach_status.assert_called_once_with( + "stack_update_complete", f"task-{task_name}" ) conduit.create_addon_client_task_fn.assert_called_once_with( iam_client, @@ -141,7 +121,6 @@ def test_conduit(app_name, addon_type, addon_name, access): task_name, access, ) - conduit_mocks.echo_fn.assert_has_calls( [ call("Creating conduit task"), @@ -154,30 +133,31 @@ def test_conduit(app_name, addon_type, addon_name, access): def test_conduit_with_task_already_running(): - conduit_mocks = ConduitMocks( - app_name, - addon_type, - get_ecs_task_arns_fn=MagicMock( - 
return_value=["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] - ), - ) + conduit_mocks = ConduitMocks(app_name, addon_type) + conduit_mocks.ecs_provider.get_cluster_arn.return_value = cluster_arn + conduit_mocks.ecs_provider.get_or_create_task_name.return_value = task_name + conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [ + "arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer" + ] + conduit_mocks.secrets_provider.get_parameter_name.return_value = "parameter_name" + conduit_mocks.secrets_provider.get_addon_type.return_value = "postgres" conduit = Conduit(**conduit_mocks.params()) + # Todo: This client can go during further refactoring ecs_client = conduit.application.environments[env].session.client("ecs") - ssm_client = conduit.application.environments[env].session.client("ssm") conduit.start(env, addon_name, "read") - conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) conduit.connect_to_addon_client_task_fn.assert_called_once_with( - ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name + ecs_client, conduit.subprocess_fn, app_name, env, cluster_arn, task_name ) - conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) - conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) - conduit.get_or_create_task_name_fn.assert_called_once_with( - ssm_client, app_name, env, addon_name, "parameter_name" + conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) + conduit.ecs_provider.get_cluster_arn.assert_called_once() + conduit.ecs_provider.get_or_create_task_name.assert_called_once_with( + addon_name, "parameter_name" ) - conduit.add_stack_delete_policy_to_task_role_fn.assert_not_called() - conduit.update_conduit_stack_resources_fn.assert_not_called() + 
conduit.cloudformation_provider.add_stack_delete_policy_to_task_role.assert_not_called() + conduit.cloudformation_provider.update_conduit_stack_resources.assert_not_called() conduit.create_addon_client_task_fn.assert_not_called() conduit_mocks.echo_fn.assert_has_calls( @@ -191,38 +171,38 @@ def test_conduit_with_task_already_running(): def test_conduit_domain_when_no_cluster_exists(): - conduit_mocks = ConduitMocks( - app_name, addon_type, get_cluster_arn_fn=Mock(side_effect=NoClusterError()) + conduit_mocks = ConduitMocks(app_name, addon_type) + conduit_mocks.ecs_provider.get_cluster_arn.side_effect = NoClusterError( + application_name=app_name, + environment=env, ) conduit = Conduit(**conduit_mocks.params()) - ecs_client = conduit.application.environments[env].session.client("ecs") - ssm_client = conduit.application.environments[env].session.client("ssm") with pytest.raises(NoClusterError): conduit.start(env, addon_name) - conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) - conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) + conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) + conduit.ecs_provider.get_cluster_arn.assert_called_once() def test_conduit_domain_when_no_connection_secret_exists(): conduit_mocks = ConduitMocks( app_name, addon_type, - get_ecs_task_arns_fn=Mock(return_value=False), - create_addon_client_task_fn=Mock(side_effect=SecretNotFoundError()), ) - + conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [] + conduit_mocks.secrets_provider.get_parameter_name.return_value = "parameter_name" + conduit_mocks.create_addon_client_task_fn.side_effect = SecretNotFoundError( + f"/copilot/{app_name}/{env}/secrets/{addon_name}" + ) conduit = Conduit(**conduit_mocks.params()) - ecs_client = conduit.application.environments[env].session.client("ecs") - ssm_client = conduit.application.environments[env].session.client("ssm") with 
pytest.raises(SecretNotFoundError): conduit.start(env, addon_name) - conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) - conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) - conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) - conduit.get_or_create_task_name_fn.assert_called_once_with( - ssm_client, app_name, env, addon_name, "parameter_name" + + conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) + conduit.ecs_provider.get_cluster_arn.assert_called_once() + conduit.ecs_provider.get_or_create_task_name.assert_called_once_with( + addon_name, "parameter_name" ) @@ -230,68 +210,95 @@ def test_conduit_domain_when_client_task_fails_to_start(): conduit_mocks = ConduitMocks( app_name, addon_type, - connect_to_addon_client_task_fn=Mock(side_effect=CreateTaskTimeoutError()), ) + conduit_mocks.connect_to_addon_client_task_fn.side_effect = ( + CreateTaskTimeoutError( + addon_name=addon_name, + application_name=app_name, + environment=env, + ), + ) + conduit = Conduit(**conduit_mocks.params()) - ecs_client = conduit.application.environments[env].session.client("ecs") - ssm_client = conduit.application.environments[env].session.client("ssm") with pytest.raises(CreateTaskTimeoutError): conduit.start(env, addon_name) - conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) conduit.connect_to_addon_client_task_fn.assert_called_once_with( - ecs_client, conduit.subprocess_fn, app_name, env, cluster_name, task_name + conduit.subprocess_fn, app_name, env, cluster_arn, task_name ) - conduit.get_addon_type_fn.assert_called_once_with(ssm_client, app_name, env, addon_name) - conduit.get_cluster_arn_fn.assert_called_once_with(ecs_client, app_name, env) - conduit.get_or_create_task_name_fn.assert_called_once_with( - ssm_client, app_name, env, addon_name, 
"parameter_name" + conduit.secrets_provider.get_addon_type.assert_called_once_with(app_name, env, addon_name) + conduit.ecs_provider.get_cluster_arn.assert_called_once() + conduit.ecs_provider.get_or_create_task_name.assert_called_once_with( + addon_name, "parameter_name" ) conduit.create_addon_client_task_fn.assert_not_called() - conduit.add_stack_delete_policy_to_task_role_fn.assert_not_called() - conduit.update_conduit_stack_resources_fn.assert_not_called() + conduit.cloudformation_provider.add_stack_delete_policy_to_task_role.assert_not_called() + conduit.cloudformation_provider.update_conduit_stack_resources.assert_not_called() def test_conduit_domain_when_addon_type_is_invalid(): addon_name = "invalid_addon" addon_type = "invalid_addon_type" - conduit_mocks = ConduitMocks( - app_name, - addon_type, - get_addon_type_fn=Mock(side_effect=InvalidAddonTypeError(addon_type=addon_type)), - ) + conduit_mocks = ConduitMocks(app_name, addon_type) + + conduit_mocks.secrets_provider.get_addon_type.side_effect = InvalidAddonTypeError(addon_type) conduit = Conduit(**conduit_mocks.params()) - ecs_client = conduit.application.environments[env].session.client("ecs") + conduit.application.environments[env].session.client("ecs") with pytest.raises(InvalidAddonTypeError): conduit.start(env, addon_name) - conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) -def test_conduit_domain_when_addon_does_not_exist(): +def test_start_with_addon_does_not_exist_raises_error(): addon_name = "addon_doesnt_exist" - conduit_mocks = ConduitMocks( - app_name, addon_type, get_addon_type_fn=Mock(side_effect=AddonNotFoundError()) - ) + conduit_mocks = ConduitMocks(app_name, addon_type) + conduit_mocks.secrets_provider.get_addon_type.side_effect = AddonNotFoundError(addon_name) conduit = Conduit(**conduit_mocks.params()) - ecs_client = 
conduit.application.environments[env].session.client("ecs") with pytest.raises(AddonNotFoundError): conduit.start(env, addon_name) - conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) def test_conduit_domain_when_no_addon_config_parameter_exists(): addon_name = "parameter_doesnt_exist" - conduit_mocks = ConduitMocks( - app_name, addon_type, get_addon_type_fn=Mock(side_effect=ParameterNotFoundError()) + conduit_mocks = ConduitMocks(app_name, addon_type) + + conduit_mocks.secrets_provider.get_addon_type.side_effect = ParameterNotFoundError( + application_name=app_name, + environment=env, ) conduit = Conduit(**conduit_mocks.params()) - ecs_client = conduit.application.environments[env].session.client("ecs") + conduit.application.environments[env].session.client("ecs") with pytest.raises(ParameterNotFoundError): conduit.start(env, addon_name) - conduit.get_ecs_task_arns_fn.assert_called_once_with(ecs_client, cluster_name, task_name) + conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) + + +def test_conduit_domain_ecs_exec_agent_does_not_start(): + conduit_mocks = ConduitMocks( + app_name, + addon_type, + ) + + conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [ + "arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN" + ] + conduit_mocks.ecs_provider.ecs_exec_is_available.side_effect = ECSAgentNotRunning() + conduit_mocks.ecs_provider.get_cluster_arn.return_value = cluster_arn + + conduit = Conduit(**conduit_mocks.params()) + conduit.application.environments[env].session.client("ecs") + + with pytest.raises(ECSAgentNotRunning): + conduit.start(env, addon_name) + + conduit.ecs_provider.ecs_exec_is_available.assert_called_once_with( + cluster_arn, + ["arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN"], + ) diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index 4bb2b6853..fd6bf3c11 100644 --- 
a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -296,7 +296,7 @@ def test_database_dump_handles_account_id_errors(is_dump): def test_database_copy_initialization_handles_app_name_errors(): mocks = DataCopyMocks() - mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError()) + mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError("bad-app")) with pytest.raises(SystemExit) as exc: DatabaseCopy("bad-app", "test-db", **mocks.params()) diff --git a/tests/platform_helper/providers/test_cloudformation.py b/tests/platform_helper/providers/test_cloudformation.py index 6d44869f9..0dab33905 100644 --- a/tests/platform_helper/providers/test_cloudformation.py +++ b/tests/platform_helper/providers/test_cloudformation.py @@ -4,16 +4,12 @@ import boto3 import pytest +from botocore.exceptions import WaiterError from cfn_tools import load_yaml from moto import mock_aws -from dbt_platform_helper.providers.cloudformation import ( - add_stack_delete_policy_to_task_role, -) -from dbt_platform_helper.providers.cloudformation import update_conduit_stack_resources -from dbt_platform_helper.providers.cloudformation import ( - wait_for_cloudformation_to_reach_status, -) +from dbt_platform_helper.exceptions import CloudFormationException +from dbt_platform_helper.providers.cloudformation import CloudFormation from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @@ -60,21 +56,17 @@ def test_update_conduit_stack_resources( iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - update_conduit_stack_resources( - cloudformation_client, - iam_client, - ssm_client, - mock_application.name, - env, - addon_type, - addon_name, - task_name, - parameter_name, - "read", + cloudformation = CloudFormation(cloudformation_client, iam_client, ssm_client) + + 
cloudformation.update_conduit_stack_resources( + mock_application.name, env, addon_type, addon_name, task_name, parameter_name, "read" ) template = boto3.client("cloudformation").get_template(StackName=f"task-{task_name}") + stack = boto3.client("cloudformation").describe_stacks(StackName=f"task-{task_name}") template_yml = load_yaml(template["TemplateBody"]) + + assert stack["Stacks"][0]["Parameters"][0]["ParameterValue"] == "does-not-matter" assert template_yml["Resources"]["LogGroup"]["DeletionPolicy"] == "Retain" assert template_yml["Resources"]["TaskNameParameter"]["Properties"]["Name"] == parameter_name assert ( @@ -97,7 +89,7 @@ def test_update_conduit_stack_resources( ) @patch("time.sleep", return_value=None) def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, mock_application): - """Test that, given app, env and addon name + """Test that, given app, env and addon name, add_stack_delete_policy_to_task_role adds a policy to the IAM role in a CloudFormation stack.""" @@ -118,7 +110,9 @@ def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, moc ], } - add_stack_delete_policy_to_task_role(cloudformation_client, iam_client, task_name) + cloudformation = CloudFormation(cloudformation_client, iam_client, None) + + cloudformation.add_stack_delete_policy_to_task_role(task_name) stack_resources = boto3.client("cloudformation").list_stack_resources(StackName=stack_name)[ "StackResourceSummaries" @@ -138,17 +132,41 @@ def test_add_stack_delete_policy_to_task_role(sleep, mock_stack, addon_name, moc assert policy_document == mock_policy -def test_wait_for_cloudformation_to_reach_status(): - +@mock_aws +def test_wait_for_cloudformation_with_no_success_raises_exception(): cloudformation_client = Mock() - mock_return = Mock() - mock_waiter = Mock(return_value=mock_return) - cloudformation_client.get_waiter = mock_waiter + waiter_mock = Mock() + cloudformation_client.get_waiter = Mock(return_value=waiter_mock) - 
wait_for_cloudformation_to_reach_status( - cloudformation_client, "stack_update_complete", "task-stack-name" + waiter_error = WaiterError( + "Waiter StackUpdatecomplete failed", + "Fail!!", + {"Stacks": [{"StackStatus": "ROLLBACK_IN_PROGRESS"}]}, ) - mock_waiter.assert_called() - mock_return.wait.assert_called_with( - StackName="task-stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} + waiter_mock.wait.side_effect = waiter_error + + cloudformation = CloudFormation(cloudformation_client, None, None) + + with pytest.raises( + CloudFormationException, + match="The CloudFormation stack 'stack-name' is not in a good state: ROLLBACK_IN_PROGRESS", + ): + cloudformation.wait_for_cloudformation_to_reach_status( + "stack_update_complete", "stack-name" + ) + + +@mock_aws +def test_wait_for_cloudformation_with_update_complete(): + cloudformation_client = Mock() + waiter_mock = Mock() + cloudformation_client.get_waiter = Mock(return_value=waiter_mock) + waiter_mock.wait.return_value = None + + cloudformation = CloudFormation(cloudformation_client, None, None) + + cloudformation.wait_for_cloudformation_to_reach_status("stack_update_complete", "stack-name") + + waiter_mock.wait.assert_called_with( + StackName="stack-name", WaiterConfig={"Delay": 5, "MaxAttempts": 20} ) diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index 671b45f14..9b9f27d4d 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -1,4 +1,3 @@ -import json from unittest.mock import Mock from unittest.mock import patch @@ -7,53 +6,29 @@ from botocore.exceptions import ClientError from moto import mock_aws -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import ParameterNotFoundError +from 
dbt_platform_helper.exceptions import SecretNotFoundError from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task -from dbt_platform_helper.providers.secrets import SecretNotFoundError -from dbt_platform_helper.providers.secrets import ( - _normalise_secret_name as normalise_secret_name, -) -from dbt_platform_helper.providers.secrets import get_addon_type -from dbt_platform_helper.providers.secrets import get_parameter_name from tests.platform_helper.conftest import NoSuchEntityException -from tests.platform_helper.conftest import add_addon_config_parameter from tests.platform_helper.conftest import expected_connection_secret_name -from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name env = "development" -@pytest.mark.parametrize( - "test_string", - [ - ("app-rds-postgres", "APP_RDS_POSTGRES"), - ("APP-POSTGRES", "APP_POSTGRES"), - ("APP-OpenSearch", "APP_OPENSEARCH"), - ], -) -def test_normalise_secret_name(test_string): - """Test that given an addon name, normalise_secret_name produces the - expected result.""" - - assert normalise_secret_name(test_string[0]) == test_string[1] - - @mock_aws @patch( # Nested function within provider function - "dbt_platform_helper.providers.copilot.get_postgres_connection_data_updated_with_master_secret", + "dbt_platform_helper.providers.secrets.Secrets.get_postgres_connection_data_updated_with_master_secret", return_value="connection string", ) def test_create_postgres_admin_task(mock_update_parameter, mock_application): addon_name = "dummy-postgres" - master_secret_name = f"/copilot/{mock_application.name}/{env}/secrets/{normalise_secret_name(addon_name)}_RDS_MASTER_ARN" + master_secret_name = ( + 
f"/copilot/{mock_application.name}/{env}/secrets/DUMMY_POSTGRES_RDS_MASTER_ARN" + ) ssm_client = mock_application.environments[env].session.client("ssm") secrets_manager_client = mock_application.environments[env].session.client("secretsmanager") @@ -75,8 +50,6 @@ def test_create_postgres_admin_task(mock_update_parameter, mock_application): ) mock_update_parameter.assert_called_once_with( - ssm_client, - secrets_manager_client, "POSTGRES_SECRET_NAME_READ_ONLY_USER", "master-secret-arn", ) @@ -107,7 +80,10 @@ def test_create_postgres_admin_task(mock_update_parameter, mock_application): ("opensearch", "custom-name-opensearch"), ], ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch( + "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", + return_value="test-arn", +) def test_create_redis_or_opensearch_addon_client_task( get_connection_secret_arn, access, @@ -141,10 +117,6 @@ def test_create_redis_or_opensearch_addon_client_task( access, ) - secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) - get_connection_secret_arn.assert_called_once_with( - ssm_client, secretsmanager_client, secret_name - ) mock_subprocess.call.assert_called() mock_subprocess.call.assert_called_once_with( f"copilot task run --app test-application --env {env} " @@ -165,7 +137,10 @@ def test_create_redis_or_opensearch_addon_client_task( "write", ], ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch( + "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", + return_value="test-arn", +) def test_create_postgres_addon_client_task( get_connection_secret_arn, access, @@ -196,9 +171,7 @@ def test_create_postgres_addon_client_task( access, ) secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) - get_connection_secret_arn.assert_called_once_with( - ssm_client, 
secretsmanager_client, secret_name - ) + get_connection_secret_arn.assert_called_once_with(secret_name) mock_subprocess.call.assert_called() mock_subprocess.call.assert_called_once_with( f"copilot task run --app test-application --env {env} " @@ -252,7 +225,10 @@ def test_create_postgres_addon_client_task_admin( ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") +@patch( + "dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn", + return_value="test-arn", +) def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( get_connection_secret_arn, mock_application, @@ -289,9 +265,7 @@ def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( ) secret_name = expected_connection_secret_name(mock_application, addon_type, addon_name, access) - get_connection_secret_arn.assert_called_once_with( - ssm_client, secretsmanager_client, secret_name - ) + get_connection_secret_arn.assert_called_once_with(secret_name) mock_subprocess.call.assert_called_once_with( f"copilot task run --app test-application --env {env} " @@ -304,11 +278,9 @@ def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn", return_value="test-arn") @patch("click.secho") def test_create_addon_client_task_abort_with_message_on_other_exceptions( mock_secho, - get_connection_secret_arn, mock_application, ): """Test that if an unexpected ClientError is throw when trying to get the @@ -353,7 +325,7 @@ def test_create_addon_client_task_abort_with_message_on_other_exceptions( ) -@patch("dbt_platform_helper.providers.copilot.get_connection_secret_arn") +@patch("dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn") def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn): """Test that, given app, environment and secret name strings, create_addon_client_task raises a 
NoConnectionSecretError and does not call @@ -367,7 +339,9 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn ssm_client = mock_application.environments[env].session.client("ssm") secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") - get_connection_secret_arn.side_effect = SecretNotFoundError + get_connection_secret_arn.side_effect = SecretNotFoundError( + "/copilot/test-application/development/secrets/named-postgres" + ) with pytest.raises(SecretNotFoundError): create_addon_client_task( @@ -386,35 +360,6 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn mock_subprocess.call.assert_not_called() -@mock_aws -@pytest.mark.parametrize( - "access", - [ - "read", - "write", - "admin", - ], -) -@pytest.mark.parametrize( - "addon_type, addon_name", - [ - ("postgres", "custom-name-postgres"), - ("postgres", "custom-name-rds-postgres"), - ("redis", "custom-name-redis"), - ("opensearch", "custom-name-opensearch"), - ("s3", "custon-name-s3"), - ], -) -def test_get_parameter_name(access, addon_type, addon_name, mock_application): - """Test that get_parameter_name builds the correct parameter name given the - addon_name, addon_type and permission.""" - - parameter_name = get_parameter_name( - mock_application.name, "development", addon_type, addon_name, access - ) - assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) - - @pytest.mark.parametrize( "addon_type", ["postgres", "redis", "opensearch"], @@ -453,22 +398,6 @@ def test_connect_to_addon_client_task(addon_type, mock_application): ) -# Todo: Implement a test to cover the desired behaviour -# @patch("dbt_platform_helper.providers.copilot.addon_client_is_running", return_value=True) -# def test_connect_to_addon_client_task_waits_for_command_agent(addon_client_is_running, mock_application): -# task_name = mock_task_name("postgres") # Addon type for this test does not matter -# 
ecs_client = mock_application.environments[env].session.client("ecs") -# mock_subprocess = Mock() -# # We want this to throw InvalidParameterException the first time, then behave as normal -# -# connect_to_addon_client_task( -# ecs_client, mock_subprocess, mock_application.name, env, "test-arn", task_name -# ) -# -# # Assert "Unable to connect, execute command agent probably isn’t running yet" in output -# # If it doesn't bomb out with CreateTaskTimeoutError all is good - - @pytest.mark.parametrize( "addon_type", ["postgres", "redis", "opensearch"], @@ -485,7 +414,7 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( task_name = mock_task_name(addon_type) ecs_client = mock_application.environments[env].session.client("ecs") mock_subprocess = Mock() - addon_client_is_running = Mock(return_value=False) + get_ecs_task_arns = Mock(return_value=[]) with pytest.raises(CreateTaskTimeoutError): connect_to_addon_client_task( @@ -495,95 +424,9 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( env, "test-arn", task_name, - addon_client_is_running_fn=addon_client_is_running, + get_ecs_task_arns_fn=get_ecs_task_arns, ) - addon_client_is_running.assert_called_with(ecs_client, "test-arn", task_name) - assert addon_client_is_running.call_count == 15 + get_ecs_task_arns.assert_called_with(ecs_client, "test-arn", task_name) + assert get_ecs_task_arns.call_count == 15 mock_subprocess.call.assert_not_called() - - -@mock_aws -@pytest.mark.parametrize( - "addon_name, expected_type", - [ - ("custom-name-postgres", "postgres"), - ("custom-name-redis", "redis"), - ("custom-name-opensearch", "opensearch"), - ], -) -def test_get_addon_type(addon_name, expected_type, mock_application): - """Test that get_addon_type returns the expected addon type.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - - add_addon_config_parameter() - addon_type = get_addon_type(ssm_client, mock_application.name, env, addon_name) 
- - assert addon_type == expected_type - - -@mock_aws -def test_get_addon_type_with_not_found_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the addon is not - found in the config file.""" - - add_addon_config_parameter({"different-name": {"type": "redis"}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with pytest.raises(AddonNotFoundError): - get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") - - -@mock_aws -def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the addon config - parameter is not found.""" - - ssm_client = mock_application.environments[env].session.client("ssm") - - mock_ssm = boto3.client("ssm") - mock_ssm.put_parameter( - Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", - Type="String", - Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), - ) - - with pytest.raises(ParameterNotFoundError): - get_addon_type(ssm_client, mock_application.name, env, "custom-name-postgres") - - -@mock_aws -def test_get_addon_type_with_invalid_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an invalid addon type.""" - - add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with pytest.raises(InvalidAddonTypeError): - get_addon_type(ssm_client, mock_application.name, env, "invalid-extension") - - -@mock_aws -def test_get_addon_type_with_blank_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an empty addon type.""" - - add_addon_config_parameter(param_value={"blank-extension": {}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with 
pytest.raises(AddonTypeMissingFromConfigError): - get_addon_type(ssm_client, mock_application.name, env, "blank-extension") - - -@mock_aws -def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): - """Test that get_addon_type raises the expected error when the config - contains an empty addon type.""" - - add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) - ssm_client = mock_application.environments[env].session.client("ssm") - - with pytest.raises(AddonTypeMissingFromConfigError): - get_addon_type(ssm_client, mock_application.name, env, "addon-type-unspecified") diff --git a/tests/platform_helper/providers/test_ecs.py b/tests/platform_helper/providers/test_ecs.py index 2d7c96a55..1bb15b022 100644 --- a/tests/platform_helper/providers/test_ecs.py +++ b/tests/platform_helper/providers/test_ecs.py @@ -6,55 +6,66 @@ from dbt_platform_helper.exceptions import ECSAgentNotRunning from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.providers.ecs import ecs_exec_is_available -from dbt_platform_helper.providers.ecs import get_cluster_arn -from dbt_platform_helper.providers.ecs import get_ecs_task_arns -from dbt_platform_helper.providers.ecs import get_or_create_task_name +from dbt_platform_helper.providers.ecs import ECS from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @mock_aws def test_get_cluster_arn(mocked_cluster, mock_application): - assert ( - get_cluster_arn( - mock_application.environments["development"].session.client("ecs"), - mock_application.name, - "development", - ) - == mocked_cluster["cluster"]["clusterArn"] - ) + ecs_client = mock_application.environments["development"].session.client("ecs") + ssm_client = mock_application.environments["development"].session.client("ssm") + application_name = mock_application.name + env = "development" + ecs_manager = ECS(ecs_client, ssm_client, application_name, 
env) + + cluster_arn = ecs_manager.get_cluster_arn() + + assert cluster_arn == mocked_cluster["cluster"]["clusterArn"] @mock_aws def test_get_cluster_arn_with_no_cluster_raises_error(mock_application): + ecs_client = mock_application.environments["development"].session.client("ecs") + ssm_client = mock_application.environments["development"].session.client("ssm") + application_name = mock_application.name + env = "does-not-exist" + + ecs_manager = ECS(ecs_client, ssm_client, application_name, env) + with pytest.raises(NoClusterError): - get_cluster_arn( - mock_application.environments["development"].session.client("ecs"), - mock_application.name, - "does-not-exist", - ) + ecs_manager.get_cluster_arn() +@mock_aws def test_get_ecs_task_arns_with_running_task( mock_cluster_client_task, mocked_cluster, mock_application ): - addon_type = "redis" mock_cluster_client_task(addon_type) mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] ecs_client = mock_application.environments["development"].session.client("ecs") - - assert get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type)) + ecs_manager = ECS( + ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + assert ecs_manager.get_ecs_task_arns(mocked_cluster_arn, mock_task_name(addon_type)) +@mock_aws def test_get_ecs_task_arns_with_no_running_task(mocked_cluster, mock_application): - addon_type = "opensearch" mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] ecs_client = mock_application.environments["development"].session.client("ecs") - - assert len(get_ecs_task_arns(ecs_client, mocked_cluster_arn, mock_task_name(addon_type))) is 0 + ecs_manager = ECS( + ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + assert len(ecs_manager.get_ecs_task_arns(mocked_cluster_arn, mock_task_name(addon_type))) == 0 @mock_aws @@ -65,8 +76,6 @@ def 
test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_applicatio ec2 = boto3.resource("ec2") vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16") subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18") - - # create unrelated task mocked_task_definition_arn = ecs_client.register_task_definition( family=f"other-task", requiresCompatibilities=["FARGATE"], @@ -91,43 +100,51 @@ def test_get_ecs_task_arns_does_not_return_arns_from_other_tasks(mock_applicatio } }, ) - - assert len(get_ecs_task_arns(ecs_client, cluster_arn, task_name)) is 0 + ecs_manager = ECS( + ecs_client, + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + assert len(ecs_manager.get_ecs_task_arns(cluster_arn, task_name)) == 0 +@mock_aws def test_ecs_exec_is_available(mock_cluster_client_task, mocked_cluster, mock_application): - - # use mock ecs_client as describe_tasks is overriden mocked_ecs_client = mock_cluster_client_task("postgres") mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - - ecs_exec_is_available( + ecs_manager = ECS( mocked_ecs_client, - mocked_cluster_arn, - ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"], + mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) + ecs_manager.ecs_exec_is_available( + mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] ) @patch("time.sleep", return_value=None) -def test_test_ecs_exec_is_available_with_exec_not_running_raises_exception( +@mock_aws +def test_ecs_exec_is_available_with_exec_not_running_raises_exception( sleep, mock_cluster_client_task, mocked_cluster, mock_application ): - - # use mock ecs_client as describe_tasks is overriden mocked_ecs_client = mock_cluster_client_task("postgres", "PENDING") mocked_cluster_arn = mocked_cluster["cluster"]["clusterArn"] - + ecs_manager = ECS( + mocked_ecs_client, + 
mock_application.environments["development"].session.client("ssm"), + mock_application.name, + "development", + ) with pytest.raises(ECSAgentNotRunning): - ecs_exec_is_available( - mocked_ecs_client, - mocked_cluster_arn, - ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"], + ecs_manager.ecs_exec_is_available( + mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] ) @mock_aws def test_get_or_create_task_name(mock_application): - addon_name = "app-postgres" parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) mock_application.environments["development"].session.client("ssm") @@ -137,23 +154,24 @@ def test_get_or_create_task_name(mock_application): Type="String", Value=mock_task_name(addon_name), ) - - task_name = get_or_create_task_name( - mock_ssm, mock_application.name, "development", addon_name, parameter_name + ecs_manager = ECS( + mock_application.environments["development"].session.client("ecs"), + mock_ssm, + mock_application.name, + "development", ) - + task_name = ecs_manager.get_or_create_task_name(addon_name, parameter_name) assert task_name == mock_task_name(addon_name) @mock_aws def test_get_or_create_task_name_appends_random_id(mock_application): - addon_name = "app-postgres" ssm_client = mock_application.environments["development"].session.client("ssm") parameter_name = mock_parameter_name(mock_application, "postgres", addon_name) - task_name = get_or_create_task_name( - ssm_client, mock_application.name, "development", addon_name, parameter_name - ) + ecs_manager = ECS(ssm_client, ssm_client, mock_application.name, "development") + + task_name = ecs_manager.get_or_create_task_name(addon_name, parameter_name) random_id = task_name.rsplit("-", 1)[1] assert task_name.rsplit("-", 1)[0] == mock_task_name("app-postgres").rsplit("-", 1)[0] diff --git a/tests/platform_helper/providers/test_secrets.py b/tests/platform_helper/providers/test_secrets.py index cea50b10b..43f716398 100644 --- 
a/tests/platform_helper/providers/test_secrets.py +++ b/tests/platform_helper/providers/test_secrets.py @@ -1,16 +1,40 @@ +import json + import boto3 import pytest from moto import mock_aws -from dbt_platform_helper.providers.copilot import ( - get_postgres_connection_data_updated_with_master_secret, -) -from dbt_platform_helper.providers.secrets import SecretNotFoundError -from dbt_platform_helper.providers.secrets import get_connection_secret_arn +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.providers.secrets import Secrets +from tests.platform_helper.conftest import add_addon_config_parameter +from tests.platform_helper.conftest import mock_parameter_name env = "development" +@pytest.mark.parametrize( + "test_string", + [ + ("app-rds-postgres", "APP_RDS_POSTGRES"), + ("APP-POSTGRES", "APP_POSTGRES"), + ("APP-OpenSearch", "APP_OPENSEARCH"), + ], +) +def test_normalise_secret_name(test_string, mock_application): + """Test that given an addon name, normalise_secret_name produces the + expected result.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + assert secrets_manager._normalise_secret_name(test_string[0]) == test_string[1] + + @mock_aws def test_get_connection_secret_arn_from_secrets_manager(mock_application): """Test that, given app, environment and secret name strings, @@ -26,7 +50,9 @@ def test_get_connection_secret_arn_from_secrets_manager(mock_application): ssm_client = mock_application.environments[env].session.client("ssm") secrets_client = 
mock_application.environments[env].session.client("secretsmanager") - arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + arn = secrets_manager.get_connection_secret_arn(secret_name) assert arn.startswith( "arn:aws:secretsmanager:eu-west-2:123456789012:secret:" @@ -49,7 +75,9 @@ def test_get_connection_secret_arn_from_parameter_store(mock_application): Type="SecureString", ) - arn = get_connection_secret_arn(ssm_client, secrets_client, secret_name) + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + arn = secrets_manager.get_connection_secret_arn(secret_name) assert ( arn @@ -65,18 +93,19 @@ def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): ssm_client = mock_application.environments[env].session.client("ssm") secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(SecretNotFoundError): - get_connection_secret_arn(ssm_client, secrets_client, "POSTGRES") + with pytest.raises(SecretNotFoundError) as ex: + secrets_manager.get_connection_secret_arn("POSTGRES") @mock_aws -def test_update_postgres_parameter_with_master_secret(): +def test_update_postgres_parameter_with_master_secret(mock_application): session = boto3.session.Session() parameter_name = "test-parameter" ssm_client = session.client("ssm") - secretsmanager_client = session.client("secretsmanager") + session.client("secretsmanager") ssm_client.put_parameter( Name=parameter_name, Value='{"username": "read-only-user", "password": ">G12345", "host": "test.com", "port": 5432}', @@ -86,8 +115,14 @@ def test_update_postgres_parameter_with_master_secret(): Name="master-secret", SecretString='{"username": "postgres", "password": ">G6789"}' )["ARN"] - updated_parameter_value = 
get_postgres_connection_data_updated_with_master_secret( - ssm_client, secretsmanager_client, parameter_name, secret_arn + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + updated_parameter_value = ( + secrets_manager.get_postgres_connection_data_updated_with_master_secret( + parameter_name, secret_arn + ) ) assert updated_parameter_value == { @@ -96,3 +131,131 @@ def test_update_postgres_parameter_with_master_secret(): "host": "test.com", "port": 5432, } + + +@mock_aws +@pytest.mark.parametrize( + "addon_name, expected_type", + [ + ("custom-name-postgres", "postgres"), + ("custom-name-redis", "redis"), + ("custom-name-opensearch", "opensearch"), + ], +) +def test_get_addon_type(addon_name, expected_type, mock_application): + """Test that get_addon_type returns the expected addon type.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + add_addon_config_parameter() + addon_type = secrets_manager.get_addon_type(addon_name) + + assert addon_type == expected_type + + +@mock_aws +def test_get_addon_type_with_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon is not + found in the config file.""" + + add_addon_config_parameter({"different-name": {"type": "redis"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(AddonNotFoundError): + secrets_manager.get_addon_type("custom-name-postgres") + + +@mock_aws 
+def test_get_addon_type_with_parameter_not_found_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the addon config + parameter is not found.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + mock_ssm = boto3.client("ssm") + mock_ssm.put_parameter( + Name=f"/copilot/applications/test-application/environments/development/invalid-parameter", + Type="String", + Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), + ) + + with pytest.raises(ParameterNotFoundError): + secrets_manager.get_addon_type("custom-name-postgres") + + +@mock_aws +def test_get_addon_type_with_invalid_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an invalid addon type.""" + + add_addon_config_parameter(param_value={"invalid-extension": {"type": "invalid"}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(InvalidAddonTypeError): + secrets_manager.get_addon_type("invalid-extension") + + +@mock_aws +def test_get_addon_type_with_blank_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"blank-extension": {}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(AddonTypeMissingFromConfigError): + 
secrets_manager.get_addon_type("blank-extension") + + +@mock_aws +def test_get_addon_type_with_unspecified_type_throws_exception(mock_application): + """Test that get_addon_type raises the expected error when the config + contains an empty addon type.""" + + add_addon_config_parameter(param_value={"addon-type-unspecified": {"type": None}}) + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + + with pytest.raises(AddonTypeMissingFromConfigError): + secrets_manager.get_addon_type("addon-type-unspecified") + + +@mock_aws +@pytest.mark.parametrize( + "access", + [ + "read", + "write", + "admin", + ], +) +@pytest.mark.parametrize( + "addon_type, addon_name", + [ + ("postgres", "custom-name-postgres"), + ("postgres", "custom-name-rds-postgres"), + ("redis", "custom-name-redis"), + ("opensearch", "custom-name-opensearch"), + ("s3", "custon-name-s3"), + ], +) +def test_get_parameter_name(access, addon_type, addon_name, mock_application): + """Test that get_parameter_name builds the correct parameter name given the + addon_name, addon_type and permission.""" + + ssm_client = mock_application.environments[env].session.client("ssm") + secrets_client = mock_application.environments[env].session.client("secretsmanager") + secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) + parameter_name = secrets_manager.get_parameter_name(addon_type, addon_name, access) + assert parameter_name == mock_parameter_name(mock_application, addon_type, addon_name, access) diff --git a/tests/platform_helper/test_command_codebase.py b/tests/platform_helper/test_command_codebase.py index 137b4c038..894b746f7 100644 --- a/tests/platform_helper/test_command_codebase.py +++ b/tests/platform_helper/test_command_codebase.py @@ -45,8 +45,6 @@ def 
test_aborts_when_not_in_a_codebase_repository(self, mock_click, mock_codebas result = CliRunner().invoke(prepare_command) - expected_message = "You are in the deploy repository; make sure you are in the application codebase repository." - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -72,8 +70,6 @@ def test_codebase_build_does_not_trigger_build_without_an_application( "ab1c23d", ], ) - expected_message = f"""The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -101,8 +97,7 @@ def test_codebase_build_aborts_with_a_nonexistent_commit_hash( mock_codebase_object_instance.build.assert_called_once_with( "test-application", "application", "nonexistent-commit-hash" ) - expected_message = f"""The commit hash "nonexistent-commit-hash" either does not exist or you need to run `git fetch`.""" - mock_click.assert_called_with(expected_message, fg="red") + assert result.exit_code == 1 @@ -156,8 +151,6 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository_or_image_tag mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "development", "application", "nonexistent-commit-hash" ) - expected_message = f"""The commit hash "nonexistent-commit-hash" has not been built into an image, try the `platform-helper codebase build` command first.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -186,8 +179,6 @@ def test_codebase_deploy_does_not_trigger_build_without_an_application( mock_codebase_object_instance.deploy.assert_called_once_with( "not-an-application", "dev", "application", "ab1c23d" ) - expected_message = f"""The account "foo" does not contain the application "not-an-application"; ensure you have set the 
environment variable "AWS_PROFILE" correctly.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -216,8 +207,6 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_environment( mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "not-an-environment", "application", "ab1c23d" ) - expected_message = f"""The environment "not-an-environment" either does not exist or has not been deployed.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -246,10 +235,6 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_codebase( mock_codebase_object_instance.deploy.assert_called_once_with( "test-application", "test-environment", "not-a-codebase", "ab1c23d" ) - expected_message = ( - f"""The codebase "not-a-codebase" either does not exist or has not been deployed.""" - ) - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @@ -273,8 +258,6 @@ def test_list_aborts_when_application_has_no_codebases(self, mock_click, mock_co result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) - expected_message = f"""No codebases found for application "test-application""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 1 @patch("dbt_platform_helper.commands.codebase.Codebase") @@ -286,9 +269,6 @@ def test_aborts_when_application_does_not_exist(self, mock_click, mock_codebase_ result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) - app = "test-application" - expected_message = f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{app}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" - mock_click.assert_called_with(expected_message, fg="red") assert result.exit_code == 
1 diff --git a/tests/platform_helper/test_command_conduit.py b/tests/platform_helper/test_command_conduit.py index c5c9990b5..880486335 100644 --- a/tests/platform_helper/test_command_conduit.py +++ b/tests/platform_helper/test_command_conduit.py @@ -5,13 +5,7 @@ from click.testing import CliRunner from dbt_platform_helper.commands.conduit import conduit -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError -from dbt_platform_helper.providers.secrets import SecretNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError @pytest.mark.parametrize( @@ -52,42 +46,6 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida mock_conduit_instance.start.assert_called_with("development", addon_name, "read") -@pytest.mark.parametrize( - "exception_type,exception_input_params,expected_message", - [ - ( - SecretNotFoundError, - {}, - """No secret called "" for "test-application" in "development" environment.""", - ), - (AddonNotFoundError, {}, """Addon "important-db" does not exist."""), - ( - CreateTaskTimeoutError, - {}, - """Client (important-db) ECS task has failed to start for "test-application" in "development" environment.""", - ), - ( - NoClusterError, - {}, - """No ECS cluster found for "test-application" in "development" environment.""", - ), - ( - ParameterNotFoundError, - {}, - """No parameter called "/copilot/applications/test-application/environments/development/addons". 
Try deploying the "test-application" "development" environment.""", - ), - ( - InvalidAddonTypeError, - {"addon_type": "fake-postgres"}, - """Addon type "fake-postgres" is not supported, we support: opensearch, postgres, redis.""", - ), - ( - AddonTypeMissingFromConfigError, - {}, - """The configuration for the addon important-db, is missconfigured and missing the addon type.""", - ), - ], -) @patch("dbt_platform_helper.commands.conduit.Conduit") @patch( "dbt_platform_helper.utils.versioning.running_as_installed_package", @@ -95,20 +53,15 @@ def test_start_conduit(mock_application, mock_conduit_object, addon_name, valida ) @patch("dbt_platform_helper.commands.conduit.load_application") @patch("click.secho") -def test_start_conduit_exception_is_raised( +def test_start_conduit_with_exception_raised_exit_1( mock_click, mock_application, mock_conduit_object, validate_version, - exception_type, - exception_input_params, - expected_message, ): - """Test that given an app, env and addon name strings, the conduit command - calls start_conduit with app, env, addon type and addon name.""" mock_conduit_instance = mock_conduit_object.return_value - mock_conduit_instance.start.side_effect = exception_type(**exception_input_params) + mock_conduit_instance.start.side_effect = SecretNotFoundError(secret_name="test-secret") addon_name = "important-db" result = CliRunner().invoke( conduit, @@ -121,7 +74,7 @@ def test_start_conduit_exception_is_raised( ], ) - mock_click.assert_called_with(expected_message, fg="red") + mock_click.assert_called_with("""No secret called "test-secret".""", fg="red") assert result.exit_code == 1 diff --git a/tests/platform_helper/test_exceptions.py b/tests/platform_helper/test_exceptions.py new file mode 100644 index 000000000..7c7d7a8d3 --- /dev/null +++ b/tests/platform_helper/test_exceptions.py @@ -0,0 +1,110 @@ +import os + +import pytest + +from dbt_platform_helper.exceptions import AddonNotFoundError +from dbt_platform_helper.exceptions import 
AddonTypeMissingFromConfigError +from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered +from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError +from dbt_platform_helper.exceptions import ApplicationNotFoundError +from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError +from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.exceptions import ECSAgentNotRunning +from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.exceptions import InvalidAddonTypeError +from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError +from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError +from dbt_platform_helper.exceptions import ParameterNotFoundError +from dbt_platform_helper.exceptions import SecretNotFoundError + + +@pytest.mark.parametrize( + "exception, exception_params, expected_message", + [ + ( + AddonNotFoundError, + {"addon_name": "test-addon"}, + """Addon "test-addon" does not exist.""", + ), + ( + AddonTypeMissingFromConfigError, + {"addon_name": "test-addon"}, + """The configuration for the addon test-addon, is misconfigured and missing the addon type.""", + ), + ( + ApplicationDeploymentNotTriggered, + {"codebase": "test-codebase"}, + """Your deployment for test-codebase was not triggered.""", + ), + ( + ApplicationEnvironmentNotFoundError, + {"environment": "development"}, + """The environment "development" either does not exist or has not been deployed.""", + ), + ( + ApplicationNotFoundError, + {"application_name": "test-application"}, + """The account "foo" does not contain the application "test-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", + ), + ( + CopilotCodebaseNotFoundError, + {"codebase": "test-codebase-exists"}, + """The codebase "test-codebase-exists" either does not exist or has not been 
deployed.""", + ), + ( + CreateTaskTimeoutError, + { + "addon_name": "test-addon", + "application_name": "test-application", + "environment": "environment", + }, + """Client (test-addon) ECS task has failed to start for "test-application" in "environment" environment.""", + ), + ( + InvalidAddonTypeError, + {"addon_type": "test-addon-type"}, + """Addon type "test-addon-type" is not supported, we support: opensearch, postgres, redis.""", + ), + ( + ImageNotFoundError, + {"commit": "test-commit-hash"}, + """The commit hash "test-commit-hash" has not been built into an image, try the `platform-helper codebase build` command first.""", + ), + ( + NoCopilotCodebasesFoundError, + {"application_name": "test-application"}, + """No codebases found for application "test-application".""", + ), + ( + NoClusterError, + {"application_name": "test-application", "environment": "environment"}, + """No ECS cluster found for "test-application" in "environment" environment.""", + ), + ( + NotInCodeBaseRepositoryError, + {}, + """You are in the deploy repository; make sure you are in the application codebase repository.""", + ), + ( + ParameterNotFoundError, + {"application_name": "test-application", "environment": "environment"}, + """No parameter called "/copilot/applications/test-application/environments/environment/addons". 
Try deploying the "test-application" "environment" environment.""", + ), + ( + SecretNotFoundError, + {"secret_name": "test-secret"}, + """No secret called "test-secret".""", + ), + ( + ECSAgentNotRunning, + {}, + """ECS exec agent never reached "RUNNING" status""", + ), + ], +) +def test_exception_message(exception, exception_params, expected_message): + os.environ["AWS_PROFILE"] = "foo" + + exception = exception(**exception_params) + assert str(exception) == expected_message From 91d8397e68315b1fca43143f79c195be736674e3 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Tue, 3 Dec 2024 19:19:05 +0000 Subject: [PATCH 24/38] chore: DBTP-1585 Drop the _fn suffix from class variable names (#673) --- dbt_platform_helper/domain/codebase.py | 108 +++++++------- dbt_platform_helper/domain/conduit.py | 42 +++--- dbt_platform_helper/domain/database_copy.py | 56 ++++---- dbt_platform_helper/providers/copilot.py | 4 +- dbt_platform_helper/utils/aws.py | 8 +- dbt_platform_helper/utils/validation.py | 12 +- tests/platform_helper/domain/test_codebase.py | 132 +++++++++--------- tests/platform_helper/domain/test_conduit.py | 46 +++--- .../domain/test_database_copy.py | 104 +++++++------- .../platform_helper/providers/test_copilot.py | 2 +- tests/platform_helper/utils/test_aws.py | 2 +- .../platform_helper/utils/test_validation.py | 2 +- tests/utils/test_check_pypi.py | 4 +- utils/check_pypi.py | 4 +- 14 files changed, 262 insertions(+), 264 deletions(-) diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index 053a25a60..6f909cf11 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -28,31 +28,31 @@ class Codebase: def __init__( self, - input_fn: Callable[[str], str] = click.prompt, - echo_fn: Callable[[str], str] = click.secho, - confirm_fn: Callable[[str], bool] = click.confirm, - load_application_fn: Callable[[str], Application] = 
load_application, - get_aws_session_or_abort_fn: Callable[[str], Session] = get_aws_session_or_abort, - check_codebase_exists_fn: Callable[[str], str] = check_codebase_exists, - check_image_exists_fn: Callable[[str], str] = check_image_exists, - get_build_url_from_arn_fn: Callable[[str], str] = get_build_url_from_arn, - list_latest_images_fn: Callable[[str], str] = list_latest_images, - start_build_extraction_fn: Callable[[str], str] = start_build_extraction, - check_if_commit_exists_fn: Callable[[str], str] = check_if_commit_exists, - subprocess: Callable[[str], str] = subprocess.run, + input: Callable[[str], str] = click.prompt, + echo: Callable[[str], str] = click.secho, + confirm: Callable[[str], bool] = click.confirm, + load_application: Callable[[str], Application] = load_application, + get_aws_session_or_abort: Callable[[str], Session] = get_aws_session_or_abort, + check_codebase_exists: Callable[[str], str] = check_codebase_exists, + check_image_exists: Callable[[str], str] = check_image_exists, + get_build_url_from_arn: Callable[[str], str] = get_build_url_from_arn, + list_latest_images: Callable[[str], str] = list_latest_images, + start_build_extraction: Callable[[str], str] = start_build_extraction, + check_if_commit_exists: Callable[[str], str] = check_if_commit_exists, + run_subprocess: Callable[[str], str] = subprocess.run, ): - self.input_fn = input_fn - self.echo_fn = echo_fn - self.confirm_fn = confirm_fn - self.load_application_fn = load_application_fn - self.get_aws_session_or_abort_fn = get_aws_session_or_abort_fn - self.check_codebase_exists_fn = check_codebase_exists_fn - self.check_image_exists_fn = check_image_exists_fn - self.get_build_url_from_arn_fn = get_build_url_from_arn_fn - self.list_latest_images_fn = list_latest_images_fn - self.start_build_extraction_fn = start_build_extraction_fn - self.check_if_commit_exists_fn = check_if_commit_exists_fn - self.subprocess = subprocess + self.input = input + self.echo = echo + self.confirm = 
confirm + self.load_application = load_application + self.get_aws_session_or_abort = get_aws_session_or_abort + self.check_codebase_exists = check_codebase_exists + self.check_image_exists = check_image_exists + self.get_build_url_from_arn = get_build_url_from_arn + self.list_latest_images = list_latest_images + self.start_build_extraction = start_build_extraction + self.check_if_commit_exists = check_if_commit_exists + self.run_subprocess = run_subprocess def prepare(self): """Sets up an application codebase for use within a DBT platform @@ -60,7 +60,9 @@ def prepare(self): templates = setup_templates() repository = ( - self.subprocess(["git", "remote", "get-url", "origin"], capture_output=True, text=True) + self.run_subprocess( + ["git", "remote", "get-url", "origin"], capture_output=True, text=True + ) .stdout.split("/")[-1] .strip() .removesuffix(".git") @@ -90,7 +92,7 @@ def prepare(self): config_contents = templates.get_template(f".copilot/config.yml").render( repository=repository, builder_version=builder_version ) - self.echo_fn( + self.echo( mkfile( Path("."), ".copilot/image_build_run.sh", image_build_run_contents, overwrite=True ) @@ -99,27 +101,27 @@ def prepare(self): image_build_run_file = Path(".copilot/image_build_run.sh") image_build_run_file.chmod(image_build_run_file.stat().st_mode | stat.S_IEXEC) - self.echo_fn(mkfile(Path("."), ".copilot/config.yml", config_contents, overwrite=True)) + self.echo(mkfile(Path("."), ".copilot/config.yml", config_contents, overwrite=True)) for phase in ["build", "install", "post_build", "pre_build"]: phase_contents = templates.get_template(f".copilot/phases/{phase}.sh").render() - self.echo_fn( + self.echo( mkfile(Path("./.copilot"), f"phases/{phase}.sh", phase_contents, overwrite=True) ) def build(self, app: str, codebase: str, commit: str): """Trigger a CodePipeline pipeline based build.""" - session = self.get_aws_session_or_abort_fn() - self.load_application_fn(app, default_session=session) + session = 
self.get_aws_session_or_abort() + self.load_application(app, default_session=session) - self.check_if_commit_exists_fn(commit) + self.check_if_commit_exists(commit) codebuild_client = session.client("codebuild") build_url = self.__start_build_with_confirmation( - self.confirm_fn, + self.confirm, codebuild_client, - self.get_build_url_from_arn_fn, + self.get_build_url_from_arn, f'You are about to build "{app}" for "{codebase}" with commit "{commit}". Do you want to continue?', { "projectName": f"codebuild-{app}-{codebase}", @@ -129,7 +131,7 @@ def build(self, app: str, codebase: str, commit: str): ) if build_url: - return self.echo_fn( + return self.echo( f"Your build has been triggered. Check your build progress in the AWS Console: {build_url}" ) @@ -137,21 +139,21 @@ def build(self, app: str, codebase: str, commit: str): def deploy(self, app, env, codebase, commit): """Trigger a CodePipeline pipeline based deployment.""" - session = self.get_aws_session_or_abort_fn() + session = self.get_aws_session_or_abort() - application = self.load_application_fn(app, default_session=session) + application = self.load_application(app, default_session=session) if not application.environments.get(env): raise ApplicationEnvironmentNotFoundError(env) - self.check_codebase_exists_fn(session, application, codebase) + self.check_codebase_exists(session, application, codebase) - self.check_image_exists_fn(session, application, codebase, commit) + self.check_image_exists(session, application, codebase, commit) codebuild_client = session.client("codebuild") build_url = self.__start_build_with_confirmation( - self.confirm_fn, + self.confirm, codebuild_client, - self.get_build_url_from_arn_fn, + self.get_build_url_from_arn, f'You are about to deploy "{app}" for "{codebase}" with commit "{commit}" to the "{env}" environment. 
Do you want to continue?', { "projectName": f"pipeline-{application.name}-{codebase}-BuildProject", @@ -165,7 +167,7 @@ def deploy(self, app, env, codebase, commit): ) if build_url: - return self.echo_fn( + return self.echo( "Your deployment has been triggered. Check your build progress in the AWS Console: " f"{build_url}", ) @@ -174,25 +176,25 @@ def deploy(self, app, env, codebase, commit): def list(self, app: str, with_images: bool): """List available codebases for the application.""" - session = self.get_aws_session_or_abort_fn() - application = self.load_application_fn(app, session) + session = self.get_aws_session_or_abort() + application = self.load_application(app, session) ssm_client = session.client("ssm") ecr_client = session.client("ecr") codebases = self.__get_codebases(application, ssm_client) - self.echo_fn("The following codebases are available:") + self.echo("The following codebases are available:") for codebase in codebases: - self.echo_fn(f"- {codebase['name']} (https://github.com/{codebase['repository']})") + self.echo(f"- {codebase['name']} (https://github.com/{codebase['repository']})") if with_images: - self.list_latest_images_fn( + self.list_latest_images( ecr_client, f"{application.name}/{codebase['name']}", codebase["repository"], - self.echo_fn, + self.echo, ) - self.echo_fn("") + self.echo("") def __get_codebases(self, application, ssm_client): parameters = ssm_client.get_parameters_by_path( @@ -208,13 +210,13 @@ def __get_codebases(self, application, ssm_client): def __start_build_with_confirmation( self, - confirm_fn, + confirm, codebuild_client, - get_build_url_from_arn_fn, + get_build_url_from_arn, confirmation_message, build_options, ): - if confirm_fn(confirmation_message): - build_arn = self.start_build_extraction_fn(codebuild_client, build_options) - return get_build_url_from_arn_fn(build_arn) + if confirm(confirmation_message): + build_arn = self.start_build_extraction(codebuild_client, build_options) + return 
get_build_url_from_arn(build_arn) return None diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index 3ac798c29..e409ad869 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -19,22 +19,22 @@ def __init__( secrets_provider: Secrets, cloudformation_provider: CloudFormation, ecs_provider: ECS, - echo_fn: Callable[[str], str] = click.secho, - subprocess_fn: subprocess = subprocess, - connect_to_addon_client_task_fn=connect_to_addon_client_task, - create_addon_client_task_fn=create_addon_client_task, - create_postgres_admin_task_fn=create_postgres_admin_task, + echo: Callable[[str], str] = click.secho, + subprocess: subprocess = subprocess, + connect_to_addon_client_task=connect_to_addon_client_task, + create_addon_client_task=create_addon_client_task, + create_postgres_admin_task=create_postgres_admin_task, ): self.application = application self.secrets_provider = secrets_provider self.cloudformation_provider = cloudformation_provider self.ecs_provider = ecs_provider - self.subprocess_fn = subprocess_fn - self.echo_fn = echo_fn - self.connect_to_addon_client_task_fn = connect_to_addon_client_task_fn - self.create_addon_client_task_fn = create_addon_client_task_fn - self.create_postgres_admin_task = create_postgres_admin_task_fn + self.subprocess = subprocess + self.echo = echo + self.connect_to_addon_client_task = connect_to_addon_client_task + self.create_addon_client_task = create_addon_client_task + self.create_postgres_admin_task = create_postgres_admin_task def start(self, env: str, addon_name: str, access: str = "read"): clients = self._initialise_clients(env) @@ -42,15 +42,15 @@ def start(self, env: str, addon_name: str, access: str = "read"): addon_name, access ) - self.echo_fn(f"Checking if a conduit task is already running for {addon_type}") + self.echo(f"Checking if a conduit task is already running for {addon_type}") task_arns = 
self.ecs_provider.get_ecs_task_arns(cluster_arn, task_name) if not task_arns: - self.echo_fn("Creating conduit task") - self.create_addon_client_task_fn( + self.echo("Creating conduit task") + self.create_addon_client_task( clients["iam"], clients["ssm"], clients["secrets_manager"], - self.subprocess_fn, + self.subprocess, self.application, env, addon_type, @@ -59,7 +59,7 @@ def start(self, env: str, addon_name: str, access: str = "read"): access, ) - self.echo_fn("Updating conduit task") + self.echo("Updating conduit task") self._update_stack_resources( self.application.name, env, @@ -73,15 +73,15 @@ def start(self, env: str, addon_name: str, access: str = "read"): task_arns = self.ecs_provider.get_ecs_task_arns(cluster_arn, task_name) else: - self.echo_fn("Conduit task already running") + self.echo("Conduit task already running") - self.echo_fn(f"Checking if exec is available for conduit task...") + self.echo(f"Checking if exec is available for conduit task...") self.ecs_provider.ecs_exec_is_available(cluster_arn, task_arns) - self.echo_fn("Connecting to conduit task") - self.connect_to_addon_client_task_fn( - clients["ecs"], self.subprocess_fn, self.application.name, env, cluster_arn, task_name + self.echo("Connecting to conduit task") + self.connect_to_addon_client_task( + clients["ecs"], self.subprocess, self.application.name, env, cluster_arn, task_name ) def _initialise_clients(self, env): @@ -121,7 +121,7 @@ def _update_stack_resources( parameter_name, access, ) - self.echo_fn("Waiting for conduit task update to complete...") + self.echo("Waiting for conduit task update to complete...") self.cloudformation_provider.wait_for_cloudformation_to_reach_status( "stack_update_complete", stack_name ) diff --git a/dbt_platform_helper/domain/database_copy.py b/dbt_platform_helper/domain/database_copy.py index 3aef077f5..b3c0cd63b 100644 --- a/dbt_platform_helper/domain/database_copy.py +++ b/dbt_platform_helper/domain/database_copy.py @@ -27,39 +27,39 @@ def 
__init__( app: str, database: str, auto_approve: bool = False, - load_application_fn: Callable[[str], Application] = load_application, - vpc_config_fn: Callable[[Session, str, str, str], Vpc] = get_vpc_info_by_name, - db_connection_string_fn: Callable[ + load_application: Callable[[str], Application] = load_application, + vpc_config: Callable[[Session, str, str, str], Vpc] = get_vpc_info_by_name, + db_connection_string: Callable[ [Session, str, str, str, Callable], str ] = get_connection_string, maintenance_page_provider: Callable[ [str, str, list[str], str, str], None ] = MaintenancePageProvider(), - input_fn: Callable[[str], str] = click.prompt, - echo_fn: Callable[[str], str] = click.secho, - abort_fn: Callable[[str], None] = abort_with_error, + input: Callable[[str], str] = click.prompt, + echo: Callable[[str], str] = click.secho, + abort: Callable[[str], None] = abort_with_error, ): self.app = app self.database = database self.auto_approve = auto_approve - self.vpc_config_fn = vpc_config_fn - self.db_connection_string_fn = db_connection_string_fn + self.vpc_config = vpc_config + self.db_connection_string = db_connection_string self.maintenance_page_provider = maintenance_page_provider - self.input_fn = input_fn - self.echo_fn = echo_fn - self.abort_fn = abort_fn + self.input = input + self.echo = echo + self.abort = abort if not self.app: if not Path(PLATFORM_CONFIG_FILE).exists(): - self.abort_fn("You must either be in a deploy repo, or provide the --app option.") + self.abort("You must either be in a deploy repo, or provide the --app option.") config = load_and_validate_platform_config() self.app = config["application"] try: - self.application = load_application_fn(self.app) + self.application = load_application(self.app) except ApplicationNotFoundError: - abort_fn(f"No such application '{app}'.") + abort(f"No such application '{app}'.") def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str): vpc_name = self.enrich_vpc_name(env, 
vpc_name) @@ -67,40 +67,40 @@ def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str environments = self.application.environments environment = environments.get(env) if not environment: - self.abort_fn( + self.abort( f"No such environment '{env}'. Available environments are: {', '.join(environments.keys())}" ) env_session = environment.session try: - vpc_config = self.vpc_config_fn(env_session, self.app, env, vpc_name) + vpc_config = self.vpc_config(env_session, self.app, env, vpc_name) except AWSException as ex: - self.abort_fn(str(ex)) + self.abort(str(ex)) database_identifier = f"{self.app}-{env}-{self.database}" try: - db_connection_string = self.db_connection_string_fn( + db_connection_string = self.db_connection_string( env_session, self.app, env, database_identifier ) except Exception as exc: - self.abort_fn(f"{exc} (Database: {database_identifier})") + self.abort(f"{exc} (Database: {database_identifier})") try: task_arn = self.run_database_copy_task( env_session, env, vpc_config, is_dump, db_connection_string, to_env ) except Exception as exc: - self.abort_fn(f"{exc} (Account id: {self.account_id(env)})") + self.abort(f"{exc} (Account id: {self.account_id(env)})") if is_dump: message = f"Dumping {self.database} from the {env} environment into S3" else: message = f"Loading data into {self.database} in the {env} environment from S3" - self.echo_fn(message, fg="white", bold=True) - self.echo_fn( + self.echo(message, fg="white", bold=True) + self.echo( f"Task {task_arn} started. Waiting for it to complete (this may take some time)...", fg="white", ) @@ -109,9 +109,7 @@ def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str def enrich_vpc_name(self, env, vpc_name): if not vpc_name: if not Path(PLATFORM_CONFIG_FILE).exists(): - self.abort_fn( - "You must either be in a deploy repo, or provide the vpc name option." 
- ) + self.abort("You must either be in a deploy repo, or provide the vpc name option.") config = load_and_validate_platform_config() env_config = apply_environment_defaults(config)["environments"] vpc_name = env_config.get(env, {}).get("vpc") @@ -190,7 +188,7 @@ def is_confirmed_ready_to_load(self, env: str) -> bool: if self.auto_approve: return True - user_input = self.input_fn( + user_input = self.input( f"\nWARNING: the load operation is destructive and will delete the {self.database} database in the {env} environment. Continue? (y/n)" ) return user_input.lower().strip() in ["y", "yes"] @@ -199,7 +197,7 @@ def tail_logs(self, is_dump: bool, env: str): action = "dump" if is_dump else "load" log_group_name = f"/ecs/{self.app}-{env}-{self.database}-{action}" log_group_arn = f"arn:aws:logs:eu-west-2:{self.account_id(env)}:log-group:{log_group_name}" - self.echo_fn(f"Tailing {log_group_name} logs", fg="yellow") + self.echo(f"Tailing {log_group_name} logs", fg="yellow") session = self.application.environments[env].session log_client = session.client("logs") wait_for_log_group_to_exist(log_client, log_group_name) @@ -217,9 +215,9 @@ def tail_logs(self, is_dump: bool, env: str): match = re.match(r"(Stopping|Aborting) data (load|dump).*", message) if match: if match.group(1) == "Aborting": - self.abort_fn("Task aborted abnormally. See logs above for details.") + self.abort("Task aborted abnormally. 
See logs above for details.") stopped = True - self.echo_fn(message) + self.echo(message) def account_id(self, env): envs = self.application.environments diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index 503a82f10..c7b10f31a 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -127,14 +127,14 @@ def connect_to_addon_client_task( env, cluster_arn, task_name, - get_ecs_task_arns_fn=_temp_until_refactor_get_ecs_task_arns, + get_ecs_task_arns=_temp_until_refactor_get_ecs_task_arns, ): running = False tries = 0 while tries < 15 and not running: tries += 1 # Todo: Use from ECS provider when we refactor this - if get_ecs_task_arns_fn(ecs_client, cluster_arn, task_name): + if get_ecs_task_arns(ecs_client, cluster_arn, task_name): subprocess.call( "copilot task exec " f"--app {application_name} --env {env} " diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index 1faa3b1a9..cb2b2e34b 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -417,7 +417,7 @@ def get_connection_string( app: str, env: str, db_identifier: str, - connection_data_fn=get_postgres_connection_data_updated_with_master_secret, + connection_data=get_postgres_connection_data_updated_with_master_secret, ) -> str: addon_name = db_identifier.split(f"{app}-{env}-", 1)[1] normalised_addon_name = addon_name.replace("-", "_").upper() @@ -429,7 +429,7 @@ def get_connection_string( Name=master_secret_name, WithDecryption=True )["Parameter"]["Value"] - conn = connection_data_fn(session, connection_string_parameter, master_secret_arn) + conn = connection_data(session, connection_string_parameter, master_secret_arn) return f"postgres://{conn['username']}:{conn['password']}@{conn['host']}:{conn['port']}/{conn['dbname']}" @@ -528,7 +528,7 @@ def get_build_url_from_arn(build_arn: str) -> str: ) -def list_latest_images(ecr_client, ecr_repository_name, 
codebase_repository, echo_fn): +def list_latest_images(ecr_client, ecr_repository_name, codebase_repository, echo): paginator = ecr_client.get_paginator("describe_images") describe_images_response_iterator = paginator.paginate( repositoryName=ecr_repository_name, @@ -553,7 +553,7 @@ def list_latest_images(ecr_client, ecr_repository_name, codebase_repository, ech continue commit_hash = commit_tag.replace("commit-", "") - echo_fn( + echo( f" - https://github.com/{codebase_repository}/commit/{commit_hash} - published: {image['imagePushedAt']}" ) except StopIteration: diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index cbe06e3b1..7a812b7a9 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -104,13 +104,13 @@ def validate_addons(addons: dict): config={"extensions": addons}, extension_type="redis", version_key="engine", - get_supported_versions_fn=get_supported_redis_versions, + get_supported_versions=get_supported_redis_versions, ) _validate_extension_supported_versions( config={"extensions": addons}, extension_type="opensearch", version_key="engine", - get_supported_versions_fn=get_supported_opensearch_versions, + get_supported_versions=get_supported_opensearch_versions, ) return errors @@ -563,18 +563,18 @@ def validate_platform_config(config): config=config, extension_type="redis", version_key="engine", - get_supported_versions_fn=get_supported_redis_versions, + get_supported_versions=get_supported_redis_versions, ) _validate_extension_supported_versions( config=config, extension_type="opensearch", version_key="engine", - get_supported_versions_fn=get_supported_opensearch_versions, + get_supported_versions=get_supported_opensearch_versions, ) def _validate_extension_supported_versions( - config, extension_type, version_key, get_supported_versions_fn + config, extension_type, version_key, get_supported_versions ): extensions = config.get("extensions", {}) if not 
extensions: @@ -586,7 +586,7 @@ def _validate_extension_supported_versions( if extension.get("type") == extension_type ] - supported_extension_versions = get_supported_versions_fn() + supported_extension_versions = get_supported_versions() extensions_with_invalid_version = [] for extension in extensions_for_type: diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index 2ea4d9c5f..5c35a141f 100644 --- a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -38,13 +38,13 @@ def mock_aws_client(get_aws_session_or_abort): class CodebaseMocks: def __init__(self, **kwargs): - self.load_application_fn = kwargs.get("load_application_fn", Mock()) - self.get_aws_session_or_abort_fn = kwargs.get("get_aws_session_or_abort_fn", Mock()) - self.input_fn = kwargs.get("input_fn", Mock(return_value="yes")) - self.echo_fn = kwargs.get("echo_fn", Mock()) - self.confirm_fn = kwargs.get("confirm_fn", Mock(return_value=True)) - self.check_codebase_exists_fn = kwargs.get( - "check_codebase_exists_fn", + self.load_application = kwargs.get("load_application", Mock()) + self.get_aws_session_or_abort = kwargs.get("get_aws_session_or_abort", Mock()) + self.input = kwargs.get("input", Mock(return_value="yes")) + self.echo = kwargs.get("echo", Mock()) + self.confirm = kwargs.get("confirm", Mock(return_value=True)) + self.check_codebase_exists = kwargs.get( + "check_codebase_exists", Mock( return_value=""" { @@ -55,21 +55,21 @@ def __init__(self, **kwargs): """ ), ) - self.check_image_exists_fn = kwargs.get("check_image_exists_fn", Mock(return_value="")) - self.subprocess = kwargs.get("subprocess", Mock()) - self.check_if_commit_exists_fn = kwargs.get("check_if_commit_exists_fn", Mock()) + self.check_image_exists = kwargs.get("check_image_exists", Mock(return_value="")) + self.run_subprocess = kwargs.get("run_subprocess", Mock()) + self.check_if_commit_exists = 
kwargs.get("check_if_commit_exists", Mock()) def params(self): return { - "load_application_fn": self.load_application_fn, - "get_aws_session_or_abort_fn": self.get_aws_session_or_abort_fn, - "check_codebase_exists_fn": self.check_codebase_exists_fn, - "check_image_exists_fn": self.check_image_exists_fn, - "input_fn": self.input_fn, - "echo_fn": self.echo_fn, - "confirm_fn": self.confirm_fn, - "subprocess": self.subprocess, - "check_if_commit_exists_fn": self.check_if_commit_exists_fn, + "load_application": self.load_application, + "get_aws_session_or_abort": self.get_aws_session_or_abort, + "check_codebase_exists": self.check_codebase_exists, + "check_image_exists": self.check_image_exists, + "input": self.input, + "echo": self.echo, + "confirm": self.confirm, + "run_subprocess": self.run_subprocess, + "check_if_commit_exists": self.check_if_commit_exists, } @@ -104,7 +104,7 @@ def mocked_response(): os.chdir(tmp_path) - mocks.subprocess.return_value.stdout = "git@github.com:uktrade/test-app.git" + mocks.run_subprocess.return_value.stdout = "git@github.com:uktrade/test-app.git" codebase.prepare() @@ -113,7 +113,7 @@ def mocked_response(): compare_directories = filecmp.dircmp(str(expected_files_dir), str(copilot_dir)) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call( "File .copilot/image_build_run.sh created", @@ -141,12 +141,12 @@ def mocked_response(): def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_directory(tmp_path): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = SystemExit(1) + mocks.load_application.side_effect = SystemExit(1) codebase = Codebase(**mocks.params()) os.chdir(tmp_path) Path(tmp_path / "copilot").mkdir() - mocks.subprocess.return_value.stdout = mock_suprocess_fixture() + mocks.run_subprocess.return_value.stdout = mock_run_suprocess_fixture() with pytest.raises(NotInCodeBaseRepositoryError): codebase.prepare() @@ -154,12 +154,12 @@ def 
test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_direc def test_codebase_build_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError): codebase.build("not-an-application", "application", "ab1c23d") - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call( """The account "foo" does not contain the application "not-an-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", @@ -170,7 +170,7 @@ def test_codebase_build_does_not_trigger_build_without_an_application(): def test_codebase_build_commit_not_found(): - mocks = CodebaseMocks(check_if_commit_exists_fn=Mock(side_effect=CommitNotFoundError())) + mocks = CodebaseMocks(check_if_commit_exists=Mock(side_effect=CommitNotFoundError())) codebase = Codebase(**mocks.params()) @@ -180,9 +180,9 @@ def test_codebase_build_commit_not_found(): def test_codebase_prepare_raises_not_in_codebase_exception(tmp_path): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = SystemExit(1) + mocks.load_application.side_effect = SystemExit(1) - mocks.subprocess.return_value = mock_suprocess_fixture() + mocks.run_subprocess.return_value = mock_run_suprocess_fixture() codebase = Codebase(**mocks.params()) os.chdir(tmp_path) Path(tmp_path / "copilot").mkdir() @@ -195,7 +195,7 @@ def test_codebase_prepare_generates_an_executable_image_build_run_file(tmp_path) mocks = CodebaseMocks() codebase = Codebase(**mocks.params()) os.chdir(tmp_path) - mocks.subprocess.return_value.stdout = "demodjango" + mocks.run_subprocess.return_value.stdout = "demodjango" codebase.prepare() @@ -206,9 +206,9 @@ def test_codebase_prepare_generates_an_executable_image_build_run_file(tmp_path) def 
test_codebase_build_does_not_trigger_deployment_without_confirmation(): - mocks = CodebaseMocks(confirm_fn=Mock(return_value=False)) + mocks = CodebaseMocks(confirm=Mock(return_value=False)) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, } @@ -220,17 +220,17 @@ def test_codebase_build_does_not_trigger_deployment_without_confirmation(): def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_application): mocks = CodebaseMocks() - mocks.confirm_fn.return_value = True + mocks.confirm.return_value = True mock_application.environments = { "development": Environment( name="development", account_id="1234", - sessions={"111111111111": mocks.get_aws_session_or_abort_fn}, + sessions={"111111111111": mocks.get_aws_session_or_abort}, ) } - mocks.load_application_fn.return_value = mock_application + mocks.load_application.return_value = mock_application - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -254,7 +254,7 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl ], ) - mocks.confirm_fn.assert_has_calls( + mocks.confirm.assert_has_calls( [ call( 'You are about to deploy "test-application" for "application" with commit ' @@ -263,7 +263,7 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl ] ) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call( "Your deployment has been triggered. 
Check your build progress in the AWS Console: " @@ -276,10 +276,10 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl def test_codebase_deploy_exception_with_a_nonexistent_codebase(): mocks = CodebaseMocks( - check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + check_codebase_exists=Mock(side_effect=CopilotCodebaseNotFoundError("application")) ) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -292,10 +292,10 @@ def test_codebase_deploy_exception_with_a_nonexistent_codebase(): def test_check_codebase_exists_returns_error_when_no_json(): mocks = CodebaseMocks( - check_codebase_exists_fn=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + check_codebase_exists=Mock(side_effect=CopilotCodebaseNotFoundError("application")) ) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -308,10 +308,10 @@ def test_check_codebase_exists_returns_error_when_no_json(): def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): mocks = CodebaseMocks( - check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + check_image_exists=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) ) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -325,10 +325,10 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): def test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): mocks = CodebaseMocks( - 
check_image_exists_fn=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + check_image_exists=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) ) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -342,9 +342,9 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): def test_codebase_deploy_does_not_trigger_build_without_confirmation(): mocks = CodebaseMocks() - mocks.subprocess.return_value.stderr = "" - mocks.confirm_fn.return_value = False - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + mocks.run_subprocess.return_value.stderr = "" + mocks.confirm.return_value = False + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -360,7 +360,7 @@ def test_codebase_deploy_does_not_trigger_build_without_confirmation(): codebase = Codebase(**mocks.params()) codebase.deploy("test-application", "development", "application", "ab1c23d") - mocks.confirm_fn.assert_has_calls( + mocks.confirm.assert_has_calls( [ call( 'You are about to deploy "test-application" for "application" with commit ' @@ -369,12 +369,12 @@ def test_codebase_deploy_does_not_trigger_build_without_confirmation(): ] ) - mocks.echo_fn.assert_has_calls([call("Your deployment was not triggered.")]) + mocks.echo.assert_has_calls([call("Your deployment was not triggered.")]) def test_codebase_deploy_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: @@ -384,12 +384,12 @@ def 
test_codebase_deploy_does_not_trigger_build_without_an_application(): def test_codebase_deploy_does_not_trigger_build_with_missing_environment(mock_application): mocks = CodebaseMocks() mock_application.environments = {} - mocks.load_application_fn.return_value = mock_application + mocks.load_application.return_value = mock_application codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationEnvironmentNotFoundError) as exc: codebase.deploy("test-application", "not-an-environment", "application", "ab1c23d") - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call( """The environment "not-an-environment" either does not exist or has not been deployed.""", @@ -400,9 +400,9 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_environment(mock_ap def test_codebase_deploy_does_not_trigger_deployment_without_confirmation(): - mocks = CodebaseMocks(confirm_fn=Mock(return_value=False)) + mocks = CodebaseMocks(confirm=Mock(return_value=False)) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, } @@ -414,7 +414,7 @@ def test_codebase_deploy_does_not_trigger_deployment_without_confirmation(): def test_codebase_list_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application_fn.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application.side_effect = ApplicationNotFoundError("not-an-application") codebase = Codebase(**mocks.params()) with pytest.raises(ApplicationNotFoundError) as exc: @@ -422,9 +422,9 @@ def test_codebase_list_does_not_trigger_build_without_an_application(): def test_codebase_list_returns_empty_when_no_codebases(): - mocks = CodebaseMocks(check_codebase_exists_fn=Mock()) + mocks = CodebaseMocks(check_codebase_exists=Mock()) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client 
= mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameter.return_value = { "Parameter": {"Value": json.dumps({"name": "application"})}, @@ -433,13 +433,13 @@ def test_codebase_list_returns_empty_when_no_codebases(): codebase = Codebase(**mocks.params()) codebase.list("test-application", True) - mocks.echo_fn.assert_has_calls([]) + mocks.echo.assert_has_calls([]) def test_lists_codebases_with_multiple_pages_of_images(): mocks = CodebaseMocks() codebase = Codebase(**mocks.params()) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameters_by_path.return_value = { "Parameters": [ {"Value": json.dumps({"name": "application", "repository": "uktrade/example"})} @@ -474,7 +474,7 @@ def test_lists_codebases_with_multiple_pages_of_images(): ] codebase.list("test-application", True) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call("- application (https://github.com/uktrade/example)"), call( @@ -497,7 +497,7 @@ def test_lists_codebases_with_multiple_pages_of_images(): def test_lists_codebases_with_disordered_images_in_chronological_order(): mocks = CodebaseMocks() codebase = Codebase(**mocks.params()) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameters_by_path.return_value = { "Parameters": [ {"Value": json.dumps({"name": "application", "repository": "uktrade/example"})} @@ -531,7 +531,7 @@ def test_lists_codebases_with_disordered_images_in_chronological_order(): ] codebase.list("test-application", True) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call("The following codebases are available:"), call("- application (https://github.com/uktrade/example)"), @@ -554,7 +554,7 @@ def test_lists_codebases_with_disordered_images_in_chronological_order(): def test_lists_codebases_with_images_successfully(): mocks = CodebaseMocks() codebase = 
Codebase(**mocks.params()) - client = mock_aws_client(mocks.get_aws_session_or_abort_fn) + client = mock_aws_client(mocks.get_aws_session_or_abort) client.get_parameters_by_path.return_value = { "Parameters": [ {"Value": json.dumps({"name": "application", "repository": "uktrade/example"})} @@ -585,7 +585,7 @@ def test_lists_codebases_with_images_successfully(): codebase.list("test-application", True) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call("The following codebases are available:"), call("- application (https://github.com/uktrade/example)"), @@ -630,7 +630,7 @@ def is_same_files(compare_directories): return True -def mock_suprocess_fixture(): +def mock_run_suprocess_fixture(): mock_stdout = MagicMock() mock_stdout.configure_mock(**{"stdout.decode.return_value": '{"A": 3}'}) return mock_stdout diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 52e67ac14..0afa1c6c5 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -34,10 +34,10 @@ def __init__(self, app_name="test-application", *args, **kwargs): self.secrets_provider = kwargs.get("secrets_provider", Mock()) self.cloudformation_provider = kwargs.get("cloudformation_provider", Mock()) self.ecs_provider = kwargs.get("ecs_provider", Mock()) - self.connect_to_addon_client_task_fn = kwargs.get("connect_to_addon_client_task_fn", Mock()) - self.create_addon_client_task_fn = kwargs.get("create_addon_client_task_fn", Mock()) - self.create_postgres_admin_task_fn = kwargs.get("create_postgres_admin_task_fn", Mock()) - self.echo_fn = kwargs.get("echo_fn", Mock()) + self.connect_to_addon_client_task = kwargs.get("connect_to_addon_client_task", Mock()) + self.create_addon_client_task = kwargs.get("create_addon_client_task", Mock()) + self.create_postgres_admin_task = kwargs.get("create_postgres_admin_task", Mock()) + self.echo = kwargs.get("echo", Mock()) self.subprocess = 
kwargs.get("subprocess", Mock(return_value="task_name")) def params(self): @@ -46,11 +46,11 @@ def params(self): "secrets_provider": self.secrets_provider, "cloudformation_provider": self.cloudformation_provider, "ecs_provider": self.ecs_provider, - "connect_to_addon_client_task_fn": self.connect_to_addon_client_task_fn, - "create_addon_client_task_fn": self.create_addon_client_task_fn, - "create_postgres_admin_task_fn": self.create_postgres_admin_task_fn, - "echo_fn": self.echo_fn, - "subprocess_fn": self.subprocess, + "connect_to_addon_client_task": self.connect_to_addon_client_task, + "create_addon_client_task": self.create_addon_client_task, + "create_postgres_admin_task": self.create_postgres_admin_task, + "echo": self.echo, + "subprocess": self.subprocess, } @@ -86,8 +86,8 @@ def test_conduit(app_name, addon_type, addon_name, access): conduit.ecs_provider.get_ecs_task_arns.assert_has_calls( [call(cluster_arn, task_name), call(cluster_arn, task_name)] ) - conduit.connect_to_addon_client_task_fn.assert_called_once_with( - ecs_client, conduit.subprocess_fn, app_name, env, cluster_arn, task_name + conduit.connect_to_addon_client_task.assert_called_once_with( + ecs_client, conduit.subprocess, app_name, env, cluster_arn, task_name ) conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) conduit.ecs_provider.get_cluster_arn.assert_called_once() @@ -109,11 +109,11 @@ def test_conduit(app_name, addon_type, addon_name, access): conduit.cloudformation_provider.wait_for_cloudformation_to_reach_status.assert_called_once_with( "stack_update_complete", f"task-{task_name}" ) - conduit.create_addon_client_task_fn.assert_called_once_with( + conduit.create_addon_client_task.assert_called_once_with( iam_client, ssm_client, secretsmanager_client, - conduit.subprocess_fn, + conduit.subprocess, conduit.application, env, addon_type, @@ -121,7 +121,7 @@ def test_conduit(app_name, addon_type, addon_name, access): task_name, access, ) - 
conduit_mocks.echo_fn.assert_has_calls( + conduit_mocks.echo.assert_has_calls( [ call("Creating conduit task"), call("Updating conduit task"), @@ -148,8 +148,8 @@ def test_conduit_with_task_already_running(): conduit.start(env, addon_name, "read") conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) - conduit.connect_to_addon_client_task_fn.assert_called_once_with( - ecs_client, conduit.subprocess_fn, app_name, env, cluster_arn, task_name + conduit.connect_to_addon_client_task.assert_called_once_with( + ecs_client, conduit.subprocess, app_name, env, cluster_arn, task_name ) conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) conduit.ecs_provider.get_cluster_arn.assert_called_once() @@ -158,9 +158,9 @@ def test_conduit_with_task_already_running(): ) conduit.cloudformation_provider.add_stack_delete_policy_to_task_role.assert_not_called() conduit.cloudformation_provider.update_conduit_stack_resources.assert_not_called() - conduit.create_addon_client_task_fn.assert_not_called() + conduit.create_addon_client_task.assert_not_called() - conduit_mocks.echo_fn.assert_has_calls( + conduit_mocks.echo.assert_has_calls( [ call("Checking if a conduit task is already running for postgres"), call("Conduit task already running"), @@ -191,7 +191,7 @@ def test_conduit_domain_when_no_connection_secret_exists(): ) conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [] conduit_mocks.secrets_provider.get_parameter_name.return_value = "parameter_name" - conduit_mocks.create_addon_client_task_fn.side_effect = SecretNotFoundError( + conduit_mocks.create_addon_client_task.side_effect = SecretNotFoundError( f"/copilot/{app_name}/{env}/secrets/{addon_name}" ) conduit = Conduit(**conduit_mocks.params()) @@ -211,7 +211,7 @@ def test_conduit_domain_when_client_task_fails_to_start(): app_name, addon_type, ) - conduit_mocks.connect_to_addon_client_task_fn.side_effect = ( + conduit_mocks.connect_to_addon_client_task.side_effect = ( 
CreateTaskTimeoutError( addon_name=addon_name, application_name=app_name, @@ -224,15 +224,15 @@ def test_conduit_domain_when_client_task_fails_to_start(): with pytest.raises(CreateTaskTimeoutError): conduit.start(env, addon_name) conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) - conduit.connect_to_addon_client_task_fn.assert_called_once_with( - conduit.subprocess_fn, app_name, env, cluster_arn, task_name + conduit.connect_to_addon_client_task.assert_called_once_with( + conduit.subprocess, app_name, env, cluster_arn, task_name ) conduit.secrets_provider.get_addon_type.assert_called_once_with(app_name, env, addon_name) conduit.ecs_provider.get_cluster_arn.assert_called_once() conduit.ecs_provider.get_or_create_task_name.assert_called_once_with( addon_name, "parameter_name" ) - conduit.create_addon_client_task_fn.assert_not_called() + conduit.create_addon_client_task.assert_not_called() conduit.cloudformation_provider.add_stack_delete_policy_to_task_role.assert_not_called() conduit.cloudformation_provider.update_conduit_stack_resources.assert_not_called() diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index fd6bf3c11..0c8f7bc7e 100644 --- a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -18,29 +18,29 @@ def __init__(self, app="test-app", env="test-env", acc="12345", vpc=Vpc([], [])) self.environment = Mock() self.environment.account_id = acc self.application.environments = {env: self.environment, "test-env-2": Mock()} - self.load_application_fn = Mock(return_value=self.application) + self.load_application = Mock(return_value=self.application) self.client = Mock() self.environment.session.client.return_value = self.client self.vpc = vpc - self.vpc_config_fn = Mock() - self.vpc_config_fn.return_value = vpc - self.db_connection_string_fn = Mock(return_value="test-db-connection-string") + 
self.vpc_config = Mock() + self.vpc_config.return_value = vpc + self.db_connection_string = Mock(return_value="test-db-connection-string") self.maintenance_page_provider = Mock() - self.input_fn = Mock(return_value="yes") - self.echo_fn = Mock() - self.abort_fn = Mock(side_effect=SystemExit(1)) + self.input = Mock(return_value="yes") + self.echo = Mock() + self.abort = Mock(side_effect=SystemExit(1)) def params(self): return { - "load_application_fn": self.load_application_fn, - "vpc_config_fn": self.vpc_config_fn, - "db_connection_string_fn": self.db_connection_string_fn, + "load_application": self.load_application, + "vpc_config": self.vpc_config, + "db_connection_string": self.db_connection_string, "maintenance_page_provider": self.maintenance_page_provider, - "input_fn": self.input_fn, - "echo_fn": self.echo_fn, - "abort_fn": self.abort_fn, + "input": self.input, + "echo": self.echo, + "abort": self.abort, } @@ -119,18 +119,18 @@ def test_database_dump(): db_copy.dump(env, vpc_name, "test-env") - mocks.load_application_fn.assert_called_once() - mocks.vpc_config_fn.assert_called_once_with( + mocks.load_application.assert_called_once() + mocks.vpc_config.assert_called_once_with( mocks.environment.session, app, env, "test-vpc-override" ) - mocks.db_connection_string_fn.assert_called_once_with( + mocks.db_connection_string.assert_called_once_with( mocks.environment.session, app, env, "test-app-test-env-test-db" ) mock_run_database_copy_task.assert_called_once_with( mocks.environment.session, env, mocks.vpc, True, "test-db-connection-string", "test-env" ) - mocks.input_fn.assert_not_called() - mocks.echo_fn.assert_has_calls( + mocks.input.assert_not_called() + mocks.echo.assert_has_calls( [ call("Dumping test-db from the test-env environment into S3", fg="white", bold=True), call( @@ -159,13 +159,13 @@ def test_database_load_with_response_of_yes(): db_copy.load(env, vpc_name) - mocks.load_application_fn.assert_called_once() + 
mocks.load_application.assert_called_once() - mocks.vpc_config_fn.assert_called_once_with( + mocks.vpc_config.assert_called_once_with( mocks.environment.session, app, env, "test-vpc-override" ) - mocks.db_connection_string_fn.assert_called_once_with( + mocks.db_connection_string.assert_called_once_with( mocks.environment.session, app, env, "test-app-test-env-test-db" ) @@ -173,11 +173,11 @@ def test_database_load_with_response_of_yes(): mocks.environment.session, env, mocks.vpc, False, "test-db-connection-string", "test-env" ) - mocks.input_fn.assert_called_once_with( + mocks.input.assert_called_once_with( f"\nWARNING: the load operation is destructive and will delete the test-db database in the test-env environment. Continue? (y/n)" ) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call( "Loading data into test-db in the test-env environment from S3", @@ -196,35 +196,35 @@ def test_database_load_with_response_of_yes(): def test_database_load_with_response_of_no(): mocks = DataCopyMocks() - mocks.input_fn = Mock(return_value="no") + mocks.input = Mock(return_value="no") - mock_run_database_copy_task_fn = Mock() + mock_run_database_copy_task = Mock() db_copy = DatabaseCopy("test-app", "test-db", **mocks.params()) db_copy.tail_logs = Mock() - db_copy.run_database_copy_task = mock_run_database_copy_task_fn + db_copy.run_database_copy_task = mock_run_database_copy_task db_copy.load("test-env", "test-vpc") - mocks.environment.session_fn.assert_not_called() + mocks.environment.session.assert_not_called() - mocks.vpc_config_fn.assert_not_called() + mocks.vpc_config.assert_not_called() - mocks.db_connection_string_fn.assert_not_called() + mocks.db_connection_string.assert_not_called() - mock_run_database_copy_task_fn.assert_not_called() + mock_run_database_copy_task.assert_not_called() - mocks.input_fn.assert_called_once_with( + mocks.input.assert_called_once_with( f"\nWARNING: the load operation is destructive and will delete the test-db database in the 
test-env environment. Continue? (y/n)" ) - mocks.echo_fn.assert_not_called() + mocks.echo.assert_not_called() db_copy.tail_logs.assert_not_called() @pytest.mark.parametrize("is_dump", (True, False)) def test_database_dump_handles_vpc_errors(is_dump): mocks = DataCopyMocks() - mocks.vpc_config_fn.side_effect = AWSException("A VPC error occurred") + mocks.vpc_config.side_effect = AWSException("A VPC error occurred") db_copy = DatabaseCopy("test-app", "test-db", **mocks.params()) @@ -235,13 +235,13 @@ def test_database_dump_handles_vpc_errors(is_dump): db_copy.load("test-env", "bad-vpc-name") assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with("A VPC error occurred") + mocks.abort.assert_called_once_with("A VPC error occurred") @pytest.mark.parametrize("is_dump", (True, False)) def test_database_dump_handles_db_name_errors(is_dump): mocks = DataCopyMocks() - mocks.db_connection_string_fn = Mock(side_effect=Exception("Parameter not found.")) + mocks.db_connection_string = Mock(side_effect=Exception("Parameter not found.")) db_copy = DatabaseCopy("test-app", "bad-db", **mocks.params()) @@ -252,9 +252,7 @@ def test_database_dump_handles_db_name_errors(is_dump): db_copy.load("test-env", "vpc-name") assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with( - "Parameter not found. (Database: test-app-test-env-bad-db)" - ) + mocks.abort.assert_called_once_with("Parameter not found. (Database: test-app-test-env-bad-db)") @pytest.mark.parametrize("is_dump", (True, False)) @@ -270,7 +268,7 @@ def test_database_dump_handles_env_name_errors(is_dump): db_copy.load("bad-env", "vpc-name") assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with( + mocks.abort.assert_called_once_with( "No such environment 'bad-env'. 
Available environments are: test-env, test-env-2" ) @@ -291,30 +289,30 @@ def test_database_dump_handles_account_id_errors(is_dump): db_copy.load("test-env", "vpc-name") assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with(f"{error_msg} (Account id: 12345)") + mocks.abort.assert_called_once_with(f"{error_msg} (Account id: 12345)") def test_database_copy_initialization_handles_app_name_errors(): mocks = DataCopyMocks() - mocks.load_application_fn = Mock(side_effect=ApplicationNotFoundError("bad-app")) + mocks.load_application = Mock(side_effect=ApplicationNotFoundError("bad-app")) with pytest.raises(SystemExit) as exc: DatabaseCopy("bad-app", "test-db", **mocks.params()) assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with("No such application 'bad-app'.") + mocks.abort.assert_called_once_with("No such application 'bad-app'.") @pytest.mark.parametrize("user_response", ["y", "Y", " y ", "\ny", "YES", "yes"]) def test_is_confirmed_ready_to_load(user_response): mocks = DataCopyMocks() - mocks.input_fn.return_value = user_response + mocks.input.return_value = user_response db_copy = DatabaseCopy("test-app", "test-db", **mocks.params()) assert db_copy.is_confirmed_ready_to_load("test-env") - mocks.input_fn.assert_called_once_with( + mocks.input.assert_called_once_with( f"\nWARNING: the load operation is destructive and will delete the test-db database in the test-env environment. Continue? 
(y/n)" ) @@ -322,13 +320,13 @@ def test_is_confirmed_ready_to_load(user_response): @pytest.mark.parametrize("user_response", ["n", "N", " no ", "squiggly"]) def test_is_not_confirmed_ready_to_load(user_response): mocks = DataCopyMocks() - mocks.input_fn.return_value = user_response + mocks.input.return_value = user_response db_copy = DatabaseCopy("test-app", "test-db", **mocks.params()) assert not db_copy.is_confirmed_ready_to_load("test-env") - mocks.input_fn.assert_called_once_with( + mocks.input.assert_called_once_with( f"\nWARNING: the load operation is destructive and will delete the test-db database in the test-env environment. Continue? (y/n)" ) @@ -340,7 +338,7 @@ def test_is_confirmed_ready_to_load_with_yes_flag(): assert db_copy.is_confirmed_ready_to_load("test-env") - mocks.input_fn.assert_not_called() + mocks.input.assert_not_called() @pytest.mark.parametrize( @@ -425,7 +423,7 @@ def test_tail_logs(is_dump): ], ) - mocks.echo_fn.assert_has_calls( + mocks.echo.assert_has_calls( [ call( f"Tailing /ecs/test-app-test-env-test-db-{action} logs", @@ -465,7 +463,7 @@ def test_tail_logs_exits_with_error_if_task_aborts(is_dump): db_copy.tail_logs(is_dump, "test-env") assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with("Task aborted abnormally. See logs above for details.") + mocks.abort.assert_called_once_with("Task aborted abnormally. See logs above for details.") def test_database_copy_account_id(): @@ -493,7 +491,7 @@ def test_error_if_neither_platform_config_or_application_supplied(fs): DatabaseCopy(None, "test-db", **mocks.params()) assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with( + mocks.abort.assert_called_once_with( "You must either be in a deploy repo, or provide the --app option." 
) @@ -523,7 +521,7 @@ def test_database_dump_with_no_vpc_works_in_deploy_repo(fs, is_dump): else: db_copy.load(env, None) - mocks.vpc_config_fn.assert_called_once_with( + mocks.vpc_config.assert_called_once_with( mocks.environment.session, "test-app", env, "test-env-vpc" ) @@ -550,7 +548,7 @@ def test_database_dump_with_no_vpc_fails_if_not_in_deploy_repo(fs, is_dump): db_copy.load(env, None) assert exc.value.code == 1 - mocks.abort_fn.assert_called_once_with( + mocks.abort.assert_called_once_with( f"You must either be in a deploy repo, or provide the vpc name option." ) @@ -570,7 +568,7 @@ def test_enrich_vpc_name_aborts_if_no_platform_config(fs): with pytest.raises(SystemExit): db_copy.enrich_vpc_name("test-env", None) - mocks.abort_fn.assert_called_once_with( + mocks.abort.assert_called_once_with( f"You must either be in a deploy repo, or provide the vpc name option." ) diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index 9b9f27d4d..55ec40040 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -424,7 +424,7 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( env, "test-arn", task_name, - get_ecs_task_arns_fn=get_ecs_task_arns, + get_ecs_task_arns=get_ecs_task_arns, ) get_ecs_task_arns.assert_called_with(ecs_client, "test-arn", task_name) diff --git a/tests/platform_helper/utils/test_aws.py b/tests/platform_helper/utils/test_aws.py index add894d01..9c0b8d239 100644 --- a/tests/platform_helper/utils/test_aws.py +++ b/tests/platform_helper/utils/test_aws.py @@ -764,7 +764,7 @@ def test_get_connection_string(): ) connection_string = get_connection_string( - session, "my_app", "my_env", db_identifier, connection_data_fn=mock_connection_data + session, "my_app", "my_env", db_identifier, connection_data=mock_connection_data ) mock_connection_data.assert_called_once_with( diff --git 
a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index ab6f954d8..3b26b5e25 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -1094,7 +1094,7 @@ def test_validate_extension_supported_versions( config=config, extension_type="redis", version_key="engine", - get_supported_versions_fn=mock_supported_versions, + get_supported_versions=mock_supported_versions, ) captured = capsys.readouterr() diff --git a/tests/utils/test_check_pypi.py b/tests/utils/test_check_pypi.py index b63a7e3a4..58a84e260 100644 --- a/tests/utils/test_check_pypi.py +++ b/tests/utils/test_check_pypi.py @@ -94,13 +94,13 @@ def test_check_for_version_in_pypi_releases__version_found_on_the_third_attempt( call_no = {"calls": 0} - def releases_fn(): + def releases(): call_no["calls"] += 1 if call_no["calls"] <= 2: return ["0.1.2", "0.1.21", "0.1.1"] return ["0.1.2", "0.1.21", "0.1.22", "0.1.1"] - exit_code = check_for_version_in_pypi_releases(opts, "0.1.22", releases_fn) + exit_code = check_for_version_in_pypi_releases(opts, "0.1.22", releases) captured_output = capsys.readouterr() lines = [line.strip() for line in captured_output.out.split("\n") if line] diff --git a/utils/check_pypi.py b/utils/check_pypi.py index ae788cb13..be596f081 100644 --- a/utils/check_pypi.py +++ b/utils/check_pypi.py @@ -18,13 +18,13 @@ def opts(): return parser.parse_args() -def check_for_version_in_pypi_releases(options, version, get_releases_fn): +def check_for_version_in_pypi_releases(options, version, get_releases): print("Version:", version) if options.version: return OK for i in range(options.max_attempts): print(f"Attempt {i + 1} of {options.max_attempts}: ", end="") - releases = get_releases_fn() + releases = get_releases() if version in releases: print(f"Version {version} has been found in PyPI.") return OK From 023af689c30c08a3659328a7da7aa94b6a93b74d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" 
<41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 09:44:47 +0000 Subject: [PATCH 25/38] chore(main): release 12.3.0 (#671) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 12 ++++++++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0b2cd91f4..9342a7cef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [12.3.0](https://github.com/uktrade/platform-tools/compare/12.2.4...12.3.0) (2024-12-03) + + +### Features + +* DBTP-1299 - Cross account database copy ([#657](https://github.com/uktrade/platform-tools/issues/657)) ([7d35599](https://github.com/uktrade/platform-tools/commit/7d35599533b55f15fb08801c50ce538a8a32b847)) + + +### Reverts + +* Improving provider structure and exception handling" ([#670](https://github.com/uktrade/platform-tools/issues/670)) ([331e8b8](https://github.com/uktrade/platform-tools/commit/331e8b89d60fec4e29a9ea4473ffa44cba8e92c7)) + ## [12.2.4](https://github.com/uktrade/platform-tools/compare/12.2.3...12.2.4) (2024-12-02) diff --git a/pyproject.toml b/pyproject.toml index 7e7f7c64a..5d45dfb70 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.2.4" +version = "12.3.0" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." 
authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index b1f1b166a..b06fb695a 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.2.4" + ".": "12.3.0" } From ae4862da9e3e3d39c82c99222fa21450191f260a Mon Sep 17 00:00:00 2001 From: Connor Hindle <69192234+DeveloperConnor@users.noreply.github.com> Date: Wed, 4 Dec 2024 11:29:09 +0000 Subject: [PATCH 26/38] docs: Document new dbt-platform-helper architecture (#669) Co-authored-by: Will Gibson <8738245+WillGibson@users.noreply.github.com> --- README.md | 51 +++++++++++++++++++++++++++ doc/platform-helper-architecture.png | Bin 0 -> 26675 bytes 2 files changed, 51 insertions(+) create mode 100644 doc/platform-helper-architecture.png diff --git a/README.md b/README.md index 4b6f1455e..f5d476ab3 100644 --- a/README.md +++ b/README.md @@ -42,8 +42,59 @@ If you are migrating a service to DBT PaaS, [GOV.UK PaaS to DBT PaaS Migration]( poetry run pre-commit install ``` +### Platform Helper architecture + +`platform-helper` is split into the following layers: + +Commands (UI) -> Domains -> Providers + +Code written for `platform-helper` should adhere to the following architecture, shown below. + +![platform-helper architecture](doc/platform-helper-architecture.png) + +#### Commands + +This is the (essentially) UI level of `platform-helper` + +We try to follow a noun/domain verb/action pattern, `platform-helper thing action`. + +E.g. `codebase` has the following commands (all to do with the codebase Domain): + +- `build` +- `deploy` +- `list` +- `prepare` + +Each command has an associated Domain. + +There should be no business logic within the command as this is implelemented in the Domain layer. + +CLI arguments for a command are pulled in via [click](https://click.palletsprojects.com/en/stable/) and passed to the Domain along with any dependencies. 
+ +#### Domains + +Domains are where the business logic for a given Command lives. + +Each Domain is a class. + +Any logged information (`click.secho`) from the Provider level should live within the Domain level. + +Any common/reusable elements should be implemented in a Provider. + +#### Providers + +Providers are groups of similar logic that are linked by the resource/tool/thing they use rather than the result of what their actions is. + +E.g. I have a method that lists *thing* from the *thing-service* AWS using a boto3 client. + +This method is not specific to the Domain so it should go into the *thing-service* Provider. + ### Testing +#### Testing approach + +See the following [Confluence](https://uktrade.atlassian.net/wiki/spaces/DBTP/pages/4325376119/Testing+approach) page the `platform-tools` testing approach + #### Requirements The following tools are required to run the full test suite. diff --git a/doc/platform-helper-architecture.png b/doc/platform-helper-architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..2af907e7eaa9bd6dfe54a77421eeb694c529bc6c GIT binary patch literal 26675 zcmeFZWl&sE*Cq<2u@IaUIU4jR9Pq5&@A$V|TB)CIxCqQtULwIez zsk(pety?oSU#hz}r_a`P_FnQlYbRV$UJCgo-b*MbC}in(;!03Z(4D|{9{~>dr6G!{ z5x6n55EE0B784^;bhIRIItBjdimWISxBU4XHsDI1(u5Yjt<%+ZFl6r1b=gq)-VesMu`0k!Y&-C zU@sp>zIBQ2zL^dp;uZO4X&h=9Vy-V`c!xB9JARS=#-uTy#smRUu@cbLB3X(_k5 zHx`~jv%l|EQNC}gC?eC;{uMjKc3)oJf6WKqk|BH;a%LHDHcvaI1y_MB2<^mgn#@Z- zVF<`v%~V6$Oim7p4!A~uf)2HS0s&Xhz!wkrLP5QV4}pRQ{$c=M@m!dHK85bgeetjB z{ilK=%3{*ez+Yu!M^jTXDUyiUIhvAiGO;kRkO{sd zAtB*+G%@2<5|{i(b>L2b?1QtjJufq}o0}Vx8#|Moqd7Ax4-XGB3mY>V8zWGH(aFQs z+0dQQ)`|SNk$<)$Zt7(0XkqVcVP{M7)UKhCor|*o8QD`u|N8elPE&V_fA?hT^p9x) z6J&mR!pzFV!u+qcfvWsZzw#)e9Obi|F``A^5ow={zpxX|ET$v z=Ra%y$CLkBQ`O1TQOwQ;=+as6-*4t0mH+eMKPvJwKh69!1V~)Gs`Vo9i;)Pd z{_U-l2o{o2CybHEcamJ^U5VA5R=39cZA6o_g$3PY-gB*}WtvAMc~ z0Shq}>gn?aMtypaQf`Piu5vr_18qxj91S$Y?C9(r42AI1r-CY$&&{@}%rV}?Qj1i= 
zUPS6255mQBwRUylov(fyqDrvDm1LPxLSv9XW3YhOOo!8_GXK>Aq6Eo6o{nfz>c{5- zBmqvKkt1|Ul&8|b=bbPNRwt;;89VLopjR27BS^Gi{ti$02SzRemGxB3>hoZ*MC$)m zAqfbk6h;TnmY8=+|D7sLp!JLAB8am7WU1d&M*|c7PEQdu2lp%ujASSY7Q@nPSn9t` zr6wrJm(;HV|r#5ul2F! zosv?#)nIpEtS|-QZ*>YX=>&&@4F7BNgi5un*o4V8=QE`90LdOUKYdkquiiM^Y46RgRzxzx8HeG7S%L zHU?DRWPF#_W(!3lC`KJ;9K`C?8C3g*Hyjy@1=J!6FQR{d8vvmcVc za5+n�MjGqj*rGbjwS9K3B;`(Z>0UAt-_fmEJ?Bo?rR~q9TZVG?6v_?;GR}26`fW z-DCZHE1nI!##1mZ`QNhtUG@J5F_}2L6HJrC7z4@Y@RJVE;|#?G-4<8|$x=uic_WH! zUHufRw(C=))vWr?IM{49jK|}8ppo8UGMcShq8?*I#AP#2frP`9IG98)dpKV=u|bPR zw9sIeGEt-;o5p4;8u8E_ju}rWi#?FZMX=sc>*doj8FMQ+85E<#A~(6uUVsM>{2%uhx9`!%qfnWg9Fiod=v`V{HN2}b_3-N%XoFp=qL1*Eo6Ld zM~SS)y}^m{MeHye5tO@GhK1Q3Lb=1sMsSID#X9>$pkp z;pQCQ<98P8>2g{5Y43*1>0k`f#@BT8hs&e=o&-m#X-%1{KX9+!kVwT4$(@HMLCI%u z%4yYG)8goRR~;-ht4~+zGkaYepl=}l;5PgL(w-U}Vs2*`7vcCL88szFuT$zCrjIRVl}rxpcDi9EJWbqti<}<0KxAN}chl43pt(F>JXk675G_ zS~*_lq_xe`9cNc-9ct|v#soY$t-+S?m7z`nzJgs>Cg-n@1*nlgudv(%$)Z4t(nW%*M4#dQ)iEfbm!@V5-D}ksv%uo zZUdS6ZOL7?moiI33tsD-)U@XE1Jy!B{d=%A`5)3E#y3is;>p$ZoEDP%C}orT8y(g` zR1-|P-*?g$ij|8tPKTHk5>^167SXqU;&r?+kl?tT6}-wd zm{zSalBj%qxWC;57RMC3B!YP)HY0`AblHudxc8r(pX1GLc4_<`)g()@%&jW(4R*|F zY*{(v4BGYJw#NrAyXDrnQcNp?-bIhq4(RKx3_E8*bnL=)cJmSUE+Qdc9V+?EuS4#` z95Whu>mE^v6vZK1;ST9wCK1Hc!sPh)FMJ>ZYP*yEY)om9@K?J|Nmw*mqBm_f4rBI> z*Sop!CchFLl$tfIcxwG($~3uwZMFNtV)lmj{@^#Y=fK4$D@s~g=|PoB^2fWAjKpHs zIJSM$qyrwOp@$_tlc~VD(~(*~O1FR?Y{mzDY!5-|1qLTdo&vUAY#g`z&cb~SHuo|E z1(0t@h4&2ia51rxHg;&KHt;LUTGb}QRIm4S7|=FcWVeTui62WUFKt&CQ0pq_K56Td zn@^@IEeA@d5K+klP0z@TZZA)GEPg)a%2$9N(9aK6QF16#x7Zp?(XYR69<)O7sai0N zFH%G=R(iykYcWfssMKI!_PuN{d2dA2Z#ujYw?7oAmm+G847Y{3lP3J2;nlobn6c5c zosEP=kt7rh_e1rwD6GYFSsH`JH({ig(J ze?1(W=nsY}oF~or+LlPb`p!9S#PR|hu@^@vSA&)VbCDFawuItiBF*w?_x{bpqrkW>wp>Q81dD9_!T9;F zlu1ZcQd_7{<)22;hpPv6#=zbh(-c9a1_^kd@U){V!=PctYs-F28Rwz?i5!I~vXA(A z@IoaLXxmmeBgsn(oCECoBfm$8NGm!)XrmJIK6fX5r)Q%=MG^JB_cwYcEL`7cH-1VH zCp8~^m#v)XBxWZMQzqf^y>qYhzX}OJculW%5C{{tO=7$B-6f%6#aCM@2zQn;MTZPk z=$~RWgMs_cN8Mgjx%bt!$O( 
zZ9B^>yR!;;+j>Jq2yf<#CfpdGRI;y`fi8UYnpRf?<#-`T`S28nsR9-a=X%nk?G836 zmqXA?`TnCOr^hD7vCH%U&n7u#8B65!_u4v@iUpMm>!qF6n1uAf7E7HexM7pai}Ed{ zX~t`9-V2XzdjH1vH5N#k+%7FB%C*)KuiYZ<8uY1~8t*?T6?yQ{ny_Abl1H3Mw|btp z{4k8C)BRq{b53L^vsiDFfi1Egj;ov}`DtUdGpLt-iq-F-13!x;sH+T#oy&L{eEz!$ z2{NWusmGYggO@@sh0=6;)V!JHw#=$`6DCL3Yna?*I)v>LF2}{)et)qrY$LE>wb)p- z{AT@pBitD9nFzIA!V^xElC1jiOuw|mE|lt#e$}azPB*wldM!sTY5^nBZ#+>#d8(CD zTHw;qHvW6i%CsxuBb)xPJpE(|F$?VeJZA9OSb)#GIQ|ZFs&vlnmO>i8XDay7HQ)Q@ zv}>@%vwZPy_)o1vJ1*vmwP^>(jITSfvFAITpg{wTzP3^nGX7wK~MJ(0s~ zO^%!KEgs61No#d*2NcK~W#<8exo8J?93zzy@Sf2RBNX<&n=@(G<`tO!!le~HCP`Wg z8IQM@7BXH2m#&?>ggQ;>=cWUY6{U|nK5LEEE46xbQ?1-dezi>4^0>^DgX;0NF|Bl} z__|a!cFhWSbv(^PxH9RptI8T_S~#QcbWv8!ky03_xdcul`1#BVmnkFDj*Luy)a7+~ z1+kFc9L;v8+jFC=MWRB&S?q4UOD9kvg1^o3M9h(aOVus`W3hYATdJRXR-#(E*J6w* zOA_j$+9(!^0{c?b4<8fobQ>L0>uNyXiVHORDo%e*_-v-c+4Wx_TWxgz?6TzARycLM zq&GQOcu}`|a*FTSpKYVPG3ubd18*ow_~1-Tt5aP&FYIUmwl})mTRzlo+0s5(Tda1~ z{65}mJJs%Yujcoy3Z`%pu`ASleYVB3*!#YB!qRwH`Y=V?CLMhWHKl|g1E)t$^0y@n zL#x^xDo4JbqOTL?nH&7TPc(}US6Dmos=jj)!7n|(df>%)eX zKhg4bP(z4jmcXhUZx*PEJS)oa9T)vLOoPh*+ke?3LY;;MshdfqJ93VO0Cd`Vxh!j% zUMHTlh1Co;BqDtKA$GM@oV(j*n2)pHz%@LpDIVI60ZPqKxpu?N>l1^~6TJ<@ z?h#jv>lXH25ZT!FCi)(}QTjkIy4XhPqhKk-H?2VU*O$SKS!r7z%dg8MN`+d4P3HYH zxM%b0r{{_svWi(YJ?So+aj3neGi23$!D@M&S(8!c@E&D8jKzvI*~?}*yNj-OitP4F zOA15TjQE@PeV#IHDGz!2=O3nvn>l^Rq8BaY9MAUrHUh9;|AEU=?u(I;<KXT>uhxtT(hGQIF8$fSuWYw!nO&~4QqST#AnMVg5ntLM92NGC+vZw!7dzb?Dypo4 zri$$v(XwvzIINqgO*L1pJRIu@h0>1;L%j?JPg&zA9Mfa8ePik3E7Vh_(UCd4L|wf& zo~PoO;`5(#+C)Sn`@S4{d6=v`h-Kv?#o9_sq<$>4w*SlE=Fhe~p%9e@JhJK?*>yy4 z``y*eFI}D$WTsgg#Ht_LC8ZVPfkt}h-$J-xg5+3m96zX|=|TEQ93Kdle|F83s(w}M zTse%r`_=>>sF(i^z5SP=XU_o&`-&n#nucjNc^t0^D!j+NEJkYfq7iTMyz4J=VlHWY=gpo&?I-mnE$*&+GZqKp4 zjyw=jpT0CZdA@}Kb}F1%7vD2Nz(g&K?(6|upUy1J|2r9(h=)u; zpLs77KW@zF=z{&Znj!|>?r$8O8+F{S^Q11`@U#HvFn5RqPPlwJ}#g_QBM=F`fUGsNB#)9wz)9 zfY^oA*#@y?jJ&B4$XpL{^7~;lgZml%4gTsMvrhYS&rH6m8|_h_u9I{S#wTF!{Ipy& z+vHdawT9ixd~w}ZN<$^&iq&ZV2Db%${3U?#?k%u0#Y-LVlV=*7>h~d|Av=L)11VNx 
z9WBx@h%!KTxv%ZEjra=ATxA3gb!H&=h}{`b?R;;SIXeD41Bz>sz%Jza15SP3&C7ti zPAZ@a75;8#G%le1#?R(Io;An`AonOYWEP)iozaNCdNlU*RGxCI)Q}x<{yFM%7F3$49i68~ao@$W{;uW#*=_AH*}UXEvN(Fr4!npV#>-pa&%OCX2~q zgdQ4Oe*Zx7v_aeVFOW%!fBBkjM=Nt8Q~Wc5}cB_`zI}APT$XmeVP9{ef!Jp^XUzp(&o5`^%LAn}z!Ec0WH_ zt=g~NmrHI+Wg4ulaE&heY93d=LS4A|mCLm_cpTP(Az6ItTD2cOn+IuN23t&~%B?Gc zsic7w5en7LIgEDA>St)PvB&=x|_nl7+7=o z>L@kXUN9`-L_Ch&$5hymqi-l-b%Od0PTTSW$&49TG>Rls1UJc1;lytPYdsP`)~udz0kY~6o0MZH+Vo>3+x3n z*`qTb!?FE4vXZx{~&+Gz10-!Iz_TEv? zR{9#)7XQ;W#Oh?}JYjtv$u?m8dD~GmVV+0A0+^UCP+#A3FaO=?v$yi!;(b~VfI|N- z3rU)2^M`kGOq}o!w6B1_Pr&0yVcyL03k1yw~>KG<%O{d9?L%ep)(gH48V~u zw!Q*D3s#4}vwINeK+d0C#A^oC$($S}z$^&Rdz^!mHKH&Su|$9-DGbH#qIwJMS0e&} z`MU*VfzsR4gib{8Fl{v=BQWcVh~Rh-**kysPJ|yODo51cjCx+$k@6t@`Lc?L$ek>V z-M5vA>qZGpZLL=Ev2>P^n7A?M`a?8MI3Hn*0FO34irN|mEq+N`$%Ohc zC3u)+ENIRtt5vy4kLoiC+!yi3e|i~CbHfalOKVs>o+Kk_4j!jXr;)-4zi2|3Dy^}Z zbwQ0(sZ2Vr2xws>q0ot8S(H>>^u6e9<}BEOQo9JqStZ1H_X;qg-CxbMlg6EYa2sL2 z!@-B);|g~}o|qJ|Rx+cbFwh6|&F!?D=mv!RZn{lF?QQocLM~4)n@^zxj4Kdf)M|ff zh&?07TXD5a#N)I#>AD>w}caTZ|v#_Cnqm!oi@#Y4e3+Az~1@2r*0EGY~W zfM&g>Wkok+B1wlovzA-2G@uAZ(e%E>1V@3a#Q{<*Z_wULt&Ff@hM{l309>kz%FnPL z$SRi^9_Nn?=mZ-LTHDchbTKDqFONel!P5q-VHW+WCr^#i1{j(HmGG}fCs-Dbg(oVk zLI#6e>>vs=FI!Uv;~O8Ko0hD?uAfy^7#Y&8|Y zhD1uT6|Z$iZ~8r4OIc2r(E`R@&V_lbCyrc-0x%T;wzua}O?YoVkSx^OB#ba6}$2# zu^!_)*%YQEpW6$>BhB{9w^X<<;}Oi9^u13$s+6cmho-ZcMgYDEcRwZ>V9&4+LLADq z>Q1Zm+Ps?<9r_4Q=ge~k)?yvcyjJ#WBrY#l&Wm;mwyJjUYMDve(I zv^*$Qp@50lBS?~P86ZedZVP-JUiK4%E|wO#P~+|+gizcQ4ZovSgo`XzbYU_EE#1I`hH zP%C6J0CJFXLM1|*@H%Re@_oE_OXILg=D$6tDgM#(hMG_LbEFXW*Vxsm?&##S$j3gVKv4~9V)rHVYbFqOVtJdjwoL~i?;E z?5$YQe8j6+`ymcH1fU;>MzZ)^2+(2ya!to=i=~F_xM9&TlFK)fBzjcnkw0k#aEppQ zB4jta9Z#FW!k2|J0t8Ux8o=i@h)D>4p1L7L63f;+5i#E?PYMLTM%l8O?97)= z_*0kgU~e3iU>WH_89;-{?S1v_taQ$xlE11*NJwy?1A?O|b`gj{l+{XGE6=AvbhCl)F8d)^T##s% znjd$O{Og)wX%1*i5IE8;wV4(Vr(;Su1R8b_8cL1g{ubH6)yLnR&XMQCRC4M0X#feJ zOr098y^k71$&M+`ZJl4W_4B4IYm_$7wSsW#6UYb*RYRT1Am*TvSMB=!Ge0K7D>EjI zyzjAHM{U#2q);mmby}ku 
zk`A_^;Oj;4OfLq_O?;EQGl&?LK%*+t{b7hbTOf-wjN%axq-ZpZ*38!B(2t~{<>r(w z$MUN?)SLxLs^veIqW>q>oqlhjS*U@@EQZ>GoU#UHQ&q-Nc170HpGiD> zAt5L@mE;BuW+d={pz&Lnpp^;1*0YEa$n5TE#8liZW6}J=9T48v;%@>YIffN=004gj zh!m56G=cX9YGrEW>DXHPHE0`T5)nJMKomNjA&kdmYmFSgcb9He@v{l9yELDd=X8J7(Vd^yA@PyNiL0RN6G(z34sccdugVN%I zdQ6WDsiIw^ikc`mEKj%@Cm@yV0Dm2VPP6>;2djss8INzOah1m8he8IT;>F*I%RPXU z(df<)IeZ|gcRnA}=6Vp=@y(1=bxDzHKCXmea++<7<_P6)04f3449LgOWJ0_5du$2N z2kzbKJt78C_8DGV2sNZ_!?2YWI{lw@*GtDBwwhM%5dV{OxHeU)N!-$Nk4$o3NMI1_ zi4WB*huN1fRQzQw?v@teJ`>jIQd?P&>QIuQz(lbWmh023M!}c z*TmXt26)Cn2^`&gKH*e0Qm6nu%x{7@VcX0tzXLh_)^r8AcqaKEKbc$q#IHy6kT$ND zMB!3Fd!cn4Ko`DEMFz42=l~vLx)Q2`UuHX}BQHh}?;=|Mk#)YyZGXD_^lCk3ft-C8 z0&5<)3+i3{g}Dl5@pg@KXB%hle76zq5G&k*XixR#7|-D587gvk0><$@sld-Tk{#dz zyU##PJg20E;REjA;uyQ(v;XG~xP!gTaH)dNQS%}YM6ZWrBR=P~0eF=J%l`vhb>;OJ zuOfDUk%}fvEMFLXtkC%t*i)bwV1%wfy@(J)D8#_yw1If(AP9gJXd*y}A&FL~`{jqi zm(&%#nk~eYAH;*lX_`3>YuCgze#uO&`0p-#hyN70Y_mD)j%uAQ?)#V=KV&&DVzhB= zmt@cn&E|ykaS)%#@c&#eYiVR(dWO!qFHn4AF^TmZIMwuU~ndl3PqvJCUh~rL$Gr6pDz(a6d_*31%&g-xs^@@^TpL z`+LFQs+e3qG$*Oku7PLZb-%heuZ~B}-OAX?Bx>STf>cgBt?C6T;WBf(JTCd;?G?|p z)i11bCyt_)%a|-~vX#fXH~`^LD!k{B57n&pj&5srIJ3m%xlz5g*_*j(yCij~w%-Ng zp7hY49)&@(*`2)IXFLxo?VH^GdLg@9tyVa4^FBI!oihRy$Voia=M4 z^%}g~$&2hO-9`XB+go|rg<83$Uh4;t@&4_dW>T3v)tiaUUV`MPD2?wS-xqJ>=r`p# z_!!hns#WOpW%X{31p+y`QJ1fDt4)WQ;8Bj^w)KyxmCYVjv@+j&p(YaP5>IO^ z6Ft=OJC35!>eZG(^!ar|{2n3_Y1GvGqKWv($lKr0F$X)MH9ygkMGY9o60 zyLA7GRQGO##5W(i-5C{UC+)*VjKlXxARtr90nG5YdZ+L8JjNn+(GvpYu-;oh&#Zxy z$?H<`aL>h`nwiN^Ae)j5pmef8h-?b@em6x9B;Hr&zX1zB2(Ut*CC2Yi@O;B2FQwk; z`(B&gN3QM;K;*doBrWg>Wo1jTVb%L)4Rz<4HJ&A2=`7dg-EF6hj8~|td#trJIJZ#C z6Q(TOawTv*T!yJn+UMPVLLH?1G{3{b^{0e!NNqGlVfk8&{pzOgF@d?wM|-6!r*vVF z4)v0lZmwo)%-7EEDlzKzP`MJp^F=lvPl503c>%RhoDwD7ha77C7FFMz2b*pQv_P+O z##Wz%m_3epHBX(~`f%>w^gBS9mJB4BDM`)!8|fUe^Qtf8^SSx+&F|w9o`U4a?fHI@ zpXbX*1?LM=VRU|V95mrDVsTjFqQ*kGjKTrBZp3!s!+nG0?p7dYZjV^d2FL+868`RX zT;ZpR_al|dPOs{Lh`<-tdoYpq`+8@((e1@yfm{aXllQr@b1rnX*yJ>Eb?cd$qrxV& 
za{IGI(Bm{XQ<`VU?6$&W?wdnhl&4z=mo$QUiu}C^#Le8?Wecu*qH&y(Ugmu8yd}tA z=C$SEXL9&q8(PhlOyMin!(}>-lF5`vOU}3)>X?h=O95J)uZ`+Z4wca_ANMEiyn`e* zz5^*D%igyrMr83Fuw;l0Hh(eNqZ1)ZIDi~AO9F`Z@M}{2m5YOnB=<*szu%qkp0Qz% z%r8IN9d;k5lYN~vh70mzh?)el=+N5d!xRTi2NUCd<|(^<*I?EJBCjaa?|ST9*~68{CO(;2oL&PJYEU2iq1N5KXpTh?(Cl0kRBCNWj! zejKgh`&rL`YVNj^KGC8sWYKf|h2v@m(X|O=?8)_tXET*ozfyd`RWhSwySD_M5t@er z4S0)wBG3Ev!zSZu)3fYFGM%anfCCrS1h~GO_>g;HZ5D;#{kcBvr{l-dmb6c8KvW>| zs3001X-FpJk8g}68#T5_=KlfjDP$|qqEE8}V)+Rx|0`Ih3C^EP8E-8xz6t2lFjuvql1d5%L34rP zMu-5$wpL0&{4LH@iE086C@K>9{CR&5B#)*(5d=WYOU^=S7)ou{l6t;5l%g;iwd}di zIFK#S{6tM?iL)99`x~8g0&({ju2F-8SK#8*9uB)@olz9>^b-Wht&@BNSGymF%QbRa zkeo7C{ph%Zx#}VR(B+v2o|^4sSM^1jyf!>C9Ig+vvl6^+ohGbjLY6&kH$%^{6Ch~!I}Fu&^M_>uP|fxyP3I}9D>eXv-Dq}QLBY!|4E`_4yXxTU}Q zwN^n_{4I6EvS;b)i_R{TC)UPuy^rdRwR7hybd-JHUPBu!V(Sn7#fU!;X1+)uY1Uh( zVR!xbh~y=ww8hg*7(*n$kY&rqmq8!fo}0+1E5TcWOz&go1*GcnSjmfJ>wBIWdtB7< zZG>dwgtMWPIGU#T3c$VVt8e(IN`bKJcjPNUMC6{J#iimvH(|z;#JS}0G;)L;po?>Q zUY2ld8pRHLi`1Yr{C@)&oRMcFrYkoZLtlsc=Pw8CUpIhF^tYj)bABwQz_ z5@b5uUtyOIt_O=0rcc2iM{TkrjGebg2gDDon(;x(b7GTZ8kL#I@ z`ozg8f|fK&QRtKdFp4OZv0X>1zE;OiMfZ{67K*?`0~DaMic1r9jkL7%cNT7J8i#n% zH=8Al8aKfvgWZ0YG{$CsP$*)10zAx;3FSNJ{Apy`z7$a^{7R_yv5`lI2fy5AOeaNp zOk?!%;y1ia?@xP7T=S9vtVh{ty5=h{lSNP4+d!5e3N%WVZ`rq}jS=3nARtnJfb}b? 
zQgi<@d<2&)H2V60Vbhw(0ogB%bmZ{hz*g@qOjsTd7kK#Rh<6$nMi?wmoPmb@lL`n7 zAC-6ghBO!o&jEZM`-utsAeU{E$YG}jCi(=GPJZWLn~7*%;Xx4~4n!2-TD%x1kDt4u z8>?kmRVVCH5YXY#0YhL2vpfE;8X^aIz{MvKMK_@jLxP?_#3y}QBSdh*`rt6I2#dkt zF&f{q{3uA3!yQO1$lY+GpZm2%4yJYl}J=c}u4wcd^1{z)Lfh%iOhq*$FNeMNmk^Ii&FnD1jWYizL{QySGU> z+=(kmIy- z{Jqctrcm#^bFn)>7&tUjH;YWbtq6yVM*+OUq{_A%86X&<%JID^9{C9Xt4x60l7Qn4 zvQK&L&BtvCMm-U`93233F$h4fWnI70ET>8a!ZE2$0J>!ya61I>yVFU)dA-(np+;;^ zJiHLFS$uQH=&KM6rs00>PnD)DH^u`8|7_rT;8O57EW`TN<3-7csk+cFJpwArvM~Cz*W1S z7};P50I#Ysb>y!SfBqwR=V>@REZW0G#C)>V(Wns-WjgT|1ksL$_5cpc#YP8_?A3A&w0E;3NyMu~IP@f$BEI;e*x#ThfM5S`p?c{H1X=&w$DO?A5Kerk*5NBe-$%T)Zm6cB% z9l}$cFW9x~a@sC_7_Mg=#Vj}k&h!36b9;O%?;7*CoSd}30XAjURii^8$d6o2mK?mm zXvGe{fC=SXPrfiJFcr7E&kb-i8lu`~T*w|>s8PqmVKP~`(_~lMn8(lW=g6u}!Cydq ziTxhEma~6-Urk%{l>Y}P!nIYEOP-nwT%Ur#6+!ZI^Q+EqQV=zkhiUur8jml-yIl`@ za!|bEMj{@ucUhjjqaS9NNcS{o)a}FQOIQ}PqyFm*P0mLAi?~RE8&=IT$OVSavO$a0 zT?{WN{q0T>K^7Jm7dWZ<%j(nX7g6Uy*;9HU&F5_sqvT|PaId1N>#bdRF{L($ktduj zqx-*6CQM%#Jv1>ic;HdOc=_IR@)@wXpcUkQ!7Cr&i_V0Wd|k(Ny1qZ%_~pu^F7j=VoA=A!M7uA{cEyr*7A1jg<0!6lOhJ+>4&LM{Tq_n z<*Ji@S(|8p;$ED^BmoZCuUD*rV~7NLZ&!wh6w=`EJYQ9zTAY5E2Ol`a&eE>=O-=?V zOMg4;uH0Je7bGM5P}05os8dJ~%~SI88zvveSxi2>+_qVDwBgb3sPxD8PR_ttCJ)I4 zX~TtZi}Qenha7>Fk~%@7^f(0SWK#56QF|kE0>fn17?eWsr!%l;U=tFNjkK+?S5U9k z-?O+mN);}=njdabc!O~DE*133iC}4ijUo`r5O$rDTmI_@mIhZ#*O8h!ANw(^ob>94 z_FiiEIN2#(U5AZEae69xyZuMU+VM!$7F=u*E6F8LuND(^7T;$SFwyW&Y44>(bs&=p ziq5xutk{O?O;uQg!M9*{W?m3D?`_{sK1Pp*u9;RiS#g;7;jHUgZRhj!VebytR3cM&I+lu(>CHaG3FvG$S zzT0>byh#x_ZZv#rrce!e4LA%7h~na)C6w^ZeeL?dBEc-EGDPp-da zB?HjqQ>GXKP?l8K2<@*5hjs%FgMY8l)1-UW=xBgu74z>Vc}CWcfcy(*^VVvW=UY<1 ze+hvK7kTpi{6ULALgmhgr)lx?ttjABy@JLFdd{|N1Tw_Nvd#yUp4E9UV65K2P{2J~ z2K)byCK@ODE*1)kLg4BD0(kfTYSJc%q+m$U0gtcf1%XgD-O{3Fjm++1lhU=s{#qo* z<6!)fm#Ja`T)cpjS^Gd4kGw31yb@(MK3S zQkCqO8Bv+CG9CN}R4>(xQeEsg0f8iQ+F&ssbJ=C8Lk6 ziZUC6l$Vng*>tE|YqM$W)w^Sc16h=jiE1|KOw}N3+Q2-u@}!t#23h6tA2OiZnQ}%h z_2SemJqiKI3Ju1Eq<)uT9T6rbwQjpf-QGtkO;vclS|S=J 
zt*(ro#k^v~_Mk=U>z`+p(-kQL`&=*K#4l$~glKN8&v~!yPn337aw`_S-hT=!9PMY5 zTn**#|sJKrwX@X^iUSzu@Wnj-P8E{@-!6FF%VbPE|aW~mKTB`Ov*7LZEnPaOk<{QWnSb)t&Id+AeN)d3{?HLq} zU8;l%!J*HQL50?iG&ub6apN94W$)A=abDptRicwLhOd*-TlGD{RE&f#d z6x*Uq(wZfJ^gzh#Vt~N3a4>-*@-6dw@Zjcf-g;WSOEX(tcC1-Xq|AWRmTK_h!^)?d zCab}exb?xiST1c&Q!<{soW7v2edcr8&)H4&qMC#$$Hb{EX0|J|!0|2EGuZ{U0#FTl zBdvD*KpQ4?f$p4AwZRP0(t9c8exI{snwe2u9sLrV{>n%`hY4(t5AzJy+Eqfivv!JR zm35XOE8pIcs`k-V%THnM;iI^)Ylm-re-WA=@EsKi3jB*NnrOmBi$q(^qI_?n(bVg5 zNQOhPxH$TQIZFYP68XJSiEbUUw$^E} z^J+aI51n6$m#L~3Q>7p#mt7Lk6kU;LHclL+z47Mhc(bG`;qH-fU%@@9(2%VzDqlg{ zUL?#^AEIiE-|-?NlB$4_|1$WT3pVM4D7anS2W!#g^^kP3lTRnp>&qw`K8nh{jIqM% z;tMCyI;r)MuiI4Ciwut?^;*sLBCn6EGgvN%9fB;|{k}eIjSVP|tDKvgjYV(2IRCVz zNi>T2tqvi3v=AYm;;{(@1{n_7ZndmQNw~Z!N+Vla?Rc5I3rGhhb9p3oG6~u9FFR@r zIjSlH&@kBjl@dEr%y_`eAF!Nsqb`Ar$vX*+c9Vr z=CGc8dcQBUopGSK(mpBFd9VBs+f^i=W-y*>f{5$eW9q3p*nQo4s{Iwr?31XOLbfCE zIw^sy#O4irCvH2bW}8l%42yO*!wH#7FoKc)S;u6WW4Dr(1_Ng%sq|HV= zG`S=1PwE?b+3gzGb}OV}+MEg3mWn~{IhOP9=t|_#tiY`*+Ovhctc~b{ue9E~d{Jvq z1{$n!I2NqD481-ZdeqV@87`5%Je)afuJ*Le!C3R@i+&Gk&Hv&TFLlnrqq;<_jt%=N z6=n*4uWEY(H@<9ds8MIH$*tl8Qwp8ojXZLNq25(pZ0Q%qZ>o!=ceBCt7%me+r8mTs0`N4Crym)0y;&cqhsO(V~883hWb zB>R_3*avC7dN^LIu?nKUB#douu$`c6RQXgJwNS~4ucA)aBW$YlwGgZ%hQl-kqtT`) z(j>-FO;{4=TWo!F%&1Yuo6_PaI~BoZx$Bsg*`k?ktD+Dy!h#gstoLc!o6rmeAqH0$ z=e0gPN6Di_7nNq6Rf8My%e?zXa0cyMA})#<7X(JCLL+a+|DgzU9(PmM`tyj-L6oSu zF+gKzwRIrj73P)o6fdL7n<%i*OxzpxlyL97^v@G zl0<4YmUnf8f7)QeNX&xKL?#8x)a)D7%JXBUg^VX}*g)SDM>5BCkta*P3L*uXV6z@;_u>)He60$XGX+S>$|)|k^h zZ|^H_v3%FT){pO>U8>yc_cB+rKPXCRWwVPB^0;Ur&HX9o%m2hC^p;Us>tV50vycW; znVkbawHRXd=ESW+MMTKzK1dTr(%dByCt-VJm;n8_+mFjVX!w22%hUxtTEEoU-?W9( z*wwU7^KU5^)zH05J(LB;F9jh9VHj7w=kqEjNh&p^DNGiLgOxd2UCphYuMaP4@LmzB zxVXiYtl&&hAv6xDWPCV3; zW7m*!li^Ib(XPp;^&Apsw>d4T^UGD#qJGz z&@*h5g$pPIWB1Y=ruAI+snTbagw5x>bxHJtOWUrIKL|Ygy0fUlz^Yq5jeCPytg^`2 zt0ma|)HN#X>5Uep>L8z6Ny1paaWp{NO)HXT*>1ZA8rf9Vm;5>1p1uG}vr!iKPC3l{ z>bg;rdp+H_XK4o4AdyzQYFnZt5=kOy-mEGXf?H;sB(r?}hHz(**jdB=p}s9g_fR?S 
zJji@Prv&_Ee^M*XfhCea&|{hZrJrAOJJ_9wU4HfG#^uiqffnra#9qm{z{(5-gqKli z!Fk6zqeCm&;e%Dr)HxgnR1gP@6(ftZp@3&`iGI`udSAKa#_{N(Iy*w6Rx;;@AHSwP zozuye;T)>joG*QROx30I##%*4nL)8gC%hst2vgj%cQRdtZ=U&638Umylj3 zsHl*d^|@SQ=8&$q^OZP@<~f(*C z4i?8PU8>hj@250MN|_(p|B}Mdg_(6X;p`2LR|;|}B)3-VYqI-B&nn}O9g5MdKpk+*9xK1X{PY3*GK#&=$UzU+;Q}ouGHHXQ!2<60ZG#W1lhq*i8}TiD7j! zB9=mP`9w`75;L&+q}$FwRNfoHP$W60l-4TGnphPQ9uke6ru8DMjkI6AJF`p6BrLoy z-WX$}@h-?sja_?GKgKp+bhh;(eX%dv*FUV9ImM8FcVbDWrnqJ0{p5n^8m`RaF9Hia z5?PPb%9GUIlkr;NO}rk^_KS;^qc6Wd^8%#X+LM@sPJaFDzBJ8R-OZUC5m@GWIrNhX z4F`^nkqaI#KO*Q>J!vz3$2DJka8Ax8An@q}^v)8(VXSgMvPuR0v}<8&`Ylh?*3t~S1XVAkCr4rN*67WjtYc*s^wBWU=_duxG>31Yz~#3Nje^Td>6WprLw zB!h6~g8Sb*Hz7w+M}b9#QmI151Sfgi8T=r8`^ij{dts_w!@R~{J)=EZq_12Njq76+ zg2{fAGuqmfKFgD)w2f`jr6$dC$Lhy}=U!;s)Q|MQZg$B>i%7*ysAmb|HCDT8Ii_3} zuLzKQZRovfVls_lBE&2Q$XME6iyF5MyDt_+L$i71t6ZLn;Hg^48+EhJx?c0ovriol zEoztwFP|v6inbkYiJULX{};CXw=DMnYxbFE$h7kXeHU1yi&U6jiTk(7Eelrr9!luD z=TU$yhys|5MP6v0M*)Cl7L(qvRzOgG6%d1&}k;N{+EQq8B*v8g&E!>+$ZUo0)7f2_qa*CVM?JqXgGFcH7SGEoR7GFeK&xt-c$?Ixpv6O4G){jD?MfWE>|y}b2;8Iw)obZW9N|6 zZ9G?wswL$O1>x#sl5{pl|7^HI#(pp))DwHpL&}vxOIE&)Maa2^YFGRhxz?K8RD7xl za;aD`-3Ru|0VB7OaUWSazEmol^!YhsCZWTeMv?jwyq!6S6RSlKHz);N^ER2Z(oz)*O;oFLKz2hH*_kr(|d`hsxV` z&}#kIw>|wUHJfep=;y@m>$vLt?^P|mNZm|G2(OZ(@*+F>{efieqZ8QYM2gIHHo;=v z4#DYG)uK*mi*AbaAfGCT%Rqh1#7J0u?L57odRDNceS1Bach|D$^``H-3KTcZEQi<5 znfYUV%C)eOKtaJ~g_yN4o1z&%vW#LXQ_SnDqR8xy;?1U4mD0o(WJDtFlYHrAWHwXA zMXqEUWDk@&i59!&zi%C^VVmEqc`|XmB~m3WtqbUlb zabyl(28O38vQy=Q;)ednm0eq!igglLT&{UCuGmr>@Q3qP?xyu2s+Q5yExs*xzUepp z))+k2>d3a*eK|TN|9YXj+V3XFEK*0AFeq*?&*}&k|AV3VSH}S~P|j1U5C79-@0;Ia z4^Ix|>SMPtjqYaKP4N!XS2KJ%HHPfsH5A#0MN z@6)N6ObX3fR%W(;ZwmrS^<0%m-zZI`yBpElz;#q@(?Qk}fgjrfI=FSS>9(KSRIM&3 z;w{KDOn!8J>bspufCtK3{+^iuHy@vFrc@G`x@MY9&jZby9cY3X)Hv#IGeybd4^l#I z;zbMZ9=^XJ=h?kstXb>+qoSn=4{G~ksdvCab7vtL6xvLhM3BvXyYSOJeLn4!Pu3># zq_Q3}!7krE=>za>rOs{py-@$Fo5@rkb!(CVWpsO?&!Oeh(dQ&yF!kIB)=>0XP6RVZ z4a7(Zy%;9Kja|{-sIH$XVm{y!#fxAG-XV>URzPCK+(dDXuDm^bw$m%1e_Px;1x$|j 
zN;pECzG?;CxW0l+;dJ!U?y&rb^)bZ-;@n!OL$uO7hmob<41dOynrK072h;Gg=T zh4V&NH~awZ(qt2wgVzG0(!oQi%9_5#_wVV!Q-Grs&rAD#29$Lg0`uR;6)I3&w*wS^)<|4nTnyi28lFl()8D4eE|92XRYVvmJ_{b32xX@F<$< z2Y8gV+gmw+dL88AY42^?pWL8|IooV|Nf1y%?{XF69jP2hsfN}+nUP09lJ*@e~*<9fuIddd$94{EakKR z`JtextlC~gvZnD(=7NAuU+?O4o?8l zKU!Q4?mX@N^@%-CWL`NSAtl_W6|Nh+j@}Q;r~zkX0BHOGYQq%{-){xVJOVx;!E^;H z*tgD}LVZ&}l+ecAE}L#NTCRFU*n}HQ^%DV-C8rxj?aA4mFN|13a8e_I((#{Bpn=yI zMSGKoq8zSz8LICL;v=jh%pYg`IzLOh0 z@f-~H5)Ae)lj}W~{82jtGC!8Z00xx?o+X-t_kf#avx#e&WI>fY{?9Bo37+d{539C) zh3ue~`4e!WdN|qiv`!FJ^Q9k*bl=M(DV{$l;r=8j1pl6%K?@ueRK%;~8(^wE+)moo zNYh1MXk7}Am#t8OxM2Io)43eyU1TB=Wh4Dv`9l{_3<#?2m zq{OTuEUqU`(;mCm1?nRAdme)?q@U5)oTr59lU{n(C*(CkUGA^@54wF{5#UHZEWU3eaNbVsC_fG5cp@7d{eP}u;fc`k!F{r@>-09uK&p3L(?Yok_kiSQLg)u_K+8| z^mE*r7vNoXlo*!Pq2l|5|E`X#!OZe@XbQB|~q!>7@}J zY!TdM+F4p~D|h}is1$RK5+alp-l|U(k#w+WQkIe1?^!SyvH7EQMqJD)Wv)NoRw7?S zK<1iqWgiz-F!4oC4*KefnKX}RULG)sNtRLKlZ94xt5N2YFUagg7fC3tP4x+v?2?~# zm$GDO4OlB87R#Fwxyqg2v7}R8`dy-KQF)sAyQ;z%*hWOPH@zg~T2_qHb3J_KuoY)4QOzr$V+rqZtW<=0X}fl3)4 z!$-PS(`-xks@BBQ!|DfR0(QS7WGqBTpgJ~`V>fJSJ1|b5tl+=D4e01KQz4%ec&AOx z->-K?olvgd$u2=??$U1x1y3uLgGRyN8j;6L9`>$O(U9uQt zlCf-gWrfKH)j_4WLk2AjdRB}rw3LotbzaPw)ct2AC`e4Q*~G3)hEsVN+4B57-3VgM*UnuVg zpR0-trD>lG3EwfV5~CYZSy7`HDrhH(xF>ZvO*trV^AR`HMw$)Ffy|dujloh}v}~+` zO6s-xGvXGxpDnLfw4C{eWH78*i8Zy|TISX)aOvpbCSEJ)=GjCP7wVSd%Tb(){*E^O z`x6eN3jvE64mv|pTGB6QGApHM1!>nFTD*2Pz88<48H?%nmAMCOP_)s^O1lESo-R&a-uk6n}?GGcavp|m(2?o zs%D8BiH5gfI45s9)^aWzp9XM`N#(FXZ$~={O5SlsH!snh0@IC?ei#Ky)SePX<%9S_ zs?rLftT2$C7E=BP+DSe82Q>Nu-0hcH+Y{n6m_Kv7t?B4wK=vYq%a_`0 zu$1ssrYCf)7tO0edLAX_i#ue$j*Bngkr|7@20S^}w)Wynb-;a(0nn=pkk0{h2pzE-!qCZ{eJd=C7|V2OYflO|<#PB3Q*s;YYK`pl2yXXr z%INRUJ(f>IMoeLvumM`Qs>d?P@L{20WJfd?N{NCPj!lS!eVa9*6);<1q0Uzf$&b1Jy#m40=-F*M?PtmohqY&^CJZTLR}Tz0PT`v{xeCF2x2b z_tVRH7vJNuu42CGO+P@Cr8^Rf4U1z#YW~t5xnnBRZ7yg9PbPz`Fgrl5WjMR&)&HuU zQb6bq7Qw$a#3BOvzwMWJ-%5~3b`G*98(kU~gJru14)LetXH5|%AS~{sQE8Q_O|@z= zJiAV0BIF!Gp5?2P`%LDYiA;C?O&*0wy1p=<*ElmpN1^WPw+>1s@}|v6o5u+vbalH+ 
z+fv*d91{=(bKAPK7=3tWfDj8NLVX{myR?9QU;;tLJsOA=73Lg zvSdQgHjb(&sH)B7W?~0FErydgm1oHFVGu?|kFz|_X1*)JT=1|#k3BsH29MoSO1~1o z0f`8XF%{Zta#`(OIaG1w`Iu;xiQlumd)RV0!Y6}%XeOQhK(vd zXiFpjQ5vOQukh9L$S{b=iHYkZsz~&|HQ}dtkmq?n=p*{CWdkg1%r~w_{#PgB0FT49 zcPXU*)rnF;OlV|q5!BZ4bs{Lp5XT-|r&2%95b+i9lGn_@s)qOQc!S+mlo?!!K2INw zKs2d$M;x>?L4PQC4OE0IjNb^;{o7P20iP{+X)Ajk1&#r~N<)RVDff94s0U6_jjzk& zJTqz8iqw8Sd!9F%YjIOV literal 0 HcmV?d00001 From 371b259f896338e2669bccf813d3148a86109108 Mon Sep 17 00:00:00 2001 From: Connor Hindle <69192234+DeveloperConnor@users.noreply.github.com> Date: Wed, 4 Dec 2024 16:42:34 +0000 Subject: [PATCH 27/38] refactor: Remove some unused imports and variables (#676) --- dbt_platform_helper/domain/conduit.py | 6 ------ dbt_platform_helper/providers/copilot.py | 3 --- tests/platform_helper/domain/test_conduit.py | 4 ---- tests/platform_helper/providers/test_copilot.py | 15 --------------- 4 files changed, 28 deletions(-) diff --git a/dbt_platform_helper/domain/conduit.py b/dbt_platform_helper/domain/conduit.py index e409ad869..32e02f90e 100644 --- a/dbt_platform_helper/domain/conduit.py +++ b/dbt_platform_helper/domain/conduit.py @@ -6,7 +6,6 @@ from dbt_platform_helper.providers.cloudformation import CloudFormation from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task -from dbt_platform_helper.providers.copilot import create_postgres_admin_task from dbt_platform_helper.providers.ecs import ECS from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import Application @@ -23,7 +22,6 @@ def __init__( subprocess: subprocess = subprocess, connect_to_addon_client_task=connect_to_addon_client_task, create_addon_client_task=create_addon_client_task, - create_postgres_admin_task=create_postgres_admin_task, ): self.application = application @@ -34,7 +32,6 @@ def __init__( self.echo = echo self.connect_to_addon_client_task = 
connect_to_addon_client_task self.create_addon_client_task = create_addon_client_task - self.create_postgres_admin_task = create_postgres_admin_task def start(self, env: str, addon_name: str, access: str = "read"): clients = self._initialise_clients(env) @@ -49,7 +46,6 @@ def start(self, env: str, addon_name: str, access: str = "read"): self.create_addon_client_task( clients["iam"], clients["ssm"], - clients["secrets_manager"], self.subprocess, self.application, env, @@ -89,8 +85,6 @@ def _initialise_clients(self, env): "ecs": self.application.environments[env].session.client("ecs"), "iam": self.application.environments[env].session.client("iam"), "ssm": self.application.environments[env].session.client("ssm"), - "cloudformation": self.application.environments[env].session.client("cloudformation"), - "secrets_manager": self.application.environments[env].session.client("secretsmanager"), } def _get_addon_details(self, addon_name, access): diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index c7b10f31a..47bd2f937 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -13,7 +13,6 @@ def create_addon_client_task( iam_client, ssm_client, - secrets_manager_client, subprocess, application: Application, env: str, @@ -32,7 +31,6 @@ def create_addon_client_task( elif access == "admin": create_postgres_admin_task( ssm_client, - secrets_manager_client, subprocess, application, addon_name, @@ -74,7 +72,6 @@ def create_addon_client_task( def create_postgres_admin_task( ssm_client, - secrets_manager_client, subprocess, app: Application, addon_name: str, diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 0afa1c6c5..0d9501cd1 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -36,7 +36,6 @@ def __init__(self, app_name="test-application", *args, **kwargs): 
self.ecs_provider = kwargs.get("ecs_provider", Mock()) self.connect_to_addon_client_task = kwargs.get("connect_to_addon_client_task", Mock()) self.create_addon_client_task = kwargs.get("create_addon_client_task", Mock()) - self.create_postgres_admin_task = kwargs.get("create_postgres_admin_task", Mock()) self.echo = kwargs.get("echo", Mock()) self.subprocess = kwargs.get("subprocess", Mock(return_value="task_name")) @@ -48,7 +47,6 @@ def params(self): "ecs_provider": self.ecs_provider, "connect_to_addon_client_task": self.connect_to_addon_client_task, "create_addon_client_task": self.create_addon_client_task, - "create_postgres_admin_task": self.create_postgres_admin_task, "echo": self.echo, "subprocess": self.subprocess, } @@ -79,7 +77,6 @@ def test_conduit(app_name, addon_type, addon_name, access): ecs_client = conduit.application.environments[env].session.client("ecs") ssm_client = conduit.application.environments[env].session.client("ssm") iam_client = conduit.application.environments[env].session.client("iam") - secretsmanager_client = conduit.application.environments[env].session.client("secretsmanager") conduit.start(env, addon_name, access) @@ -112,7 +109,6 @@ def test_conduit(app_name, addon_type, addon_name, access): conduit.create_addon_client_task.assert_called_once_with( iam_client, ssm_client, - secretsmanager_client, conduit.subprocess, conduit.application, env, diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index 55ec40040..1b736db1d 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -30,7 +30,6 @@ def test_create_postgres_admin_task(mock_update_parameter, mock_application): f"/copilot/{mock_application.name}/{env}/secrets/DUMMY_POSTGRES_RDS_MASTER_ARN" ) ssm_client = mock_application.environments[env].session.client("ssm") - secrets_manager_client = mock_application.environments[env].session.client("secretsmanager") 
boto3.client("ssm").put_parameter( Name=master_secret_name, Value="master-secret-arn", Type="String" @@ -39,7 +38,6 @@ def test_create_postgres_admin_task(mock_update_parameter, mock_application): create_postgres_admin_task( ssm_client, - secrets_manager_client, mock_subprocess, mock_application, addon_name, @@ -102,12 +100,10 @@ def test_create_redis_or_opensearch_addon_client_task( iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") create_addon_client_task( iam_client, ssm_client, - secretsmanager_client, mock_subprocess, mock_application, env, @@ -156,12 +152,10 @@ def test_create_postgres_addon_client_task( iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") create_addon_client_task( iam_client, ssm_client, - secretsmanager_client, mock_subprocess, mock_application, env, @@ -197,11 +191,9 @@ def test_create_postgres_addon_client_task_admin( iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") create_addon_client_task( iam_client, ssm_client, - secretsmanager_client, mock_subprocess, mock_application, env, @@ -214,7 +206,6 @@ def test_create_postgres_addon_client_task_admin( mock_create_postgres_admin_task.assert_called_once_with( ssm_client, - secretsmanager_client, mock_subprocess, mock_application, addon_name, @@ -249,12 +240,10 @@ def test_create_addon_client_task_does_not_add_execution_role_if_role_not_found( task_name = mock_task_name(addon_name) ssm_client = mock_application.environments[env].session.client("ssm") - 
secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") create_addon_client_task( mock_application.environments[env].session.client("iam"), ssm_client, - secretsmanager_client, mock_subprocess, mock_application, env, @@ -301,13 +290,11 @@ def test_create_addon_client_task_abort_with_message_on_other_exceptions( task_name = mock_task_name(addon_name) iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") with pytest.raises(SystemExit) as exc_info: create_addon_client_task( iam_client, ssm_client, - secretsmanager_client, mock_subprocess, mock_application, env, @@ -337,7 +324,6 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn mock_subprocess = Mock() iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - secretsmanager_client = mock_application.environments[env].session.client("secretsmanager") get_connection_secret_arn.side_effect = SecretNotFoundError( "/copilot/test-application/development/secrets/named-postgres" @@ -347,7 +333,6 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn create_addon_client_task( iam_client, ssm_client, - secretsmanager_client, mock_subprocess, mock_application, env, From cdd8aecf36c663ec1ba01eb16e6b84ca68172f5a Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Thu, 5 Dec 2024 15:59:31 +0000 Subject: [PATCH 28/38] refactor: DBTP-1587 Namespace the exceptions better (#677) --- dbt_platform_helper/commands/codebase.py | 2 +- dbt_platform_helper/commands/conduit.py | 4 +- dbt_platform_helper/commands/config.py | 8 +- dbt_platform_helper/commands/database.py | 3 + dbt_platform_helper/commands/environment.py | 5 +- 
dbt_platform_helper/domain/codebase.py | 28 +++- dbt_platform_helper/domain/database_copy.py | 6 +- .../domain/maintenance_page.py | 16 +- dbt_platform_helper/exceptions.py | 147 ------------------ dbt_platform_helper/platform_exception.py | 5 + dbt_platform_helper/providers/aws.py | 32 ++++ .../providers/cloudformation.py | 9 +- dbt_platform_helper/providers/copilot.py | 4 +- dbt_platform_helper/providers/ecs.py | 23 ++- .../providers/load_balancers.py | 16 +- dbt_platform_helper/providers/secrets.py | 61 ++++++-- dbt_platform_helper/providers/validation.py | 19 +++ dbt_platform_helper/utils/application.py | 16 +- dbt_platform_helper/utils/arn_parser.py | 2 +- dbt_platform_helper/utils/aws.py | 23 +-- dbt_platform_helper/utils/git.py | 4 +- dbt_platform_helper/utils/versioning.py | 16 +- tests/platform_helper/domain/test_codebase.py | 52 +++---- tests/platform_helper/domain/test_conduit.py | 44 +++--- .../domain/test_database_copy.py | 6 +- .../domain/test_maintenance_page.py | 2 +- .../providers/test_cloudformation.py | 2 +- .../platform_helper/providers/test_copilot.py | 10 +- tests/platform_helper/providers/test_ecs.py | 8 +- .../providers/test_load_balancers.py | 8 +- .../platform_helper/providers/test_secrets.py | 22 +-- .../platform_helper/test_command_codebase.py | 40 ++--- tests/platform_helper/test_command_conduit.py | 4 +- .../test_command_environment.py | 16 +- tests/platform_helper/test_exceptions.py | 62 ++++---- .../platform_helper/utils/test_application.py | 4 +- .../platform_helper/utils/test_arn_parser.py | 2 +- tests/platform_helper/utils/test_aws.py | 18 +-- .../platform_helper/utils/test_versioning.py | 32 ++-- 39 files changed, 393 insertions(+), 388 deletions(-) delete mode 100644 dbt_platform_helper/exceptions.py create mode 100644 dbt_platform_helper/platform_exception.py create mode 100644 dbt_platform_helper/providers/aws.py create mode 100644 dbt_platform_helper/providers/validation.py diff --git 
a/dbt_platform_helper/commands/codebase.py b/dbt_platform_helper/commands/codebase.py index e082f5b1c..76401e41f 100644 --- a/dbt_platform_helper/commands/codebase.py +++ b/dbt_platform_helper/commands/codebase.py @@ -1,7 +1,7 @@ import click from dbt_platform_helper.domain.codebase import Codebase -from dbt_platform_helper.exceptions import PlatformException +from dbt_platform_helper.platform_exception import PlatformException from dbt_platform_helper.utils.click import ClickDocOptGroup from dbt_platform_helper.utils.versioning import ( check_platform_helper_version_needs_update, diff --git a/dbt_platform_helper/commands/conduit.py b/dbt_platform_helper/commands/conduit.py index 8d3051bb6..2c457eeec 100644 --- a/dbt_platform_helper/commands/conduit.py +++ b/dbt_platform_helper/commands/conduit.py @@ -1,7 +1,7 @@ import click from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.platform_exception import PlatformException from dbt_platform_helper.providers.cloudformation import CloudFormation from dbt_platform_helper.providers.ecs import ECS from dbt_platform_helper.providers.secrets import Secrets @@ -53,6 +53,6 @@ def conduit(addon_name: str, app: str, env: str, access: str): Conduit(application, secrets_provider, cloudformation_provider, ecs_provider).start( env, addon_name, access ) - except AWSException as err: + except PlatformException as err: click.secho(str(err), fg="red") raise click.Abort diff --git a/dbt_platform_helper/commands/config.py b/dbt_platform_helper/commands/config.py index 6183a5454..ba60b4c52 100644 --- a/dbt_platform_helper/commands/config.py +++ b/dbt_platform_helper/commands/config.py @@ -8,8 +8,8 @@ import click from prettytable import PrettyTable -from dbt_platform_helper.exceptions import IncompatibleMajorVersion -from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.providers.validation import 
IncompatibleMajorVersionException +from dbt_platform_helper.providers.validation import ValidationException from dbt_platform_helper.utils import versioning from dbt_platform_helper.utils.click import ClickDocOptGroup from dbt_platform_helper.utils.validation import config_file_check @@ -110,7 +110,7 @@ def deployment(): str(template_file.resolve()) ) versioning.validate_template_version(local_version, str(template_file.resolve())) - except IncompatibleMajorVersion: + except IncompatibleMajorVersionException: local_compatible_symbol = no compatible = False recommendations["dbt-platform-helper-upgrade"] = RECOMMENDATIONS[ @@ -134,7 +134,7 @@ def deployment(): str(template_file.resolve()) ) versioning.validate_template_version(latest_release, str(template_file.resolve())) - except IncompatibleMajorVersion: + except IncompatibleMajorVersionException: latest_compatible_symbol = no compatible = False except ValidationException: diff --git a/dbt_platform_helper/commands/database.py b/dbt_platform_helper/commands/database.py index c8d4e729e..64aaaf905 100644 --- a/dbt_platform_helper/commands/database.py +++ b/dbt_platform_helper/commands/database.py @@ -35,6 +35,7 @@ def dump(app, from_env, database, from_vpc): """Dump a database into an S3 bucket.""" data_copy = DatabaseCopy(app, database) data_copy.dump(from_env, from_vpc) + # Todo: Catch expected errors and output message @database.command(name="load") @@ -59,6 +60,7 @@ def load(app, to_env, database, to_vpc, auto_approve): """Load a database from an S3 bucket.""" data_copy = DatabaseCopy(app, database, auto_approve) data_copy.load(to_env, to_vpc) + # Todo: Catch expected errors and output message @database.command(name="copy") @@ -110,3 +112,4 @@ def copy( """Copy a database between environments.""" data_copy = DatabaseCopy(app, database, auto_approve) data_copy.copy(from_env, to_env, from_vpc, to_vpc, svc, template, no_maintenance_page) + # Todo: Catch expected errors and output message diff --git 
a/dbt_platform_helper/commands/environment.py b/dbt_platform_helper/commands/environment.py index 51a4434bd..42e8c75ce 100644 --- a/dbt_platform_helper/commands/environment.py +++ b/dbt_platform_helper/commands/environment.py @@ -5,6 +5,7 @@ from dbt_platform_helper.constants import DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.domain.maintenance_page import MaintenancePageProvider +from dbt_platform_helper.platform_exception import PlatformException from dbt_platform_helper.providers.load_balancers import find_https_listener from dbt_platform_helper.utils.aws import get_aws_session_or_abort from dbt_platform_helper.utils.click import ClickDocOptGroup @@ -238,10 +239,10 @@ def find_https_certificate(session: boto3.Session, app: str, env: str) -> str: try: certificate_arn = next(c["CertificateArn"] for c in certificates if c["IsDefault"]) except StopIteration: - raise CertificateNotFoundError() + raise CertificateNotFoundException() return certificate_arn -class CertificateNotFoundError(Exception): +class CertificateNotFoundException(PlatformException): pass diff --git a/dbt_platform_helper/domain/codebase.py b/dbt_platform_helper/domain/codebase.py index 6f909cf11..53de7d05a 100644 --- a/dbt_platform_helper/domain/codebase.py +++ b/dbt_platform_helper/domain/codebase.py @@ -9,10 +9,9 @@ import yaml from boto3 import Session -from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError +from dbt_platform_helper.platform_exception import PlatformException from dbt_platform_helper.utils.application import Application +from dbt_platform_helper.utils.application import ApplicationException from dbt_platform_helper.utils.application import load_application from dbt_platform_helper.utils.aws import check_codebase_exists 
from dbt_platform_helper.utils.aws import check_image_exists @@ -68,7 +67,7 @@ def prepare(self): .removesuffix(".git") ) if repository.endswith("-deploy") or Path("./copilot").exists(): - raise NotInCodeBaseRepositoryError() + raise NotInCodeBaseRepositoryException() builder_configuration_url = "https://raw.githubusercontent.com/uktrade/ci-image-builder/main/image_builder/configuration/builder_configuration.yml" builder_configuration_response = requests.get(builder_configuration_url) @@ -143,7 +142,7 @@ def deploy(self, app, env, codebase, commit): application = self.load_application(app, default_session=session) if not application.environments.get(env): - raise ApplicationEnvironmentNotFoundError(env) + raise ApplicationEnvironmentNotFoundException(env) self.check_codebase_exists(session, application, codebase) @@ -220,3 +219,22 @@ def __start_build_with_confirmation( build_arn = self.start_build_extraction(codebuild_client, build_options) return get_build_url_from_arn(build_arn) return None + + +class ApplicationDeploymentNotTriggered(PlatformException): + def __init__(self, codebase: str): + super().__init__(f"""Your deployment for {codebase} was not triggered.""") + + +class ApplicationEnvironmentNotFoundException(ApplicationException): + def __init__(self, environment: str): + super().__init__( + f"""The environment "{environment}" either does not exist or has not been deployed.""" + ) + + +class NotInCodeBaseRepositoryException(PlatformException): + def __init__(self): + super().__init__( + "You are in the deploy repository; make sure you are in the application codebase repository.", + ) diff --git a/dbt_platform_helper/domain/database_copy.py b/dbt_platform_helper/domain/database_copy.py index b3c0cd63b..5a6bf4a43 100644 --- a/dbt_platform_helper/domain/database_copy.py +++ b/dbt_platform_helper/domain/database_copy.py @@ -8,9 +8,9 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.domain.maintenance_page import 
MaintenancePageProvider -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.providers.aws import AWSException from dbt_platform_helper.utils.application import Application +from dbt_platform_helper.utils.application import ApplicationNotFoundException from dbt_platform_helper.utils.application import load_application from dbt_platform_helper.utils.aws import Vpc from dbt_platform_helper.utils.aws import get_connection_string @@ -58,7 +58,7 @@ def __init__( try: self.application = load_application(self.app) - except ApplicationNotFoundError: + except ApplicationNotFoundException: abort(f"No such application '{app}'.") def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str): diff --git a/dbt_platform_helper/domain/maintenance_page.py b/dbt_platform_helper/domain/maintenance_page.py index e94e1977e..fc749498c 100644 --- a/dbt_platform_helper/domain/maintenance_page.py +++ b/dbt_platform_helper/domain/maintenance_page.py @@ -9,9 +9,9 @@ import boto3 import click -from dbt_platform_helper.providers.load_balancers import ListenerNotFoundError -from dbt_platform_helper.providers.load_balancers import ListenerRuleNotFoundError -from dbt_platform_helper.providers.load_balancers import LoadBalancerNotFoundError +from dbt_platform_helper.providers.load_balancers import ListenerNotFoundException +from dbt_platform_helper.providers.load_balancers import ListenerRuleNotFoundException +from dbt_platform_helper.providers.load_balancers import LoadBalancerNotFoundException from dbt_platform_helper.providers.load_balancers import find_https_listener from dbt_platform_helper.utils.application import Environment from dbt_platform_helper.utils.application import Service @@ -75,13 +75,13 @@ def activate(self, app, env, svc, template, vpc): else: raise click.Abort - except LoadBalancerNotFoundError: + except LoadBalancerNotFoundException: click.secho( f"No load 
balancer found for environment {env} in the application {app}.", fg="red" ) raise click.Abort - except ListenerNotFoundError: + except ListenerNotFoundException: click.secho( f"No HTTPS listener found for environment {env} in the application {app}.", fg="red" ) @@ -110,13 +110,13 @@ def deactivate(self, app, env): f"Maintenance page removed from environment {env} in application {app}", fg="green" ) - except LoadBalancerNotFoundError: + except LoadBalancerNotFoundException: click.secho( f"No load balancer found for environment {env} in the application {app}.", fg="red" ) raise click.Abort - except ListenerNotFoundError: + except ListenerNotFoundException: click.secho( f"No HTTPS listener found for environment {env} in the application {app}.", fg="red" ) @@ -180,7 +180,7 @@ def remove_maintenance_page(session: boto3.Session, listener_arn: str): deleted = delete_listener_rule(tag_descriptions, name, lb_client) if name == "MaintenancePage" and not deleted: - raise ListenerRuleNotFoundError() + raise ListenerRuleNotFoundException() def get_rules_tag_descriptions(rules: list, lb_client): diff --git a/dbt_platform_helper/exceptions.py b/dbt_platform_helper/exceptions.py deleted file mode 100644 index e5eab9e57..000000000 --- a/dbt_platform_helper/exceptions.py +++ /dev/null @@ -1,147 +0,0 @@ -import os - -from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES - - -class ValidationException(Exception): - pass - - -class PlatformException(Exception): - pass - - -class AWSException(PlatformException): - pass - - -class ApplicationException(PlatformException): - pass - - -class CloudFormationException(AWSException): - def __init__(self, stack_name: str, current_status: str): - super().__init__( - f"The CloudFormation stack '{stack_name}' is not in a good state: {current_status}" - ) - - -class CommitNotFoundError: - def __init__(self, commit: str): - super().__init__( - f"""The commit hash "{commit}" either does not exist or you need to run `git fetch`.""" - ) - - 
-class IncompatibleMajorVersion(ValidationException): - def __init__(self, app_version: str, check_version: str): - super().__init__() - self.app_version = app_version - self.check_version = check_version - - -class IncompatibleMinorVersion(ValidationException): - def __init__(self, app_version: str, check_version: str): - super().__init__() - self.app_version = app_version - self.check_version = check_version - - -class NoClusterError(AWSException): - def __init__(self, application_name: str, environment: str): - super().__init__( - f"""No ECS cluster found for "{application_name}" in "{environment}" environment.""" - ) - - -class CreateTaskTimeoutError(AWSException): - def __init__(self, addon_name: str, application_name: str, environment: str): - super().__init__( - f"""Client ({addon_name}) ECS task has failed to start for "{application_name}" in "{environment}" environment.""" - ) - - -class ParameterNotFoundError(AWSException): - def __init__(self, application_name: str, environment: str): - super().__init__( - f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". 
Try deploying the "{application_name}" "{environment}" environment.""" - ) - - -class AddonNotFoundError(AWSException): - def __init__(self, addon_name: str): - super().__init__(f"""Addon "{addon_name}" does not exist.""") - - -class InvalidAddonTypeError(AWSException): - def __init__(self, addon_type): - self.addon_type = addon_type - super().__init__( - f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""" - ) - - -class AddonTypeMissingFromConfigError(AWSException): - def __init__(self, addon_name: str): - super().__init__( - f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type.""" - ) - - -class CopilotCodebaseNotFoundError(PlatformException): - def __init__(self, codebase: str): - super().__init__( - f"""The codebase "{codebase}" either does not exist or has not been deployed.""" - ) - - -class NotInCodeBaseRepositoryError(PlatformException): - def __init__(self): - super().__init__( - "You are in the deploy repository; make sure you are in the application codebase repository.", - ) - - -class NoCopilotCodebasesFoundError(PlatformException): - def __init__(self, application_name: str): - super().__init__(f"""No codebases found for application "{application_name}".""") - - -class ImageNotFoundError(PlatformException): - def __init__(self, commit: str): - super().__init__( - f"""The commit hash "{commit}" has not been built into an image, try the `platform-helper codebase build` command first.""" - ) - - -class ApplicationDeploymentNotTriggered(PlatformException): - def __init__(self, codebase: str): - super().__init__(f"""Your deployment for {codebase} was not triggered.""") - - -class ApplicationNotFoundError(ApplicationException): - def __init__(self, application_name: str): - super().__init__( - f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{application_name}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" 
- ) - - -class ApplicationEnvironmentNotFoundError(ApplicationException): - def __init__(self, environment: str): - super().__init__( - f"""The environment "{environment}" either does not exist or has not been deployed.""" - ) - - -class SecretNotFoundError(AWSException): - def __init__(self, secret_name: str): - super().__init__(f"""No secret called "{secret_name}".""") - - -class ECSAgentNotRunning(AWSException): - def __init__(self): - super().__init__("""ECS exec agent never reached "RUNNING" status""") - - -class ResourceNotFoundException(AWSException): - pass diff --git a/dbt_platform_helper/platform_exception.py b/dbt_platform_helper/platform_exception.py new file mode 100644 index 000000000..443226f8d --- /dev/null +++ b/dbt_platform_helper/platform_exception.py @@ -0,0 +1,5 @@ +# This exception exists so that we can easily catch exceptions +# at the command level where we know we can just output the +# error and abort. +class PlatformException(Exception): + pass diff --git a/dbt_platform_helper/providers/aws.py b/dbt_platform_helper/providers/aws.py new file mode 100644 index 000000000..c7005489a --- /dev/null +++ b/dbt_platform_helper/providers/aws.py @@ -0,0 +1,32 @@ +from dbt_platform_helper.platform_exception import PlatformException + + +class AWSException(PlatformException): + pass + + +class CreateTaskTimeoutException(AWSException): + def __init__(self, addon_name: str, application_name: str, environment: str): + super().__init__( + f"""Client ({addon_name}) ECS task has failed to start for "{application_name}" in "{environment}" environment.""" + ) + + +class ImageNotFoundException(AWSException): + def __init__(self, commit: str): + super().__init__( + f"""The commit hash "{commit}" has not been built into an image, try the `platform-helper codebase build` command first.""" + ) + + +class LogGroupNotFoundException(AWSException): + def __init__(self, log_group_name: str): + super().__init__(f"""No log group called "{log_group_name}".""") + + +# 
Todo: This should probably be in the AWS Copilot provider, but was causing circular import when we tried it pre refactoring the utils/aws.py +class CopilotCodebaseNotFoundException(PlatformException): + def __init__(self, codebase: str): + super().__init__( + f"""The codebase "{codebase}" either does not exist or has not been deployed.""" + ) diff --git a/dbt_platform_helper/providers/cloudformation.py b/dbt_platform_helper/providers/cloudformation.py index 4cc45fc24..a2d7da36c 100644 --- a/dbt_platform_helper/providers/cloudformation.py +++ b/dbt_platform_helper/providers/cloudformation.py @@ -4,7 +4,7 @@ from cfn_tools import dump_yaml from cfn_tools import load_yaml -from dbt_platform_helper.exceptions import CloudFormationException +from dbt_platform_helper.platform_exception import PlatformException class CloudFormation: @@ -125,3 +125,10 @@ def wait_for_cloudformation_to_reach_status(self, stack_status, stack_name): raise CloudFormationException( stack_name, f"Error while waiting for stack status: {str(err)}" ) + + +class CloudFormationException(PlatformException): + def __init__(self, stack_name: str, current_status: str): + super().__init__( + f"The CloudFormation stack '{stack_name}' is not in a good state: {current_status}" + ) diff --git a/dbt_platform_helper/providers/copilot.py b/dbt_platform_helper/providers/copilot.py index 47bd2f937..30085f346 100644 --- a/dbt_platform_helper/providers/copilot.py +++ b/dbt_platform_helper/providers/copilot.py @@ -4,7 +4,7 @@ from botocore.exceptions import ClientError from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION -from dbt_platform_helper.exceptions import CreateTaskTimeoutError +from dbt_platform_helper.providers.aws import CreateTaskTimeoutException from dbt_platform_helper.providers.secrets import Secrets from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.messages import abort_with_error @@ -144,7 +144,7 @@ def connect_to_addon_client_task( 
time.sleep(1) if not running: - raise CreateTaskTimeoutError(task_name, application_name, env) + raise CreateTaskTimeoutException(task_name, application_name, env) def _normalise_secret_name(addon_name: str) -> str: diff --git a/dbt_platform_helper/providers/ecs.py b/dbt_platform_helper/providers/ecs.py index 7ddf0a180..6771133c3 100644 --- a/dbt_platform_helper/providers/ecs.py +++ b/dbt_platform_helper/providers/ecs.py @@ -3,8 +3,7 @@ import time from typing import List -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.exceptions import NoClusterError +from dbt_platform_helper.platform_exception import PlatformException class ECS: @@ -36,7 +35,7 @@ def get_cluster_arn(self) -> str: if app_key_found and env_key_found and cluster_key_found: return cluster_arn - raise NoClusterError(self.application_name, self.env) + raise NoClusterException(self.application_name, self.env) def get_or_create_task_name(self, addon_name: str, parameter_name: str) -> str: """Fetches the task name from SSM or creates a new one if not found.""" @@ -84,4 +83,20 @@ def ecs_exec_is_available(self, cluster_arn: str, task_arns: List[str]): time.sleep(1) if execute_command_agent_status != "RUNNING": - raise ECSAgentNotRunning + raise ECSAgentNotRunningException + + +class ECSException(PlatformException): + pass + + +class ECSAgentNotRunningException(ECSException): + def __init__(self): + super().__init__("""ECS exec agent never reached "RUNNING" status""") + + +class NoClusterException(ECSException): + def __init__(self, application_name: str, environment: str): + super().__init__( + f"""No ECS cluster found for "{application_name}" in "{environment}" environment.""" + ) diff --git a/dbt_platform_helper/providers/load_balancers.py b/dbt_platform_helper/providers/load_balancers.py index 7be823ed6..49fc75830 100644 --- a/dbt_platform_helper/providers/load_balancers.py +++ b/dbt_platform_helper/providers/load_balancers.py @@ -1,5 +1,7 @@ import boto3 +from 
dbt_platform_helper.platform_exception import PlatformException + def find_load_balancer(session: boto3.Session, app: str, env: str) -> str: lb_client = session.client("elbv2") @@ -16,7 +18,7 @@ def find_load_balancer(session: boto3.Session, app: str, env: str) -> str: load_balancer_arn = lb["ResourceArn"] if not load_balancer_arn: - raise LoadBalancerNotFoundError() + raise LoadBalancerNotFoundException() return load_balancer_arn @@ -34,18 +36,22 @@ def find_https_listener(session: boto3.Session, app: str, env: str) -> str: pass if not listener_arn: - raise ListenerNotFoundError() + raise ListenerNotFoundException() return listener_arn -class LoadBalancerNotFoundError(Exception): +class LoadBalancerException(PlatformException): + pass + + +class LoadBalancerNotFoundException(LoadBalancerException): pass -class ListenerNotFoundError(Exception): +class ListenerNotFoundException(LoadBalancerException): pass -class ListenerRuleNotFoundError(Exception): +class ListenerRuleNotFoundException(LoadBalancerException): pass diff --git a/dbt_platform_helper/providers/secrets.py b/dbt_platform_helper/providers/secrets.py index c875f7dfc..2182ff04e 100644 --- a/dbt_platform_helper/providers/secrets.py +++ b/dbt_platform_helper/providers/secrets.py @@ -2,11 +2,7 @@ import urllib from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.platform_exception import PlatformException class Secrets: @@ -43,8 +39,9 @@ def get_connection_secret_arn(self, secret_name: str) -> str: except self.secrets_manager_client.exceptions.ResourceNotFoundException: pass - raise SecretNotFoundError(secret_name) + raise 
SecretNotFoundException(secret_name) + # Todo: This probably does not belong in the secrets provider. When it moves, take the Todoed exceptions from below def get_addon_type(self, addon_name: str) -> str: addon_type = None try: @@ -54,19 +51,19 @@ def get_addon_type(self, addon_name: str) -> str: )["Parameter"]["Value"] ) except self.ssm_client.exceptions.ParameterNotFound: - raise ParameterNotFoundError(self.application_name, self.env) + raise ParameterNotFoundException(self.application_name, self.env) if addon_name not in addon_config.keys(): - raise AddonNotFoundError(addon_name) + raise AddonNotFoundException(addon_name) for name, config in addon_config.items(): if name == addon_name: if not config.get("type"): - raise AddonTypeMissingFromConfigError(addon_name) + raise AddonTypeMissingFromConfigException(addon_name) addon_type = config["type"] if not addon_type or addon_type not in CONDUIT_ADDON_TYPES: - raise InvalidAddonTypeError(addon_type) + raise InvalidAddonTypeException(addon_type) if "postgres" in addon_type: addon_type = "postgres" @@ -83,3 +80,47 @@ def get_parameter_name(self, addon_type: str, addon_name: str, access: str) -> s def _normalise_secret_name(self, addon_name: str) -> str: return addon_name.replace("-", "_").upper() + + +# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type() +class AddonException(PlatformException): + pass + + +# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type() +class AddonNotFoundException(AddonException): + def __init__(self, addon_name: str): + super().__init__(f"""Addon "{addon_name}" does not exist.""") + + +# Todo: This probably does not belong in the secrets provider. 
Move it when we find a better home for get_addon_type() +class AddonTypeMissingFromConfigException(AddonException): + def __init__(self, addon_name: str): + super().__init__( + f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type.""" + ) + + +# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type() +class InvalidAddonTypeException(AddonException): + def __init__(self, addon_type): + self.addon_type = addon_type + super().__init__( + f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}.""" + ) + + +class SecretException(PlatformException): + pass + + +class ParameterNotFoundException(SecretException): + def __init__(self, application_name: str, environment: str): + super().__init__( + f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". Try deploying the "{application_name}" "{environment}" environment.""" + ) + + +class SecretNotFoundException(SecretException): + def __init__(self, secret_name: str): + super().__init__(f"""No secret called "{secret_name}".""") diff --git a/dbt_platform_helper/providers/validation.py b/dbt_platform_helper/providers/validation.py new file mode 100644 index 000000000..7a1ad6ce2 --- /dev/null +++ b/dbt_platform_helper/providers/validation.py @@ -0,0 +1,19 @@ +from dbt_platform_helper.platform_exception import PlatformException + + +class ValidationException(PlatformException): + pass + + +class IncompatibleMajorVersionException(ValidationException): + def __init__(self, app_version: str, check_version: str): + super().__init__() + self.app_version = app_version + self.check_version = check_version + + +class IncompatibleMinorVersionException(ValidationException): + def __init__(self, app_version: str, check_version: str): + super().__init__() + self.app_version = app_version + self.check_version = check_version diff --git 
a/dbt_platform_helper/utils/application.py b/dbt_platform_helper/utils/application.py index fba00bdab..e0909884c 100644 --- a/dbt_platform_helper/utils/application.py +++ b/dbt_platform_helper/utils/application.py @@ -1,4 +1,5 @@ import json +import os import re from pathlib import Path from typing import Dict @@ -8,7 +9,7 @@ from boto3 import Session from yaml.parser import ParserError -from dbt_platform_helper.exceptions import ApplicationNotFoundError +from dbt_platform_helper.platform_exception import PlatformException from dbt_platform_helper.utils.aws import get_aws_session_or_abort from dbt_platform_helper.utils.aws import get_profile_name_from_account_id from dbt_platform_helper.utils.aws import get_ssm_secrets @@ -80,7 +81,7 @@ def load_application(app: str = None, default_session: Session = None) -> Applic WithDecryption=False, ) except ssm_client.exceptions.ParameterNotFound: - raise ApplicationNotFoundError(app) + raise ApplicationNotFoundException(app) path = f"/copilot/applications/{application.name}/environments" secrets = get_ssm_secrets(app, None, current_session, path) @@ -135,3 +136,14 @@ def get_application_name(): abort_with_error("Cannot get application name. 
No copilot/.workspace file found") return app_name + + +class ApplicationException(PlatformException): + pass + + +class ApplicationNotFoundException(ApplicationException): + def __init__(self, application_name: str): + super().__init__( + f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{application_name}"; ensure you have set the environment variable "AWS_PROFILE" correctly.""" + ) diff --git a/dbt_platform_helper/utils/arn_parser.py b/dbt_platform_helper/utils/arn_parser.py index 3891f1294..28ca79340 100644 --- a/dbt_platform_helper/utils/arn_parser.py +++ b/dbt_platform_helper/utils/arn_parser.py @@ -1,4 +1,4 @@ -from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.providers.validation import ValidationException class ARN: diff --git a/dbt_platform_helper/utils/aws.py b/dbt_platform_helper/utils/aws.py index cb2b2e34b..48387e9e0 100644 --- a/dbt_platform_helper/utils/aws.py +++ b/dbt_platform_helper/utils/aws.py @@ -13,11 +13,12 @@ import yaml from boto3 import Session -from dbt_platform_helper.exceptions import AWSException -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import ResourceNotFoundException -from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.platform_exception import PlatformException +from dbt_platform_helper.providers.aws import AWSException +from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException +from dbt_platform_helper.providers.aws import ImageNotFoundException +from dbt_platform_helper.providers.aws import LogGroupNotFoundException +from dbt_platform_helper.providers.validation import ValidationException from dbt_platform_helper.utils.files import cache_refresh_required from dbt_platform_helper.utils.files import read_supported_versions_from_cache from dbt_platform_helper.utils.files 
import write_to_cache @@ -95,7 +96,7 @@ def _log_account_info(account_name: list, account_id: str) -> None: ) -class NoProfileForAccountIdError(Exception): +class NoProfileForAccountIdException(PlatformException): def __init__(self, account_id): super().__init__(f"No profile found for account {account_id}") @@ -110,7 +111,7 @@ def get_profile_name_from_account_id(account_id: str): if account_id == found_account_id: return section.removeprefix("profile ") - raise NoProfileForAccountIdError(account_id) + raise NoProfileForAccountIdException(account_id) def get_ssm_secret_names(app, env): @@ -488,8 +489,10 @@ def start_build_extraction(codebuild_client, build_options): return response["build"]["arn"] +# Todo: This should probably be in the AWS Copilot provider def check_codebase_exists(session: Session, application, codebase: str): try: + # Todo: Can this leverage dbt_platform_helper.providers.secrets.Secrets.get_connection_secret_arn? ssm_client = session.client("ssm") json.loads( ssm_client.get_parameter( @@ -502,7 +505,7 @@ def check_codebase_exists(session: Session, application, codebase: str): ssm_client.exceptions.ParameterNotFound, json.JSONDecodeError, ): - raise CopilotCodebaseNotFoundError(codebase) + raise CopilotCodebaseNotFoundException(codebase) def check_image_exists(session, application, codebase, commit): @@ -516,7 +519,7 @@ def check_image_exists(session, application, codebase, commit): ecr_client.exceptions.RepositoryNotFoundException, ecr_client.exceptions.ImageNotFoundException, ): - raise ImageNotFoundError(commit) + raise ImageNotFoundException(commit) def get_build_url_from_arn(build_arn: str) -> str: @@ -577,4 +580,4 @@ def wait_for_log_group_to_exist(log_client, log_group_name, attempts=30): time.sleep(1) if not log_group_exists: - raise ResourceNotFoundException + raise LogGroupNotFoundException(log_group_name) diff --git a/dbt_platform_helper/utils/git.py b/dbt_platform_helper/utils/git.py index e451ce652..1983e0413 100644 --- 
a/dbt_platform_helper/utils/git.py +++ b/dbt_platform_helper/utils/git.py @@ -2,7 +2,7 @@ import subprocess -class CommitNotFoundError(Exception): +class CommitNotFoundException(Exception): pass @@ -26,4 +26,4 @@ def check_if_commit_exists(commit): ) if branches_containing_commit.stderr: - raise CommitNotFoundError() + raise CommitNotFoundException() diff --git a/dbt_platform_helper/utils/versioning.py b/dbt_platform_helper/utils/versioning.py index 5c87ee42e..795a65d87 100644 --- a/dbt_platform_helper/utils/versioning.py +++ b/dbt_platform_helper/utils/versioning.py @@ -13,9 +13,9 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE -from dbt_platform_helper.exceptions import IncompatibleMajorVersion -from dbt_platform_helper.exceptions import IncompatibleMinorVersion -from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.providers.validation import IncompatibleMajorVersionException +from dbt_platform_helper.providers.validation import IncompatibleMinorVersionException +from dbt_platform_helper.providers.validation import ValidationException from dbt_platform_helper.utils.platform_config import load_unvalidated_config_file VersionTuple = Optional[Tuple[int, int, int]] @@ -198,13 +198,13 @@ def validate_version_compatibility( if (app_major == 0 and check_major == 0) and ( app_minor != check_minor or app_patch != check_patch ): - raise IncompatibleMajorVersion(app_version_as_string, check_version_as_string) + raise IncompatibleMajorVersionException(app_version_as_string, check_version_as_string) if app_major != check_major: - raise IncompatibleMajorVersion(app_version_as_string, check_version_as_string) + raise IncompatibleMajorVersionException(app_version_as_string, check_version_as_string) if app_minor != check_minor: - raise IncompatibleMinorVersion(app_version_as_string, check_version_as_string) + raise 
IncompatibleMinorVersionException(app_version_as_string, check_version_as_string) def check_version_on_file_compatibility( @@ -248,9 +248,9 @@ def check_platform_helper_version_needs_update(): ) try: validate_version_compatibility(local_version, latest_release) - except IncompatibleMajorVersion: + except IncompatibleMajorVersionException: click.secho(message, fg="red") - except IncompatibleMinorVersion: + except IncompatibleMinorVersionException: click.secho(message, fg="yellow") diff --git a/tests/platform_helper/domain/test_codebase.py b/tests/platform_helper/domain/test_codebase.py index 5c35a141f..076d5d447 100644 --- a/tests/platform_helper/domain/test_codebase.py +++ b/tests/platform_helper/domain/test_codebase.py @@ -13,15 +13,15 @@ import pytest import requests +from dbt_platform_helper.domain.codebase import ApplicationDeploymentNotTriggered +from dbt_platform_helper.domain.codebase import ApplicationEnvironmentNotFoundException from dbt_platform_helper.domain.codebase import Codebase -from dbt_platform_helper.domain.codebase import NotInCodeBaseRepositoryError -from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.domain.codebase import NotInCodeBaseRepositoryException +from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException +from dbt_platform_helper.providers.aws import ImageNotFoundException +from dbt_platform_helper.utils.application import ApplicationNotFoundException from dbt_platform_helper.utils.application import Environment -from dbt_platform_helper.utils.git import CommitNotFoundError +from dbt_platform_helper.utils.git import CommitNotFoundException from tests.platform_helper.conftest import 
EXPECTED_FILES_DIR ecr_exceptions = boto3.client("ecr").exceptions @@ -148,16 +148,16 @@ def test_codebase_prepare_does_not_generate_files_in_a_repo_with_a_copilot_direc mocks.run_subprocess.return_value.stdout = mock_run_suprocess_fixture() - with pytest.raises(NotInCodeBaseRepositoryError): + with pytest.raises(NotInCodeBaseRepositoryException): codebase.prepare() def test_codebase_build_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application.side_effect = ApplicationNotFoundException("not-an-application") codebase = Codebase(**mocks.params()) - with pytest.raises(ApplicationNotFoundError): + with pytest.raises(ApplicationNotFoundException): codebase.build("not-an-application", "application", "ab1c23d") mocks.echo.assert_has_calls( [ @@ -170,11 +170,11 @@ def test_codebase_build_does_not_trigger_build_without_an_application(): def test_codebase_build_commit_not_found(): - mocks = CodebaseMocks(check_if_commit_exists=Mock(side_effect=CommitNotFoundError())) + mocks = CodebaseMocks(check_if_commit_exists=Mock(side_effect=CommitNotFoundException())) codebase = Codebase(**mocks.params()) - with pytest.raises(CommitNotFoundError): + with pytest.raises(CommitNotFoundException): codebase.build("not-an-application", "application", "ab1c23d") @@ -187,7 +187,7 @@ def test_codebase_prepare_raises_not_in_codebase_exception(tmp_path): os.chdir(tmp_path) Path(tmp_path / "copilot").mkdir() - with pytest.raises(NotInCodeBaseRepositoryError): + with pytest.raises(NotInCodeBaseRepositoryException): codebase.prepare() @@ -276,7 +276,7 @@ def test_codebase_deploy_successfully_triggers_a_pipeline_based_deploy(mock_appl def test_codebase_deploy_exception_with_a_nonexistent_codebase(): mocks = CodebaseMocks( - check_codebase_exists=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + 
check_codebase_exists=Mock(side_effect=CopilotCodebaseNotFoundException("application")) ) client = mock_aws_client(mocks.get_aws_session_or_abort) @@ -285,14 +285,14 @@ def test_codebase_deploy_exception_with_a_nonexistent_codebase(): "Parameter": {"Value": json.dumps({"name": "application"})}, } - with pytest.raises(CopilotCodebaseNotFoundError): + with pytest.raises(CopilotCodebaseNotFoundException): codebase = Codebase(**mocks.params()) codebase.deploy("test-application", "development", "application", "nonexistent-commit-hash") def test_check_codebase_exists_returns_error_when_no_json(): mocks = CodebaseMocks( - check_codebase_exists=Mock(side_effect=CopilotCodebaseNotFoundError("application")) + check_codebase_exists=Mock(side_effect=CopilotCodebaseNotFoundException("application")) ) client = mock_aws_client(mocks.get_aws_session_or_abort) @@ -301,14 +301,14 @@ def test_check_codebase_exists_returns_error_when_no_json(): "Parameter": {"Value": json.dumps({"name": "application"})}, } - with pytest.raises(CopilotCodebaseNotFoundError): + with pytest.raises(CopilotCodebaseNotFoundException): codebase = Codebase(**mocks.params()) codebase.deploy("test-application", "development", "application", "nonexistent-commit-hash") def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): mocks = CodebaseMocks( - check_image_exists=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + check_image_exists=Mock(side_effect=ImageNotFoundException("nonexistent-commit-hash")) ) client = mock_aws_client(mocks.get_aws_session_or_abort) @@ -318,14 +318,14 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository(): } client.describe_images.side_effect = ecr_exceptions.RepositoryNotFoundException({}, "") - with pytest.raises(ImageNotFoundError): + with pytest.raises(ImageNotFoundException): codebase = Codebase(**mocks.params()) codebase.deploy("test-application", "development", "application", "nonexistent-commit-hash") def 
test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): mocks = CodebaseMocks( - check_image_exists=Mock(side_effect=ImageNotFoundError("nonexistent-commit-hash")) + check_image_exists=Mock(side_effect=ImageNotFoundException("nonexistent-commit-hash")) ) client = mock_aws_client(mocks.get_aws_session_or_abort) @@ -335,7 +335,7 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_tag(): } client.describe_images.side_effect = ecr_exceptions.ImageNotFoundException({}, "") - with pytest.raises(ImageNotFoundError): + with pytest.raises(ImageNotFoundException): codebase = Codebase(**mocks.params()) codebase.deploy("test-application", "development", "application", "nonexistent-commit-hash") @@ -374,10 +374,10 @@ def test_codebase_deploy_does_not_trigger_build_without_confirmation(): def test_codebase_deploy_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application.side_effect = ApplicationNotFoundError("not-an-application") + mocks.load_application.side_effect = ApplicationNotFoundException("not-an-application") codebase = Codebase(**mocks.params()) - with pytest.raises(ApplicationNotFoundError) as exc: + with pytest.raises(ApplicationNotFoundException) as exc: codebase.deploy("not-an-application", "dev", "application", "ab1c23d") @@ -387,7 +387,7 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_environment(mock_ap mocks.load_application.return_value = mock_application codebase = Codebase(**mocks.params()) - with pytest.raises(ApplicationEnvironmentNotFoundError) as exc: + with pytest.raises(ApplicationEnvironmentNotFoundException) as exc: codebase.deploy("test-application", "not-an-environment", "application", "ab1c23d") mocks.echo.assert_has_calls( [ @@ -414,10 +414,10 @@ def test_codebase_deploy_does_not_trigger_deployment_without_confirmation(): def test_codebase_list_does_not_trigger_build_without_an_application(): mocks = CodebaseMocks() - mocks.load_application.side_effect = 
ApplicationNotFoundError("not-an-application") + mocks.load_application.side_effect = ApplicationNotFoundException("not-an-application") codebase = Codebase(**mocks.params()) - with pytest.raises(ApplicationNotFoundError) as exc: + with pytest.raises(ApplicationNotFoundException) as exc: codebase.list("not-an-application", True) diff --git a/tests/platform_helper/domain/test_conduit.py b/tests/platform_helper/domain/test_conduit.py index 0d9501cd1..0be677254 100644 --- a/tests/platform_helper/domain/test_conduit.py +++ b/tests/platform_helper/domain/test_conduit.py @@ -4,13 +4,13 @@ import pytest from dbt_platform_helper.domain.conduit import Conduit -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.providers.aws import CreateTaskTimeoutException +from dbt_platform_helper.providers.ecs import ECSAgentNotRunningException +from dbt_platform_helper.providers.ecs import NoClusterException +from dbt_platform_helper.providers.secrets import AddonNotFoundException +from dbt_platform_helper.providers.secrets import InvalidAddonTypeException +from dbt_platform_helper.providers.secrets import ParameterNotFoundException +from dbt_platform_helper.providers.secrets import SecretNotFoundException from dbt_platform_helper.utils.application import Application from dbt_platform_helper.utils.application import Environment @@ -168,13 +168,13 @@ def test_conduit_with_task_already_running(): def test_conduit_domain_when_no_cluster_exists(): conduit_mocks = ConduitMocks(app_name, addon_type) - conduit_mocks.ecs_provider.get_cluster_arn.side_effect = NoClusterError( + 
conduit_mocks.ecs_provider.get_cluster_arn.side_effect = NoClusterException( application_name=app_name, environment=env, ) conduit = Conduit(**conduit_mocks.params()) - with pytest.raises(NoClusterError): + with pytest.raises(NoClusterException): conduit.start(env, addon_name) conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) conduit.ecs_provider.get_cluster_arn.assert_called_once() @@ -187,12 +187,12 @@ def test_conduit_domain_when_no_connection_secret_exists(): ) conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [] conduit_mocks.secrets_provider.get_parameter_name.return_value = "parameter_name" - conduit_mocks.create_addon_client_task.side_effect = SecretNotFoundError( + conduit_mocks.create_addon_client_task.side_effect = SecretNotFoundException( f"/copilot/{app_name}/{env}/secrets/{addon_name}" ) conduit = Conduit(**conduit_mocks.params()) - with pytest.raises(SecretNotFoundError): + with pytest.raises(SecretNotFoundException): conduit.start(env, addon_name) conduit.secrets_provider.get_addon_type.assert_called_once_with(addon_name) @@ -208,7 +208,7 @@ def test_conduit_domain_when_client_task_fails_to_start(): addon_type, ) conduit_mocks.connect_to_addon_client_task.side_effect = ( - CreateTaskTimeoutError( + CreateTaskTimeoutException( addon_name=addon_name, application_name=app_name, environment=env, @@ -217,7 +217,7 @@ def test_conduit_domain_when_client_task_fails_to_start(): conduit = Conduit(**conduit_mocks.params()) - with pytest.raises(CreateTaskTimeoutError): + with pytest.raises(CreateTaskTimeoutException): conduit.start(env, addon_name) conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) conduit.connect_to_addon_client_task.assert_called_once_with( @@ -239,11 +239,13 @@ def test_conduit_domain_when_addon_type_is_invalid(): conduit_mocks = ConduitMocks(app_name, addon_type) - conduit_mocks.secrets_provider.get_addon_type.side_effect = InvalidAddonTypeError(addon_type) + 
conduit_mocks.secrets_provider.get_addon_type.side_effect = InvalidAddonTypeException( + addon_type + ) conduit = Conduit(**conduit_mocks.params()) conduit.application.environments[env].session.client("ecs") - with pytest.raises(InvalidAddonTypeError): + with pytest.raises(InvalidAddonTypeException): conduit.start(env, addon_name) conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) @@ -251,11 +253,11 @@ def test_conduit_domain_when_addon_type_is_invalid(): def test_start_with_addon_does_not_exist_raises_error(): addon_name = "addon_doesnt_exist" conduit_mocks = ConduitMocks(app_name, addon_type) - conduit_mocks.secrets_provider.get_addon_type.side_effect = AddonNotFoundError(addon_name) + conduit_mocks.secrets_provider.get_addon_type.side_effect = AddonNotFoundException(addon_name) conduit = Conduit(**conduit_mocks.params()) - with pytest.raises(AddonNotFoundError): + with pytest.raises(AddonNotFoundException): conduit.start(env, addon_name) @@ -263,7 +265,7 @@ def test_conduit_domain_when_no_addon_config_parameter_exists(): addon_name = "parameter_doesnt_exist" conduit_mocks = ConduitMocks(app_name, addon_type) - conduit_mocks.secrets_provider.get_addon_type.side_effect = ParameterNotFoundError( + conduit_mocks.secrets_provider.get_addon_type.side_effect = ParameterNotFoundException( application_name=app_name, environment=env, ) @@ -271,7 +273,7 @@ def test_conduit_domain_when_no_addon_config_parameter_exists(): conduit = Conduit(**conduit_mocks.params()) conduit.application.environments[env].session.client("ecs") - with pytest.raises(ParameterNotFoundError): + with pytest.raises(ParameterNotFoundException): conduit.start(env, addon_name) conduit.ecs_provider.get_ecs_task_arns.assert_called_once_with(cluster_arn, task_name) @@ -285,13 +287,13 @@ def test_conduit_domain_ecs_exec_agent_does_not_start(): conduit_mocks.ecs_provider.get_ecs_task_arns.return_value = [ "arn:aws:ecs:eu-west-2:123456789012:task/MyTaskARN" ] - 
conduit_mocks.ecs_provider.ecs_exec_is_available.side_effect = ECSAgentNotRunning() + conduit_mocks.ecs_provider.ecs_exec_is_available.side_effect = ECSAgentNotRunningException() conduit_mocks.ecs_provider.get_cluster_arn.return_value = cluster_arn conduit = Conduit(**conduit_mocks.params()) conduit.application.environments[env].session.client("ecs") - with pytest.raises(ECSAgentNotRunning): + with pytest.raises(ECSAgentNotRunningException): conduit.start(env, addon_name) conduit.ecs_provider.ecs_exec_is_available.assert_called_once_with( diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index 0c8f7bc7e..f3c6749d1 100644 --- a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -6,9 +6,9 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.domain.database_copy import DatabaseCopy -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.providers.aws import AWSException from dbt_platform_helper.utils.application import Application +from dbt_platform_helper.utils.application import ApplicationNotFoundException from dbt_platform_helper.utils.aws import Vpc @@ -294,7 +294,7 @@ def test_database_dump_handles_account_id_errors(is_dump): def test_database_copy_initialization_handles_app_name_errors(): mocks = DataCopyMocks() - mocks.load_application = Mock(side_effect=ApplicationNotFoundError("bad-app")) + mocks.load_application = Mock(side_effect=ApplicationNotFoundException("bad-app")) with pytest.raises(SystemExit) as exc: DatabaseCopy("bad-app", "test-db", **mocks.params()) diff --git a/tests/platform_helper/domain/test_maintenance_page.py b/tests/platform_helper/domain/test_maintenance_page.py index 93e550fe9..9c775a142 100644 --- a/tests/platform_helper/domain/test_maintenance_page.py +++ 
b/tests/platform_helper/domain/test_maintenance_page.py @@ -52,7 +52,7 @@ def test_when_environment_online(self): "TagDescriptions": [{"ResourceArn": "rule_arn", "Tags": []}] } - with pytest.raises(ListenerRuleNotFoundError): + with pytest.raises(ListenerRuleNotFoundException): remove_maintenance_page(boto_mock, "listener_arn") @patch("dbt_platform_helper.domain.maintenance_page.delete_listener_rule") diff --git a/tests/platform_helper/providers/test_cloudformation.py b/tests/platform_helper/providers/test_cloudformation.py index 0dab33905..4485a74b8 100644 --- a/tests/platform_helper/providers/test_cloudformation.py +++ b/tests/platform_helper/providers/test_cloudformation.py @@ -8,8 +8,8 @@ from cfn_tools import load_yaml from moto import mock_aws -from dbt_platform_helper.exceptions import CloudFormationException from dbt_platform_helper.providers.cloudformation import CloudFormation +from dbt_platform_helper.providers.cloudformation import CloudFormationException from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name diff --git a/tests/platform_helper/providers/test_copilot.py b/tests/platform_helper/providers/test_copilot.py index 1b736db1d..d1ee53bab 100644 --- a/tests/platform_helper/providers/test_copilot.py +++ b/tests/platform_helper/providers/test_copilot.py @@ -6,11 +6,11 @@ from botocore.exceptions import ClientError from moto import mock_aws -from dbt_platform_helper.exceptions import SecretNotFoundError -from dbt_platform_helper.providers.copilot import CreateTaskTimeoutError +from dbt_platform_helper.providers.aws import CreateTaskTimeoutException from dbt_platform_helper.providers.copilot import connect_to_addon_client_task from dbt_platform_helper.providers.copilot import create_addon_client_task from dbt_platform_helper.providers.copilot import create_postgres_admin_task +from dbt_platform_helper.providers.secrets import SecretNotFoundException from tests.platform_helper.conftest 
import NoSuchEntityException from tests.platform_helper.conftest import expected_connection_secret_name from tests.platform_helper.conftest import mock_task_name @@ -325,11 +325,11 @@ def test_create_addon_client_task_when_no_secret_found(get_connection_secret_arn iam_client = mock_application.environments[env].session.client("iam") ssm_client = mock_application.environments[env].session.client("ssm") - get_connection_secret_arn.side_effect = SecretNotFoundError( + get_connection_secret_arn.side_effect = SecretNotFoundException( "/copilot/test-application/development/secrets/named-postgres" ) - with pytest.raises(SecretNotFoundError): + with pytest.raises(SecretNotFoundException): create_addon_client_task( iam_client, ssm_client, @@ -401,7 +401,7 @@ def test_connect_to_addon_client_task_with_timeout_reached_throws_exception( mock_subprocess = Mock() get_ecs_task_arns = Mock(return_value=[]) - with pytest.raises(CreateTaskTimeoutError): + with pytest.raises(CreateTaskTimeoutException): connect_to_addon_client_task( ecs_client, mock_subprocess, diff --git a/tests/platform_helper/providers/test_ecs.py b/tests/platform_helper/providers/test_ecs.py index 1bb15b022..3685036fb 100644 --- a/tests/platform_helper/providers/test_ecs.py +++ b/tests/platform_helper/providers/test_ecs.py @@ -4,9 +4,9 @@ import pytest from moto import mock_aws -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.exceptions import NoClusterError from dbt_platform_helper.providers.ecs import ECS +from dbt_platform_helper.providers.ecs import ECSAgentNotRunningException +from dbt_platform_helper.providers.ecs import NoClusterException from tests.platform_helper.conftest import mock_parameter_name from tests.platform_helper.conftest import mock_task_name @@ -33,7 +33,7 @@ def test_get_cluster_arn_with_no_cluster_raises_error(mock_application): ecs_manager = ECS(ecs_client, ssm_client, application_name, env) - with pytest.raises(NoClusterError): + with 
pytest.raises(NoClusterException): ecs_manager.get_cluster_arn() @@ -137,7 +137,7 @@ def test_ecs_exec_is_available_with_exec_not_running_raises_exception( mock_application.name, "development", ) - with pytest.raises(ECSAgentNotRunning): + with pytest.raises(ECSAgentNotRunningException): ecs_manager.ecs_exec_is_available( mocked_cluster_arn, ["arn:aws:ecs:eu-west-2:12345678:task/does-not-matter/1234qwer"] ) diff --git a/tests/platform_helper/providers/test_load_balancers.py b/tests/platform_helper/providers/test_load_balancers.py index 0f9e8cb6d..f91afb06e 100644 --- a/tests/platform_helper/providers/test_load_balancers.py +++ b/tests/platform_helper/providers/test_load_balancers.py @@ -3,8 +3,8 @@ import pytest -from dbt_platform_helper.providers.load_balancers import ListenerNotFoundError -from dbt_platform_helper.providers.load_balancers import LoadBalancerNotFoundError +from dbt_platform_helper.providers.load_balancers import ListenerNotFoundException +from dbt_platform_helper.providers.load_balancers import LoadBalancerNotFoundException from dbt_platform_helper.providers.load_balancers import find_https_listener from dbt_platform_helper.providers.load_balancers import find_load_balancer @@ -14,7 +14,7 @@ class TestFindHTTPSListener: def test_when_no_https_listener_present(self, find_load_balancer): boto_mock = MagicMock() boto_mock.client().describe_listeners.return_value = {"Listeners": []} - with pytest.raises(ListenerNotFoundError): + with pytest.raises(ListenerNotFoundException): find_https_listener(boto_mock, "test-application", "development") @patch("dbt_platform_helper.providers.load_balancers.find_load_balancer", return_value="lb_arn") @@ -34,7 +34,7 @@ def test_when_no_load_balancer_exists(self): boto_mock = MagicMock() boto_mock.client().describe_load_balancers.return_value = {"LoadBalancers": []} - with pytest.raises(LoadBalancerNotFoundError): + with pytest.raises(LoadBalancerNotFoundException): find_load_balancer(boto_mock, "test-application", 
"development") def test_when_a_load_balancer_exists(self): diff --git a/tests/platform_helper/providers/test_secrets.py b/tests/platform_helper/providers/test_secrets.py index 43f716398..4a7da9bf2 100644 --- a/tests/platform_helper/providers/test_secrets.py +++ b/tests/platform_helper/providers/test_secrets.py @@ -4,11 +4,11 @@ import pytest from moto import mock_aws -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.providers.secrets import AddonNotFoundException +from dbt_platform_helper.providers.secrets import AddonTypeMissingFromConfigException +from dbt_platform_helper.providers.secrets import InvalidAddonTypeException +from dbt_platform_helper.providers.secrets import ParameterNotFoundException +from dbt_platform_helper.providers.secrets import SecretNotFoundException from dbt_platform_helper.providers.secrets import Secrets from tests.platform_helper.conftest import add_addon_config_parameter from tests.platform_helper.conftest import mock_parameter_name @@ -95,7 +95,7 @@ def test_get_connection_secret_arn_when_secret_does_not_exist(mock_application): secrets_client = mock_application.environments[env].session.client("secretsmanager") secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(SecretNotFoundError) as ex: + with pytest.raises(SecretNotFoundException) as ex: secrets_manager.get_connection_secret_arn("POSTGRES") @@ -165,7 +165,7 @@ def test_get_addon_type_with_not_found_throws_exception(mock_application): secrets_client = mock_application.environments[env].session.client("secretsmanager") secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with 
pytest.raises(AddonNotFoundError): + with pytest.raises(AddonNotFoundException): secrets_manager.get_addon_type("custom-name-postgres") @@ -185,7 +185,7 @@ def test_get_addon_type_with_parameter_not_found_throws_exception(mock_applicati Value=json.dumps({"custom-name-postgres": {"type": "postgres"}}), ) - with pytest.raises(ParameterNotFoundError): + with pytest.raises(ParameterNotFoundException): secrets_manager.get_addon_type("custom-name-postgres") @@ -199,7 +199,7 @@ def test_get_addon_type_with_invalid_type_throws_exception(mock_application): secrets_client = mock_application.environments[env].session.client("secretsmanager") secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(InvalidAddonTypeError): + with pytest.raises(InvalidAddonTypeException): secrets_manager.get_addon_type("invalid-extension") @@ -213,7 +213,7 @@ def test_get_addon_type_with_blank_type_throws_exception(mock_application): secrets_client = mock_application.environments[env].session.client("secretsmanager") secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(AddonTypeMissingFromConfigError): + with pytest.raises(AddonTypeMissingFromConfigException): secrets_manager.get_addon_type("blank-extension") @@ -227,7 +227,7 @@ def test_get_addon_type_with_unspecified_type_throws_exception(mock_application) secrets_client = mock_application.environments[env].session.client("secretsmanager") secrets_manager = Secrets(ssm_client, secrets_client, mock_application.name, env) - with pytest.raises(AddonTypeMissingFromConfigError): + with pytest.raises(AddonTypeMissingFromConfigException): secrets_manager.get_addon_type("addon-type-unspecified") diff --git a/tests/platform_helper/test_command_codebase.py b/tests/platform_helper/test_command_codebase.py index 894b746f7..33e9adf34 100644 --- a/tests/platform_helper/test_command_codebase.py +++ b/tests/platform_helper/test_command_codebase.py @@ -8,13 +8,12 @@ 
from dbt_platform_helper.commands.codebase import deploy from dbt_platform_helper.commands.codebase import list from dbt_platform_helper.commands.codebase import prepare as prepare_command -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError -from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError -from dbt_platform_helper.utils.git import CommitNotFoundError +from dbt_platform_helper.domain.codebase import ApplicationEnvironmentNotFoundException +from dbt_platform_helper.domain.codebase import NotInCodeBaseRepositoryException +from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException +from dbt_platform_helper.providers.aws import ImageNotFoundException +from dbt_platform_helper.utils.application import ApplicationNotFoundException +from dbt_platform_helper.utils.git import CommitNotFoundException def mock_aws_client(get_aws_session_or_abort): @@ -40,7 +39,7 @@ def test_codebase_prepare_calls_codebase_prepare_method(self, mock_codebase_obje @patch("click.secho") def test_aborts_when_not_in_a_codebase_repository(self, mock_click, mock_codebase_object): mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.prepare.side_effect = NotInCodeBaseRepositoryError + mock_codebase_object_instance.prepare.side_effect = NotInCodeBaseRepositoryException os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke(prepare_command) @@ -56,7 +55,7 @@ def test_codebase_build_does_not_trigger_build_without_an_application( ): mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.build.side_effect = ApplicationNotFoundError + 
mock_codebase_object_instance.build.side_effect = ApplicationNotFoundException os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke( @@ -79,7 +78,7 @@ def test_codebase_build_aborts_with_a_nonexistent_commit_hash( self, mock_click, mock_codebase_object ): mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.build.side_effect = CommitNotFoundError() + mock_codebase_object_instance.build.side_effect = CommitNotFoundException() os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke( @@ -133,7 +132,7 @@ def test_codebase_deploy_aborts_with_a_nonexistent_image_repository_or_image_tag self, mock_click, codebase_object_mock ): mock_codebase_object_instance = codebase_object_mock.return_value - mock_codebase_object_instance.deploy.side_effect = ImageNotFoundError + mock_codebase_object_instance.deploy.side_effect = ImageNotFoundException result = CliRunner().invoke( deploy, [ @@ -159,7 +158,7 @@ def test_codebase_deploy_does_not_trigger_build_without_an_application( self, mock_click, mock_codebase_object ): mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.deploy.side_effect = ApplicationNotFoundError + mock_codebase_object_instance.deploy.side_effect = ApplicationNotFoundException os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke( @@ -187,7 +186,7 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_environment( self, mock_click, mock_codebase_object ): mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.deploy.side_effect = ApplicationEnvironmentNotFoundError + mock_codebase_object_instance.deploy.side_effect = ApplicationEnvironmentNotFoundException os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke( @@ -215,7 +214,7 @@ def test_codebase_deploy_does_not_trigger_build_with_missing_codebase( self, mock_click, mock_codebase_object ): mock_codebase_object_instance = 
mock_codebase_object.return_value - mock_codebase_object_instance.deploy.side_effect = CopilotCodebaseNotFoundError + mock_codebase_object_instance.deploy.side_effect = CopilotCodebaseNotFoundException os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke( @@ -249,22 +248,11 @@ def test_lists_codebases_successfully(self, mock_codebase_object): mock_codebase_object_instance.list.assert_called_once_with("test-application", True) assert result.exit_code == 0 - @patch("dbt_platform_helper.commands.codebase.Codebase") - @patch("click.secho") - def test_list_aborts_when_application_has_no_codebases(self, mock_click, mock_codebase_object): - mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.list.side_effect = NoCopilotCodebasesFoundError - os.environ["AWS_PROFILE"] = "foo" - - result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) - - assert result.exit_code == 1 - @patch("dbt_platform_helper.commands.codebase.Codebase") @patch("click.secho") def test_aborts_when_application_does_not_exist(self, mock_click, mock_codebase_object): mock_codebase_object_instance = mock_codebase_object.return_value - mock_codebase_object_instance.list.side_effect = ApplicationNotFoundError + mock_codebase_object_instance.list.side_effect = ApplicationNotFoundException os.environ["AWS_PROFILE"] = "foo" result = CliRunner().invoke(list, ["--app", "test-application", "--with-images"]) diff --git a/tests/platform_helper/test_command_conduit.py b/tests/platform_helper/test_command_conduit.py index 880486335..62c4d512c 100644 --- a/tests/platform_helper/test_command_conduit.py +++ b/tests/platform_helper/test_command_conduit.py @@ -5,7 +5,7 @@ from click.testing import CliRunner from dbt_platform_helper.commands.conduit import conduit -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.providers.secrets import SecretNotFoundException @pytest.mark.parametrize( @@ -61,7 +61,7 @@ 
def test_start_conduit_with_exception_raised_exit_1( ): mock_conduit_instance = mock_conduit_object.return_value - mock_conduit_instance.start.side_effect = SecretNotFoundError(secret_name="test-secret") + mock_conduit_instance.start.side_effect = SecretNotFoundException(secret_name="test-secret") addon_name = "important-db" result = CliRunner().invoke( conduit, diff --git a/tests/platform_helper/test_command_environment.py b/tests/platform_helper/test_command_environment.py index 2715df834..c8992112d 100644 --- a/tests/platform_helper/test_command_environment.py +++ b/tests/platform_helper/test_command_environment.py @@ -11,7 +11,7 @@ from click.testing import CliRunner from moto import mock_aws -from dbt_platform_helper.commands.environment import CertificateNotFoundError +from dbt_platform_helper.commands.environment import CertificateNotFoundException from dbt_platform_helper.commands.environment import find_https_certificate from dbt_platform_helper.commands.environment import generate from dbt_platform_helper.commands.environment import generate_terraform @@ -21,8 +21,8 @@ from dbt_platform_helper.commands.environment import offline from dbt_platform_helper.commands.environment import online from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE -from dbt_platform_helper.providers.load_balancers import ListenerNotFoundError -from dbt_platform_helper.providers.load_balancers import LoadBalancerNotFoundError +from dbt_platform_helper.providers.load_balancers import ListenerNotFoundException +from dbt_platform_helper.providers.load_balancers import LoadBalancerNotFoundException from dbt_platform_helper.utils.application import Service from tests.platform_helper.conftest import BASE_DIR @@ -200,7 +200,7 @@ def test_offline_an_environment_when_load_balancer_not_found( load_application, mock_application, ): - find_https_listener.side_effect = LoadBalancerNotFoundError() + find_https_listener.side_effect = LoadBalancerNotFoundException() 
load_application.return_value = mock_application result = CliRunner().invoke( @@ -232,7 +232,7 @@ def test_offline_an_environment_when_listener_not_found( mock_application, ): load_application.return_value = mock_application - find_https_listener.side_effect = ListenerNotFoundError() + find_https_listener.side_effect = ListenerNotFoundException() result = CliRunner().invoke( offline, ["--app", "test-application", "--env", "development"], input="y\n" @@ -380,7 +380,7 @@ def test_online_an_environment_when_listener_not_found( mock_application, ): load_application.return_value = mock_application - find_https_listener.side_effect = ListenerNotFoundError() + find_https_listener.side_effect = ListenerNotFoundException() result = CliRunner().invoke( online, ["--app", "test-application", "--env", "development"], input="y\n" @@ -411,7 +411,7 @@ def test_online_an_environment_when_load_balancer_not_found( from dbt_platform_helper.commands.environment import online load_application.return_value = mock_application - find_https_listener.side_effect = LoadBalancerNotFoundError() + find_https_listener.side_effect = LoadBalancerNotFoundException() result = CliRunner().invoke( online, ["--app", "test-application", "--env", "development"], input="y\n" @@ -798,7 +798,7 @@ def test_when_no_certificate_present(self, mock_find_https_listener): boto_mock = MagicMock() boto_mock.client().describe_listener_certificates.return_value = {"Certificates": []} - with pytest.raises(CertificateNotFoundError): + with pytest.raises(CertificateNotFoundException): find_https_certificate(boto_mock, "test-application", "development") @patch( diff --git a/tests/platform_helper/test_exceptions.py b/tests/platform_helper/test_exceptions.py index 7c7d7a8d3..7145fb42c 100644 --- a/tests/platform_helper/test_exceptions.py +++ b/tests/platform_helper/test_exceptions.py @@ -2,33 +2,33 @@ import pytest -from dbt_platform_helper.exceptions import AddonNotFoundError -from dbt_platform_helper.exceptions import 
AddonTypeMissingFromConfigError -from dbt_platform_helper.exceptions import ApplicationDeploymentNotTriggered -from dbt_platform_helper.exceptions import ApplicationEnvironmentNotFoundError -from dbt_platform_helper.exceptions import ApplicationNotFoundError -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import CreateTaskTimeoutError -from dbt_platform_helper.exceptions import ECSAgentNotRunning -from dbt_platform_helper.exceptions import ImageNotFoundError -from dbt_platform_helper.exceptions import InvalidAddonTypeError -from dbt_platform_helper.exceptions import NoClusterError -from dbt_platform_helper.exceptions import NoCopilotCodebasesFoundError -from dbt_platform_helper.exceptions import NotInCodeBaseRepositoryError -from dbt_platform_helper.exceptions import ParameterNotFoundError -from dbt_platform_helper.exceptions import SecretNotFoundError +from dbt_platform_helper.domain.codebase import ApplicationDeploymentNotTriggered +from dbt_platform_helper.domain.codebase import ApplicationEnvironmentNotFoundException +from dbt_platform_helper.domain.codebase import NotInCodeBaseRepositoryException +from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException +from dbt_platform_helper.providers.aws import CreateTaskTimeoutException +from dbt_platform_helper.providers.aws import ImageNotFoundException +from dbt_platform_helper.providers.aws import LogGroupNotFoundException +from dbt_platform_helper.providers.ecs import ECSAgentNotRunningException +from dbt_platform_helper.providers.ecs import NoClusterException +from dbt_platform_helper.providers.secrets import AddonNotFoundException +from dbt_platform_helper.providers.secrets import AddonTypeMissingFromConfigException +from dbt_platform_helper.providers.secrets import InvalidAddonTypeException +from dbt_platform_helper.providers.secrets import ParameterNotFoundException +from dbt_platform_helper.providers.secrets import 
SecretNotFoundException +from dbt_platform_helper.utils.application import ApplicationNotFoundException @pytest.mark.parametrize( "exception, exception_params, expected_message", [ ( - AddonNotFoundError, + AddonNotFoundException, {"addon_name": "test-addon"}, """Addon "test-addon" does not exist.""", ), ( - AddonTypeMissingFromConfigError, + AddonTypeMissingFromConfigException, {"addon_name": "test-addon"}, """The configuration for the addon test-addon, is misconfigured and missing the addon type.""", ), @@ -38,22 +38,22 @@ """Your deployment for test-codebase was not triggered.""", ), ( - ApplicationEnvironmentNotFoundError, + ApplicationEnvironmentNotFoundException, {"environment": "development"}, """The environment "development" either does not exist or has not been deployed.""", ), ( - ApplicationNotFoundError, + ApplicationNotFoundException, {"application_name": "test-application"}, """The account "foo" does not contain the application "test-application"; ensure you have set the environment variable "AWS_PROFILE" correctly.""", ), ( - CopilotCodebaseNotFoundError, + CopilotCodebaseNotFoundException, {"codebase": "test-codebase-exists"}, """The codebase "test-codebase-exists" either does not exist or has not been deployed.""", ), ( - CreateTaskTimeoutError, + CreateTaskTimeoutException, { "addon_name": "test-addon", "application_name": "test-application", @@ -62,42 +62,42 @@ """Client (test-addon) ECS task has failed to start for "test-application" in "environment" environment.""", ), ( - InvalidAddonTypeError, + InvalidAddonTypeException, {"addon_type": "test-addon-type"}, """Addon type "test-addon-type" is not supported, we support: opensearch, postgres, redis.""", ), ( - ImageNotFoundError, + ImageNotFoundException, {"commit": "test-commit-hash"}, """The commit hash "test-commit-hash" has not been built into an image, try the `platform-helper codebase build` command first.""", ), ( - NoCopilotCodebasesFoundError, - {"application_name": "test-application"}, 
- """No codebases found for application "test-application".""", + LogGroupNotFoundException, + {"log_group_name": "test-log-group"}, + """No log group called "test-log-group".""", ), ( - NoClusterError, + NoClusterException, {"application_name": "test-application", "environment": "environment"}, """No ECS cluster found for "test-application" in "environment" environment.""", ), ( - NotInCodeBaseRepositoryError, + NotInCodeBaseRepositoryException, {}, """You are in the deploy repository; make sure you are in the application codebase repository.""", ), ( - ParameterNotFoundError, + ParameterNotFoundException, {"application_name": "test-application", "environment": "environment"}, """No parameter called "/copilot/applications/test-application/environments/environment/addons". Try deploying the "test-application" "environment" environment.""", ), ( - SecretNotFoundError, + SecretNotFoundException, {"secret_name": "test-secret"}, """No secret called "test-secret".""", ), ( - ECSAgentNotRunning, + ECSAgentNotRunningException, {}, """ECS exec agent never reached "RUNNING" status""", ), diff --git a/tests/platform_helper/utils/test_application.py b/tests/platform_helper/utils/test_application.py index a3bce6d5a..b7d26c3c0 100644 --- a/tests/platform_helper/utils/test_application.py +++ b/tests/platform_helper/utils/test_application.py @@ -7,8 +7,8 @@ import boto3 from moto import mock_aws -from dbt_platform_helper.exceptions import ApplicationNotFoundError from dbt_platform_helper.utils.application import Application +from dbt_platform_helper.utils.application import ApplicationNotFoundException from dbt_platform_helper.utils.application import Environment from dbt_platform_helper.utils.application import get_application_name from dbt_platform_helper.utils.application import load_application @@ -253,4 +253,4 @@ def test_loading_an_application_in_a_different_account( Type="String", ) - self.assertRaises(ApplicationNotFoundError, load_application, "sample") + 
self.assertRaises(ApplicationNotFoundException, load_application, "sample") diff --git a/tests/platform_helper/utils/test_arn_parser.py b/tests/platform_helper/utils/test_arn_parser.py index b26d44afe..4071127b0 100644 --- a/tests/platform_helper/utils/test_arn_parser.py +++ b/tests/platform_helper/utils/test_arn_parser.py @@ -1,7 +1,7 @@ import pytest from parameterized import parameterized -from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.providers.validation import ValidationException from dbt_platform_helper.utils.arn_parser import ARN diff --git a/tests/platform_helper/utils/test_aws.py b/tests/platform_helper/utils/test_aws.py index 9c0b8d239..bc92aa7b6 100644 --- a/tests/platform_helper/utils/test_aws.py +++ b/tests/platform_helper/utils/test_aws.py @@ -10,11 +10,11 @@ import pytest from moto import mock_aws -from dbt_platform_helper.exceptions import AWSException -from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError -from dbt_platform_helper.exceptions import ResourceNotFoundException -from dbt_platform_helper.exceptions import ValidationException -from dbt_platform_helper.utils.aws import NoProfileForAccountIdError +from dbt_platform_helper.providers.aws import AWSException +from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException +from dbt_platform_helper.providers.aws import LogGroupNotFoundException +from dbt_platform_helper.providers.validation import ValidationException +from dbt_platform_helper.utils.aws import NoProfileForAccountIdException from dbt_platform_helper.utils.aws import Vpc from dbt_platform_helper.utils.aws import check_codebase_exists from dbt_platform_helper.utils.aws import get_account_details @@ -462,7 +462,7 @@ def test_check_codebase_does_not_exist(mock_application): """, ) - with pytest.raises(CopilotCodebaseNotFoundError): + with pytest.raises(CopilotCodebaseNotFoundException): check_codebase_exists( 
mock_application.environments["development"].session, mock_application, @@ -480,7 +480,7 @@ def test_check_codebase_errors_when_json_is_malformed(mock_application): Value="not valid JSON", ) - with pytest.raises(CopilotCodebaseNotFoundError): + with pytest.raises(CopilotCodebaseNotFoundException): check_codebase_exists( mock_application.environments["development"].session, mock_application, "application" ) @@ -532,7 +532,7 @@ def test_get_profile_name_from_account_id_when_not_using_sso(fs): def test_get_profile_name_from_account_id_with_no_matching_account(fakefs): - with pytest.raises(NoProfileForAccountIdError) as error: + with pytest.raises(NoProfileForAccountIdException) as error: get_profile_name_from_account_id("999999999") assert str(error.value) == "No profile found for account 999999999" @@ -1012,5 +1012,5 @@ def test_wait_for_log_group_to_exist_fails_when_log_group_not_found(): mock_client = Mock() mock_client.describe_log_groups.return_value = {"logGroups": [{"logGroupName": log_group_name}]} - with pytest.raises(ResourceNotFoundException): + with pytest.raises(LogGroupNotFoundException, match=f'No log group called "not_found"'): wait_for_log_group_to_exist(mock_client, "not_found", 1) diff --git a/tests/platform_helper/utils/test_versioning.py b/tests/platform_helper/utils/test_versioning.py index f5fa14bc2..195c6320e 100644 --- a/tests/platform_helper/utils/test_versioning.py +++ b/tests/platform_helper/utils/test_versioning.py @@ -10,9 +10,9 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE -from dbt_platform_helper.exceptions import IncompatibleMajorVersion -from dbt_platform_helper.exceptions import IncompatibleMinorVersion -from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.providers.validation import IncompatibleMajorVersionException +from dbt_platform_helper.providers.validation import IncompatibleMinorVersionException 
+from dbt_platform_helper.providers.validation import ValidationException from dbt_platform_helper.utils.versioning import PlatformHelperVersions from dbt_platform_helper.utils.versioning import check_platform_helper_version_mismatch from dbt_platform_helper.utils.versioning import ( @@ -87,10 +87,10 @@ def test_get_github_version_from_tags(request_get): @pytest.mark.parametrize( "version_check", [ - ((1, 40, 0), (1, 30, 0), IncompatibleMinorVersion), - ((1, 40, 0), (2, 1, 0), IncompatibleMajorVersion), - ((0, 2, 40), (0, 1, 30), IncompatibleMajorVersion), - ((0, 1, 40), (0, 1, 30), IncompatibleMajorVersion), + ((1, 40, 0), (1, 30, 0), IncompatibleMinorVersionException), + ((1, 40, 0), (2, 1, 0), IncompatibleMajorVersionException), + ((0, 2, 40), (0, 1, 30), IncompatibleMajorVersionException), + ((0, 1, 40), (0, 1, 30), IncompatibleMajorVersionException), ], ) def test_validate_version_compatability( @@ -109,10 +109,10 @@ def test_validate_version_compatability( @pytest.mark.parametrize( "template_check", [ - ("addon_newer_major_version.yml", IncompatibleMajorVersion, ""), - ("addon_newer_minor_version.yml", IncompatibleMinorVersion, ""), - ("addon_older_major_version.yml", IncompatibleMajorVersion, ""), - ("addon_older_minor_version.yml", IncompatibleMinorVersion, ""), + ("addon_newer_major_version.yml", IncompatibleMajorVersionException, ""), + ("addon_newer_minor_version.yml", IncompatibleMinorVersionException, ""), + ("addon_older_major_version.yml", IncompatibleMajorVersionException, ""), + ("addon_older_minor_version.yml", IncompatibleMinorVersionException, ""), ("addon_no_version.yml", ValidationException, "Template %s has no version information"), ], ) @@ -130,9 +130,9 @@ def test_validate_template_version(template_check: Tuple[str, Type[BaseException @pytest.mark.parametrize( "expected_exception", [ - IncompatibleMajorVersion, - IncompatibleMinorVersion, - IncompatibleMinorVersion, + IncompatibleMajorVersionException, + IncompatibleMinorVersionException, + 
IncompatibleMinorVersionException, ], ) @patch("click.secho") @@ -152,14 +152,14 @@ def test_check_platform_helper_version_needs_update( mock_get_platform_helper_versions.assert_called_with(include_project_versions=False) - if expected_exception == IncompatibleMajorVersion: + if expected_exception == IncompatibleMajorVersionException: secho.assert_called_with( "You are running platform-helper v1.0.0, upgrade to v1.0.0 by running run `pip install " "--upgrade dbt-platform-helper`.", fg="red", ) - if expected_exception == IncompatibleMinorVersion: + if expected_exception == IncompatibleMinorVersionException: secho.assert_called_with( "You are running platform-helper v1.0.0, upgrade to v1.0.0 by running run `pip install " "--upgrade dbt-platform-helper`.", From d06ddcc0253a76950f54b881af84be14b0981b66 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Fri, 6 Dec 2024 14:43:08 +0000 Subject: [PATCH 29/38] fix: DBTP-1498 - Add option for database dump filename (#681) --- dbt_platform_helper/COMMANDS.md | 7 ++- dbt_platform_helper/commands/database.py | 18 ++++-- dbt_platform_helper/domain/database_copy.py | 21 +++---- images/tools/database-copy/entrypoint.sh | 12 ++-- .../domain/test_database_copy.py | 22 +++---- .../platform_helper/test_command_database.py | 57 ++++++++++++++++++- 6 files changed, 103 insertions(+), 34 deletions(-) diff --git a/dbt_platform_helper/COMMANDS.md b/dbt_platform_helper/COMMANDS.md index fd2330c87..ed754c16f 100644 --- a/dbt_platform_helper/COMMANDS.md +++ b/dbt_platform_helper/COMMANDS.md @@ -765,6 +765,7 @@ platform-helper database (dump|load|copy) ``` platform-helper database dump --from --database [--app ] [--from-vpc ] + [--filename ] ``` ## Options @@ -777,6 +778,8 @@ platform-helper database dump --from --database - The name of the database you are dumping data from - `--from-vpc ` - The vpc the specified environment is running in. 
Required unless you are running the command from your deploy repo +- `--filename ` + - Specify a name for the database dump file. Recommended if the same dump database is being used for multiple load environments - `--help ` _Defaults to False._ - Show this message and exit. @@ -791,7 +794,7 @@ platform-helper database dump --from --database ``` platform-helper database load --to --database [--app ] [--to-vpc ] - [--auto-approve] + [--filename ] [--auto-approve] ``` ## Options @@ -806,6 +809,8 @@ platform-helper database load --to --database - The vpc the specified environment is running in. Required unless you are running the command from your deploy repo - `--auto-approve ` _Defaults to False._ +- `--filename ` + - Specify a name for the database dump file. Recommended if the same dump database is being used for multiple load environments - `--help ` _Defaults to False._ - Show this message and exit. diff --git a/dbt_platform_helper/commands/database.py b/dbt_platform_helper/commands/database.py index 64aaaf905..acb1a1520 100644 --- a/dbt_platform_helper/commands/database.py +++ b/dbt_platform_helper/commands/database.py @@ -31,10 +31,15 @@ def database(): type=str, help="The vpc the specified environment is running in. Required unless you are running the command from your deploy repo", ) -def dump(app, from_env, database, from_vpc): +@click.option( + "--filename", + type=str, + help="Specify a name for the database dump file. Recommended if the same dump database is being used for multiple load environments", +) +def dump(app, from_env, database, from_vpc, filename): """Dump a database into an S3 bucket.""" data_copy = DatabaseCopy(app, database) - data_copy.dump(from_env, from_vpc) + data_copy.dump(from_env, from_vpc, filename) # Todo: Catch expected errors and output message @@ -56,10 +61,15 @@ def dump(app, from_env, database, from_vpc): help="The vpc the specified environment is running in. 
Required unless you are running the command from your deploy repo", ) @click.option("--auto-approve/--no-auto-approve", default=False) -def load(app, to_env, database, to_vpc, auto_approve): +@click.option( + "--filename", + type=str, + help="Specify a name for the database dump file. Recommended if the same dump database is being used for multiple load environments", +) +def load(app, to_env, database, to_vpc, auto_approve, filename): """Load a database from an S3 bucket.""" data_copy = DatabaseCopy(app, database, auto_approve) - data_copy.load(to_env, to_vpc) + data_copy.load(to_env, to_vpc, filename) # Todo: Catch expected errors and output message diff --git a/dbt_platform_helper/domain/database_copy.py b/dbt_platform_helper/domain/database_copy.py index 5a6bf4a43..46f019af7 100644 --- a/dbt_platform_helper/domain/database_copy.py +++ b/dbt_platform_helper/domain/database_copy.py @@ -61,7 +61,7 @@ def __init__( except ApplicationNotFoundException: abort(f"No such application '{app}'.") - def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str): + def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, filename: str): vpc_name = self.enrich_vpc_name(env, vpc_name) environments = self.application.environments @@ -89,7 +89,7 @@ def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str try: task_arn = self.run_database_copy_task( - env_session, env, vpc_config, is_dump, db_connection_string, to_env + env_session, env, vpc_config, is_dump, db_connection_string, filename ) except Exception as exc: self.abort(f"{exc} (Account id: {self.account_id(env)})") @@ -122,14 +122,15 @@ def run_database_copy_task( vpc_config: Vpc, is_dump: bool, db_connection_string: str, - to_env: str, + filename: str, ) -> str: client = session.client("ecs") action = "dump" if is_dump else "load" + dump_file_name = filename if filename else "data_dump" env_vars = [ {"name": "DATA_COPY_OPERATION", "value": action.upper()}, {"name": 
"DB_CONNECTION_STRING", "value": db_connection_string}, - {"name": "TO_ENVIRONMENT", "value": to_env}, + {"name": "DUMP_FILE_NAME", "value": dump_file_name}, ] if not is_dump: env_vars.append({"name": "ECS_CLUSTER", "value": f"{self.app}-{env}"}) @@ -159,12 +160,12 @@ def run_database_copy_task( return response.get("tasks", [{}])[0].get("taskArn") - def dump(self, env: str, vpc_name: str, to_env: str): - self._execute_operation(True, env, vpc_name, to_env) + def dump(self, env: str, vpc_name: str, filename: str = None): + self._execute_operation(True, env, vpc_name, filename) - def load(self, env: str, vpc_name: str): + def load(self, env: str, vpc_name: str, filename: str = None): if self.is_confirmed_ready_to_load(env): - self._execute_operation(False, env, vpc_name, to_env=env) + self._execute_operation(False, env, vpc_name, filename) def copy( self, @@ -179,8 +180,8 @@ def copy( to_vpc = self.enrich_vpc_name(to_env, to_vpc) if not no_maintenance_page: self.maintenance_page_provider.activate(self.app, to_env, services, template, to_vpc) - self.dump(from_env, from_vpc, to_env) - self.load(to_env, to_vpc) + self.dump(from_env, from_vpc, f"data_dump_{to_env}") + self.load(to_env, to_vpc, f"data_dump_{to_env}") if not no_maintenance_page: self.maintenance_page_provider.deactivate(self.app, to_env) diff --git a/images/tools/database-copy/entrypoint.sh b/images/tools/database-copy/entrypoint.sh index 4dd56c8a3..487fa3479 100644 --- a/images/tools/database-copy/entrypoint.sh +++ b/images/tools/database-copy/entrypoint.sh @@ -2,9 +2,9 @@ clean_up(){ echo "Cleaning up dump file" - rm "data_dump_${TO_ENVIRONMENT}.sql" + rm "${DUMP_FILE_NAME}.sql" echo "Removing dump file from S3" - aws s3 rm s3://${S3_BUCKET_NAME}/"data_dump_${TO_ENVIRONMENT}.sql" + aws s3 rm s3://${S3_BUCKET_NAME}/"${DUMP_FILE_NAME}.sql" exit_code=$? 
if [ ${exit_code} -ne 0 ] then @@ -27,7 +27,7 @@ handle_errors(){ if [ "${DATA_COPY_OPERATION:-DUMP}" != "LOAD" ] then echo "Starting data dump" - pg_dump --no-owner --no-acl --format c "${DB_CONNECTION_STRING}" > "data_dump_${TO_ENVIRONMENT}.sql" + pg_dump --no-owner --no-acl --format c "${DB_CONNECTION_STRING}" > "${DUMP_FILE_NAME}.sql" exit_code=$? if [ ${exit_code} -ne 0 ] @@ -36,7 +36,7 @@ then exit $exit_code fi - aws s3 cp "data_dump_${TO_ENVIRONMENT}.sql" s3://${S3_BUCKET_NAME}/ + aws s3 cp "${DUMP_FILE_NAME}.sql" s3://${S3_BUCKET_NAME}/ exit_code=$? if [ ${exit_code} -ne 0 ] @@ -50,7 +50,7 @@ else echo "Starting data load" echo "Copying data dump from S3" - aws s3 cp s3://${S3_BUCKET_NAME}/"data_dump_${TO_ENVIRONMENT}.sql" "data_dump_${TO_ENVIRONMENT}.sql" + aws s3 cp s3://${S3_BUCKET_NAME}/"${DUMP_FILE_NAME}.sql" "${DUMP_FILE_NAME}.sql" handle_errors $? "Copy failed" @@ -82,7 +82,7 @@ else handle_errors $? "Clear down failed" echo "Restoring data from dump file" - pg_restore --format c --dbname "${DB_CONNECTION_STRING}" "data_dump_${TO_ENVIRONMENT}.sql" + pg_restore --format c --dbname "${DB_CONNECTION_STRING}" "${DUMP_FILE_NAME}.sql" handle_errors $? 
"Restore failed" for service in ${SERVICES} diff --git a/tests/platform_helper/domain/test_database_copy.py b/tests/platform_helper/domain/test_database_copy.py index f3c6749d1..83ec35097 100644 --- a/tests/platform_helper/domain/test_database_copy.py +++ b/tests/platform_helper/domain/test_database_copy.py @@ -58,7 +58,7 @@ def test_run_database_copy_task(is_dump, exp_operation): mock_client.run_task.return_value = {"tasks": [{"taskArn": "arn:aws:ecs:test-task-arn"}]} actual_task_arn = db_copy.run_database_copy_task( - mock_session, "test-env", vpc, is_dump, db_connection_string, "test-env" + mock_session, "test-env", vpc, is_dump, db_connection_string, "test-dump-file" ) assert actual_task_arn == "arn:aws:ecs:test-task-arn" @@ -67,7 +67,7 @@ def test_run_database_copy_task(is_dump, exp_operation): expected_env_vars = [ {"name": "DATA_COPY_OPERATION", "value": exp_operation.upper()}, {"name": "DB_CONNECTION_STRING", "value": "connection_string"}, - {"name": "TO_ENVIRONMENT", "value": "test-env"}, + {"name": "DUMP_FILE_NAME", "value": "test-dump-file"}, ] if not is_dump: expected_env_vars.append( @@ -117,7 +117,7 @@ def test_database_dump(): db_copy.enrich_vpc_name = Mock() db_copy.enrich_vpc_name.return_value = "test-vpc-override" - db_copy.dump(env, vpc_name, "test-env") + db_copy.dump(env, vpc_name) mocks.load_application.assert_called_once() mocks.vpc_config.assert_called_once_with( @@ -127,7 +127,7 @@ def test_database_dump(): mocks.environment.session, app, env, "test-app-test-env-test-db" ) mock_run_database_copy_task.assert_called_once_with( - mocks.environment.session, env, mocks.vpc, True, "test-db-connection-string", "test-env" + mocks.environment.session, env, mocks.vpc, True, "test-db-connection-string", None ) mocks.input.assert_not_called() mocks.echo.assert_has_calls( @@ -170,7 +170,7 @@ def test_database_load_with_response_of_yes(): ) mock_run_database_copy_task.assert_called_once_with( - mocks.environment.session, env, mocks.vpc, False, 
"test-db-connection-string", "test-env" + mocks.environment.session, env, mocks.vpc, False, "test-db-connection-string", None ) mocks.input.assert_called_once_with( @@ -247,7 +247,7 @@ def test_database_dump_handles_db_name_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("test-env", "vpc-name", "test-env") + db_copy.dump("test-env", "vpc-name") else: db_copy.load("test-env", "vpc-name") @@ -263,7 +263,7 @@ def test_database_dump_handles_env_name_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("bad-env", "vpc-name", "test-env") + db_copy.dump("bad-env", "vpc-name") else: db_copy.load("bad-env", "vpc-name") @@ -284,7 +284,7 @@ def test_database_dump_handles_account_id_errors(is_dump): with pytest.raises(SystemExit) as exc: if is_dump: - db_copy.dump("test-env", "vpc-name", "test-env") + db_copy.dump("test-env", "vpc-name") else: db_copy.load("test-env", "vpc-name") @@ -363,8 +363,10 @@ def test_copy_command(services, template): mocks.maintenance_page_provider.activate.assert_called_once_with( "test-app", "test-to-env", services, template, "test-vpc-override" ) - db_copy.dump.assert_called_once_with("test-from-env", "test-from-vpc", "test-to-env") - db_copy.load.assert_called_once_with("test-to-env", "test-vpc-override") + db_copy.dump.assert_called_once_with("test-from-env", "test-from-vpc", "data_dump_test-to-env") + db_copy.load.assert_called_once_with( + "test-to-env", "test-vpc-override", "data_dump_test-to-env" + ) mocks.maintenance_page_provider.deactivate.assert_called_once_with("test-app", "test-to-env") diff --git a/tests/platform_helper/test_command_database.py b/tests/platform_helper/test_command_database.py index 7a0428563..4969a0537 100644 --- a/tests/platform_helper/test_command_database.py +++ b/tests/platform_helper/test_command_database.py @@ -28,7 +28,33 @@ def test_command_dump_success(mock_database_copy_object): assert result.exit_code == 0 
mock_database_copy_object.assert_called_once_with("my_app", "my_postgres") - mock_database_copy_instance.dump.assert_called_once_with("my_env", "my_vpc") + mock_database_copy_instance.dump.assert_called_once_with("my_env", "my_vpc", None) + + +@patch("dbt_platform_helper.commands.database.DatabaseCopy") +def test_command_dump_success_with_filename(mock_database_copy_object): + mock_database_copy_instance = mock_database_copy_object.return_value + + runner = CliRunner() + result = runner.invoke( + dump, + [ + "--app", + "my_app", + "--from", + "my_env", + "--database", + "my_postgres", + "--from-vpc", + "my_vpc", + "--filename", + "my_dump_file", + ], + ) + + assert result.exit_code == 0 + mock_database_copy_object.assert_called_once_with("my_app", "my_postgres") + mock_database_copy_instance.dump.assert_called_once_with("my_env", "my_vpc", "my_dump_file") @patch("dbt_platform_helper.commands.database.DatabaseCopy") @@ -51,7 +77,7 @@ def test_command_load_success(mock_database_copy_object): assert result.exit_code == 0 mock_database_copy_object.assert_called_once_with("my_app", "my_postgres", False) - mock_database_copy_instance.load.assert_called_once_with("my_env", "my_vpc") + mock_database_copy_instance.load.assert_called_once_with("my_env", "my_vpc", None) @patch("dbt_platform_helper.commands.database.DatabaseCopy") @@ -75,7 +101,32 @@ def test_command_load_success_with_auto_approve(mock_database_copy_object): assert result.exit_code == 0 mock_database_copy_object.assert_called_once_with("my_app", "my_postgres", True) - mock_database_copy_instance.load.assert_called_once_with("my_env", "my_vpc") + mock_database_copy_instance.load.assert_called_once_with("my_env", "my_vpc", None) + + +@patch("dbt_platform_helper.commands.database.DatabaseCopy") +def test_command_load_success_with_filename(mock_database_copy_object): + mock_database_copy_instance = mock_database_copy_object.return_value + runner = CliRunner() + result = runner.invoke( + load, + [ + "--app", + 
"my_app", + "--to", + "my_env", + "--database", + "my_postgres", + "--to-vpc", + "my_vpc", + "--filename", + "my_dump_file", + ], + ) + + assert result.exit_code == 0 + mock_database_copy_object.assert_called_once_with("my_app", "my_postgres", False) + mock_database_copy_instance.load.assert_called_once_with("my_env", "my_vpc", "my_dump_file") @patch("dbt_platform_helper.commands.database.DatabaseCopy") From cda1e7bc9daa1732e9032c7d6566716e3151b961 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Fri, 6 Dec 2024 15:21:46 +0000 Subject: [PATCH 30/38] fix: DBTP-1498 - Add validation for database copy pipeline (#683) --- dbt_platform_helper/utils/validation.py | 1 + .../utils/fixtures/addons_files/postgres_addons.yml | 10 ++++++++++ .../addons_files/postgres_addons_bad_data.yml | 13 ++++++++++++- tests/platform_helper/utils/test_validation.py | 12 ++++++++++++ 4 files changed, 35 insertions(+), 1 deletion(-) diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 7a812b7a9..cc61b48d3 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -215,6 +215,7 @@ def is_between(value): "to": ENV_NAME, Optional("from_account"): str, Optional("to_account"): str, + Optional("pipeline"): {Optional("schedule"): str}, } POSTGRES_DEFINITION = { diff --git a/tests/platform_helper/utils/fixtures/addons_files/postgres_addons.yml b/tests/platform_helper/utils/fixtures/addons_files/postgres_addons.yml index 85fae77e3..21600f384 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/postgres_addons.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/postgres_addons.yml @@ -16,3 +16,13 @@ my-rds-db: dev: deletion_policy: "Delete" deletion_protection: false + database_copy: + - from: prod + to: hotfix + pipeline: { } + - from: prod + from_account: "9999999999" + to: staging + to_account: "1122334455" + pipeline: + schedule: "0 0 * * WED" diff --git 
a/tests/platform_helper/utils/fixtures/addons_files/postgres_addons_bad_data.yml b/tests/platform_helper/utils/fixtures/addons_files/postgres_addons_bad_data.yml index 0cb88ae00..cf214847c 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/postgres_addons_bad_data.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/postgres_addons_bad_data.yml @@ -106,4 +106,15 @@ my-rds-data-migration-invalid-environments: version: 14.4 database_copy: - from: $ - to: £ \ No newline at end of file + to: £ + +my-rds-data-migration-schedule-should-be-a-string: + type: postgres + version: 14.4 + environments: + prod: {} + database_copy: + - from: prod + to: dev + pipeline: + schedule: false diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 3b26b5e25..4902b2096 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -142,6 +142,7 @@ def test_validate_addons_success(addons_file): "my-rds-data-migration-invalid-environments": r"Environment name \$ is invalid: names must only contain lowercase alphanumeric characters, or be the '\*' default environment", "my-rds-data-migration-missing-key": r"Missing key: 'to'.*", "my-rds-data-migration-invalid-key": r"Wrong key 'non-existent-key' in.*", + "my-rds-data-migration-schedule-should-be-a-string": r"'database_copy.*False should be instance of 'str'", }, ), ( @@ -757,6 +758,17 @@ def test_config_file_check_fails_for_unsupported_files_exist( "to_account": "9999999999", } ], + [{"from": "test", "to": "dev", "pipeline": {}}], + [{"from": "test", "to": "dev", "pipeline": {"schedule": "0 0 * * WED"}}], + [ + { + "from": "test", + "to": "dev", + "from_account": "9999999999", + "to_account": "1122334455", + "pipeline": {"schedule": "0 0 * * WED"}, + } + ], ], ) def test_validate_database_copy_section_success_cases(database_copy_section): From 7e1d75f95cacb68e01f0f62448359166509c20b0 Mon Sep 17 00:00:00 2001 
From: Kate Sugden <107400614+ksugden@users.noreply.github.com> Date: Fri, 6 Dec 2024 15:40:39 +0000 Subject: [PATCH 31/38] feat: DBTP-1568 - Add s3 support for cross environment service access (#654) Co-authored-by: Anthony Roy Co-authored-by: Anthony Roy <81255001+antroy-madetech@users.noreply.github.com> --- dbt_platform_helper/addons-template-map.yml | 29 ---- dbt_platform_helper/commands/copilot.py | 28 ++-- dbt_platform_helper/utils/validation.py | 21 ++- tests/platform_helper/conftest.py | 1 - .../utils/fixtures/addons_files/s3_addons.yml | 15 ++ .../addons_files/s3_addons_bad_data.yml | 131 ++++++++++++++++++ .../platform_helper/utils/test_validation.py | 9 ++ 7 files changed, 185 insertions(+), 49 deletions(-) delete mode 100644 dbt_platform_helper/addons-template-map.yml diff --git a/dbt_platform_helper/addons-template-map.yml b/dbt_platform_helper/addons-template-map.yml deleted file mode 100644 index eb9d9b610..000000000 --- a/dbt_platform_helper/addons-template-map.yml +++ /dev/null @@ -1,29 +0,0 @@ -# This file maps addon types to svc level templates - -# explanation: - -# {addons-type}: -# svc: -# - template: path/to/template.yml - -redis: {} -postgres: {} -opensearch: {} -s3: - svc: - - template: addons/svc/s3-policy.yml -s3-policy: - svc: - - template: addons/svc/s3-policy.yml -appconfig-ipfilter: - svc: - - template: addons/svc/appconfig-ipfilter.yml -subscription-filter: - svc: - - template: addons/svc/subscription-filter.yml -monitoring: {} -vpc: {} -alb: {} -prometheus-policy: - svc: - - template: addons/svc/prometheus-policy.yml diff --git a/dbt_platform_helper/commands/copilot.py b/dbt_platform_helper/commands/copilot.py index b6af50965..620d99c7c 100755 --- a/dbt_platform_helper/commands/copilot.py +++ b/dbt_platform_helper/commands/copilot.py @@ -35,6 +35,14 @@ "Worker Service", ] +ADDON_TEMPLATE_MAP = { + "s3": ["addons/svc/s3-policy.yml"], + "s3-policy": ["addons/svc/s3-policy.yml"], + "appconfig-ipfilter": 
["addons/svc/appconfig-ipfilter.yml"], + "subscription-filter": ["addons/svc/subscription-filter.yml"], + "prometheus-policy": ["addons/svc/prometheus-policy.yml"], +} + def list_copilot_local_environments(): return [ @@ -248,9 +256,6 @@ def make_addons(): application_name = get_application_name() - with open(PACKAGE_DIR / "addons-template-map.yml") as fd: - addon_template_map = yaml.safe_load(fd) - click.echo("\n>>> Generating Terraform compatible addons CloudFormation\n") env_path = Path(f"copilot/environments/") @@ -270,11 +275,6 @@ def make_addons(): print(f">>>>>>>>> {addon_name}") addon_type = addon_config.pop("type") environments = addon_config.pop("environments") - if addon_template_map[addon_type].get("requires_addons_parameters", False): - pass - if addon_type in ["postgres"]: - pass - environment_addon_config = { "addon_type": addon_type, "environments": environments, @@ -309,7 +309,6 @@ def make_addons(): _generate_service_addons( addon_config, addon_name, - addon_template_map, addon_type, output_dir, service_addon_config, @@ -339,7 +338,6 @@ def _generate_env_overrides(output_dir): def _generate_service_addons( addon_config, addon_name, - addon_template_map, addon_type, output_dir, service_addon_config, @@ -347,8 +345,8 @@ def _generate_service_addons( log_destination_arns, ): # generate svc addons - for addon in addon_template_map[addon_type].get("svc", []): - template = templates.get_template(addon["template"]) + for addon_template in ADDON_TEMPLATE_MAP.get(addon_type, []): + template = templates.get_template(addon_template) for svc in addon_config.get("services", []): service_path = Path(f"copilot/{svc}/addons/") @@ -360,10 +358,10 @@ def _generate_service_addons( } ) - filename = addon.get("filename", f"{addon_name}.yml") - (output_dir / service_path).mkdir(parents=True, exist_ok=True) - click.echo(mkfile(output_dir, service_path / filename, contents, overwrite=True)) + click.echo( + mkfile(output_dir, service_path / f"{addon_name}.yml", contents, 
overwrite=True) + ) def _cleanup_old_files(config, output_dir, env_addons_path, env_overrides_path): diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index cc61b48d3..aaaf62e98 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -286,9 +286,19 @@ def dbt_email_address_regex(key): "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"), } -EXTERNAL_ROLE_ACCESS_NAME = Regex( - r"^([a-z][a-zA-Z0-9_-]*)$", - error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hypen or underscore", +CROSS_ENVIRONMENT_SERVICE_ACCESS = { + "application": str, + "environment": ENV_NAME, + "account": str, + "service": str, + "read": bool, + "write": bool, + "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"), +} + +LOWER_ALPHANUMERIC = Regex( + r"^([a-z][a-zA-Z0-9_-]*|\*)$", + error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore", ) DATA_IMPORT = { @@ -313,7 +323,10 @@ def dbt_email_address_regex(key): Optional("versioning"): bool, Optional("lifecycle_rules"): [LIFECYCLE_RULE], Optional("data_migration"): DATA_MIGRATION, - Optional("external_role_access"): {EXTERNAL_ROLE_ACCESS_NAME: EXTERNAL_ROLE_ACCESS}, + Optional("external_role_access"): {LOWER_ALPHANUMERIC: EXTERNAL_ROLE_ACCESS}, + Optional("cross_environment_service_access"): { + LOWER_ALPHANUMERIC: CROSS_ENVIRONMENT_SERVICE_ACCESS + }, }, }, } diff --git a/tests/platform_helper/conftest.py b/tests/platform_helper/conftest.py index 95a0d23b6..3c74652ef 100644 --- a/tests/platform_helper/conftest.py +++ b/tests/platform_helper/conftest.py @@ -43,7 +43,6 @@ def fakefs(fs): fs.add_real_directory(FIXTURES_DIR, lazy_read=True) fs.add_real_file(BASE_DIR / "dbt_platform_helper/addon-plans.yml") fs.add_real_file(BASE_DIR / "dbt_platform_helper/default-extensions.yml") - fs.add_real_file(BASE_DIR / 
"dbt_platform_helper/addons-template-map.yml") # To avoid 'Could not find a suitable TLS CA certificate bundle...' error fs.add_real_file(Path(certifi.__file__).parent / "cacert.pem") diff --git a/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml b/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml index ec12f330f..ee08afc70 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/s3_addons.yml @@ -71,3 +71,18 @@ my-s3-bucket-with-external-access: write: True read: True cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: prod + account: test-account + service: web + write: True + read: True + cyber_sign_off_by: so.me-bo_dy@digital.trade.gov.uk diff --git a/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml b/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml index 8d9266428..a6ebd1779 100644 --- a/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml +++ b/tests/platform_helper/utils/fixtures/addons_files/s3_addons_bad_data.yml @@ -177,3 +177,134 @@ my-s3-external-access-bucket-invalid-email: write: True read: True cyber_sign_off_by: somebody@dodgy-domain.com + +my-s3-cross-environment-service-access-bucket-invalid-environment: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: hyphen-not-allowed-in-environment + account: test-account + service: web + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-invalid-email: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + 
demodjango-hotfix: + application: test-app + environment: anotherenvironment + account: test-account + service: web + write: True + read: True + cyber_sign_off_by: noone-signed-this-off + +my-s3-cross-environment-service-access-bucket-missing-application: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + environment: anotherenvironment + account: test-account + service: web + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-missing-environment: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + account: test-account + service: web + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-missing-account: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: anotherenvironment + service: web + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-missing-service: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: anotherenvironment + account: test-account + write: True + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-invalid-write: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: anotherenvironment + account: test-account + service: web + write: WRITE + read: True + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-invalid-read: + type: s3 + environments: + dev: + 
bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: anotherenvironment + account: test-account + service: web + write: True + read: READ + cyber_sign_off_by: somebody@businessandtrade.gov.uk + +my-s3-cross-environment-service-access-bucket-missing-cyber-sign-off: + type: s3 + environments: + dev: + bucket_name: mandatory + cross_environment_service_access: + demodjango-hotfix: + application: test-app + environment: anotherenvironment + account: test-account + service: web + write: True + read: True + diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 4902b2096..42e27ea1a 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -108,6 +108,15 @@ def test_validate_addons_success(addons_file): "my-s3-bucket-data-migration-worker-role-invalid-arn": r"worker_role_arn must contain a valid ARN for an IAM role", "my-s3-external-access-bucket-invalid-arn": r"role_arn must contain a valid ARN for an IAM role", "my-s3-external-access-bucket-invalid-email": r"cyber_sign_off_by must contain a valid DBT email address", + "my-s3-cross-environment-service-access-bucket-invalid-environment": r"Environment name hyphen-not-allowed-in-environment is invalid", + "my-s3-cross-environment-service-access-bucket-invalid-email": r"cyber_sign_off_by must contain a valid DBT email address", + "my-s3-cross-environment-service-access-bucket-missing-application": r"Missing key: 'application'", + "my-s3-cross-environment-service-access-bucket-missing-environment": r"Missing key: 'environment'", + "my-s3-cross-environment-service-access-bucket-missing-account": r"Missing key: 'account'", + "my-s3-cross-environment-service-access-bucket-missing-service": r"Missing key: 'service'", + "my-s3-cross-environment-service-access-bucket-invalid-write": r"cross_environment_service_access.*'WRITE' should be instance of 
'bool'", + "my-s3-cross-environment-service-access-bucket-invalid-read": r"cross_environment_service_access.*'READ' should be instance of 'bool'", + "my-s3-cross-environment-service-access-bucket-missing-cyber-sign-off": r"Missing key: 'cyber_sign_off_by'", }, ), ( From 7ab7e155437c313a0ebfa99a1755371391263fcb Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 16:17:37 +0000 Subject: [PATCH 32/38] chore(main): release 12.4.0 (#675) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 18 ++++++++++++++++++ pyproject.toml | 2 +- release-manifest.json | 2 +- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9342a7cef..666123c8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [12.4.0](https://github.com/uktrade/platform-tools/compare/12.3.0...12.4.0) (2024-12-06) + + +### Features + +* DBTP-1568 - Add s3 support for cross environment service access ([#654](https://github.com/uktrade/platform-tools/issues/654)) ([7e1d75f](https://github.com/uktrade/platform-tools/commit/7e1d75f95cacb68e01f0f62448359166509c20b0)) + + +### Bug Fixes + +* DBTP-1498 - Add option for database dump filename ([#681](https://github.com/uktrade/platform-tools/issues/681)) ([d06ddcc](https://github.com/uktrade/platform-tools/commit/d06ddcc0253a76950f54b881af84be14b0981b66)) +* DBTP-1498 - Add validation for database copy pipeline ([#683](https://github.com/uktrade/platform-tools/issues/683)) ([cda1e7b](https://github.com/uktrade/platform-tools/commit/cda1e7bc9daa1732e9032c7d6566716e3151b961)) + + +### Documentation + +* Document new dbt-platform-helper architecture ([#669](https://github.com/uktrade/platform-tools/issues/669)) ([ae4862d](https://github.com/uktrade/platform-tools/commit/ae4862da9e3e3d39c82c99222fa21450191f260a)) + ## 
[12.3.0](https://github.com/uktrade/platform-tools/compare/12.2.4...12.3.0) (2024-12-03) diff --git a/pyproject.toml b/pyproject.toml index 5d45dfb70..2aa14d184 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.poetry] name = "dbt-platform-helper" -version = "12.3.0" +version = "12.4.0" description = "Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot." authors = ["Department for Business and Trade Platform Team "] license = "MIT" diff --git a/release-manifest.json b/release-manifest.json index b06fb695a..a9908fe75 100644 --- a/release-manifest.json +++ b/release-manifest.json @@ -1,3 +1,3 @@ { - ".": "12.3.0" + ".": "12.4.0" } From bbc7dcdfa1a47a92ed86126526d509ad8d5897eb Mon Sep 17 00:00:00 2001 From: Kate Sugden <107400614+ksugden@users.noreply.github.com> Date: Mon, 9 Dec 2024 11:33:53 +0000 Subject: [PATCH 33/38] fix: DBTP-1290 - fix red herring "Not a Git repository" error (#682) Co-authored-by: chiaramapellimt --- dbt_platform_helper/commands/pipeline.py | 30 +------------------ .../platform_helper/test_command_pipeline.py | 10 +++++++ 2 files changed, 11 insertions(+), 29 deletions(-) diff --git a/dbt_platform_helper/commands/pipeline.py b/dbt_platform_helper/commands/pipeline.py index ea7ec4328..f4aafffb7 100644 --- a/dbt_platform_helper/commands/pipeline.py +++ b/dbt_platform_helper/commands/pipeline.py @@ -67,14 +67,9 @@ def generate(terraform_platform_modules_version, deploy_branch): pipeline_config = load_and_validate_platform_config() has_codebase_pipelines = CODEBASE_PIPELINES_KEY in pipeline_config - has_legacy_environment_pipelines = ENVIRONMENTS_KEY in pipeline_config has_environment_pipelines = ENVIRONMENT_PIPELINES_KEY in pipeline_config - if ( - not has_codebase_pipelines - and not has_legacy_environment_pipelines - and not has_environment_pipelines - ): + if not (has_codebase_pipelines or has_environment_pipelines): click.secho("No pipelines defined: 
nothing to do.", err=True, fg="yellow") return @@ -178,29 +173,6 @@ def _generate_codebase_pipeline( ) -def _generate_copilot_environments_pipeline( - app_name, codestar_connection_arn, git_repo, configuration, base_path, pipelines_dir, templates -): - makedirs(pipelines_dir / "environments/overrides", exist_ok=True) - - template_data = { - "app_name": app_name, - "git_repo": git_repo, - "codestar_connection_arn": codestar_connection_arn, - "pipeline_environments": configuration, - } - - _create_file_from_template( - base_path, "environments/buildspec.yml", pipelines_dir, template_data, templates - ) - _create_file_from_template( - base_path, "environments/manifest.yml", pipelines_dir, template_data, templates - ) - _create_file_from_template( - base_path, "environments/overrides/cfn.patches.yml", pipelines_dir, template_data, templates - ) - - def _create_file_from_template( base_path, file_name, pipelines_dir, template_data, templates, template_name=None ): diff --git a/tests/platform_helper/test_command_pipeline.py b/tests/platform_helper/test_command_pipeline.py index 5d06e2863..beb86146f 100644 --- a/tests/platform_helper/test_command_pipeline.py +++ b/tests/platform_helper/test_command_pipeline.py @@ -186,6 +186,16 @@ def test_pipeline_generate_with_empty_platform_config_yml_outputs_warning(get_aw assert "No pipelines defined: nothing to do." in result.output +@patch("dbt_platform_helper.commands.pipeline.load_and_validate_platform_config") +def test_pipeline_generate_with_non_empty_platform_config_but_no_pipelines_outputs_warning( + mock_config, +): + mock_config.return_value = {"environments": {}} + result = CliRunner().invoke(generate) + + assert "No pipelines defined: nothing to do." 
in result.output + + @freeze_time("2023-08-22 16:00:00") @patch("dbt_platform_helper.jinja2_tags.version", new=Mock(return_value="v0.1-TEST")) @patch("dbt_platform_helper.utils.aws.get_aws_session_or_abort") From 5304ae2d82382d6d710bacc68a79301d5b585300 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Mon, 9 Dec 2024 11:36:02 +0000 Subject: [PATCH 34/38] chore: Remove old AWS Copilot environment pipeline templates (#684) --- .../pipelines/environments/buildspec.yml | 80 ------------------- .../pipelines/environments/manifest.yml | 48 ----------- .../environments/overrides/cfn.patches.yml | 21 ----- 3 files changed, 149 deletions(-) delete mode 100644 dbt_platform_helper/templates/pipelines/environments/buildspec.yml delete mode 100644 dbt_platform_helper/templates/pipelines/environments/manifest.yml delete mode 100644 dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml diff --git a/dbt_platform_helper/templates/pipelines/environments/buildspec.yml b/dbt_platform_helper/templates/pipelines/environments/buildspec.yml deleted file mode 100644 index c3ea49d6c..000000000 --- a/dbt_platform_helper/templates/pipelines/environments/buildspec.yml +++ /dev/null @@ -1,80 +0,0 @@ -# {% extra_header %} -# {% version_info %} -# Buildspec runs in the build stage of your environment pipeline to generate the environment CloudFormation stack config. -version: 0.2 -env: - variables: - DYFF_VERSION: 1.5.8 -phases: - install: - commands: - - cd $CODEBUILD_SRC_DIR - - | - if [ ! 
-f .copilot-version ]; then - echo "Cannot find .copilot-version file" - exit 1 - fi - - COPILOT_VERSION=$(cat .copilot-version) - # Install pyyaml and dbt-platform-helper - - pip install PyYAML dbt-platform-helper - # Reinstall if we require a different version to the latest - - CURRENT_PLATFORM_HELPER_VERSION=$(platform-helper --version) - - export PLATFORM_HELPER_VERSION=$(platform-helper version get-platform-helper-for-project) - - | - if [ ! "${PLATFORM_HELPER_VERSION}" == "${CURRENT_PLATFORM_HELPER_VERSION}" ] - then - pip uninstall dbt-platform-helper - pip install dbt-platform-helper==$PLATFORM_HELPER_VERSION - fi - - mkdir ./build-tools - - cd ./build-tools - # Install copilot - - wget -q "https://ecs-cli-v2-release.s3.amazonaws.com/copilot-linux-v${COPILOT_VERSION}" - - mv "./copilot-linux-v${COPILOT_VERSION}" ./copilot - - chmod +x ./copilot - # Install dyff - yaml differ - - wget -q "https://github.com/homeport/dyff/releases/download/v${DYFF_VERSION}/dyff_${DYFF_VERSION}_linux_amd64.tar.gz" - - tar -zxvf "dyff_${DYFF_VERSION}_linux_amd64.tar.gz" - - chmod +x ./dyff - build: - commands: - - cd $CODEBUILD_SRC_DIR - - cp -r copilot/ current-copilot/ - - platform-helper copilot make-addons - - > - for FILE in $(ls copilot/**/addons/*.yml); do - ./build-tools/dyff between --omit-header $FILE current-$FILE >> ./build-tools/file-differences - done; - - | - if [[ "$(cat ./build-tools/file-differences)" = *[![:space:]]* ]]; then - echo 'Changes are introduced with version ${PLATFORM_HELPER_VERSION} of platform-helper:' - echo - for FILE in $(ls copilot/**/addons/*.yml); do - echo "Changes in $FILE:" - ./build-tools/dyff between --omit-header $FILE current-$FILE - done; - echo - echo 'Ensure you are running version ${PLATFORM_HELPER_VERSION} with pip install dbt-platform-helper==${PLATFORM_HELPER_VERSION}' - echo 'And run platform-helper copilot make-addons to regenerate your addons templates' - exit 1 - fi - post_build: - commands: - - git checkout -- . 
- - export COLOR="false" - - export CI="true" - - pipeline=$(cat $CODEBUILD_SRC_DIR/copilot/pipelines/environments/manifest.yml | python -c 'import sys, json, yaml; print(json.dumps(yaml.safe_load(sys.stdin.read())))') - - stages=$(echo $pipeline | jq -r '.stages[].name') - # Generate the cloudformation templates. - - > - for env in $stages; do - ./build-tools/copilot env package -n $env --output-dir './infrastructure' --upload-assets --force; - if [ $? -ne 0 ]; then - echo "Cloudformation stack and config files were not generated. Please check build logs to see if there was a manifest validation error." 1>&2; - exit 1; - fi - done; - - ls -lah ./infrastructure -artifacts: - files: - - "infrastructure/*" diff --git a/dbt_platform_helper/templates/pipelines/environments/manifest.yml b/dbt_platform_helper/templates/pipelines/environments/manifest.yml deleted file mode 100644 index aaa1ce0d5..000000000 --- a/dbt_platform_helper/templates/pipelines/environments/manifest.yml +++ /dev/null @@ -1,48 +0,0 @@ -# {% extra_header %} -# {% version_info %} -# This YAML file defines your pipeline: the source repository it tracks and the order of the environments to deploy to. -# For more info: https://aws.github.io/copilot-cli/docs/manifest/pipeline/ - -# The name of the pipeline. -name: environments - -# The version of the schema used in this template. -version: 1 - -# This section defines your source, changes to which trigger your pipeline. -source: - # The name of the provider that is used to store the source artifacts. - # (i.e. GitHub, Bitbucket, CodeCommit) - provider: GitHub - # Additional properties that further specify the location of the artifacts. 
- properties: - branch: main - repository: https://github.com/{{ git_repo }} - connection_name: {{ app_name }} - -build: - additional_policy: - PolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: Allow - Action: - - iam:ListAccountAliases - Resource: '*' - -# This section defines the order of the environments your pipeline will deploy to. -stages: -{% for name, environment in pipeline_environments.items() -%} - - # The name of the environment. - name: {{ name }} - deployments: - deploy-env: - template_path: infrastructure/{{ name }}.env.yml - template_config: infrastructure/{{ name }}.env.params.json - stack_name: {{ app_name }}-{{ name }} - # Optional: flag for manual approval action before deployment. - {% if not environment.requires_approval %}# {% endif %}requires_approval: true - # Optional: use test commands to validate this stage of your build. - # test_commands: [echo 'running tests', make test] - -{% endfor %} diff --git a/dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml b/dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml deleted file mode 100644 index 9b7539c34..000000000 --- a/dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +++ /dev/null @@ -1,21 +0,0 @@ -# {% extra_header %} -# {% version_info %} -# Give the source stage a more meaningful name -- op: replace - path: /Resources/Pipeline/Properties/Stages/0/Actions/0/Name - value: DeployCodebase - -# Add git metadata to the source output artefact -- op: add - path: /Resources/Pipeline/Properties/Stages/0/Actions/0/Configuration/OutputArtifactFormat - value: CODEBUILD_CLONE_REF - -# Add codestar permissions to codebuild role -- op: add - path: /Resources/BuildProjectPolicy/Properties/PolicyDocument/Statement/- - value: - Effect: Allow - Action: - - codestar-connections:UseConnection - Resource: - - {{ codestar_connection_arn }} From 50de4f3896e7d03ab0ed626ff5c1339cd1da817d Mon Sep 17 00:00:00 2001 From: 
Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Mon, 9 Dec 2024 11:36:25 +0000 Subject: [PATCH 35/38] refactor: DBTP-1596 Separate out the platform-config.yml schema validation (part 1) (#680) --- dbt_platform_helper/constants.py | 4 + .../utils/platform_config_schema.py | 604 ++++++++++++++++++ dbt_platform_helper/utils/validation.py | 568 +--------------- .../platform_helper/utils/test_validation.py | 26 +- 4 files changed, 623 insertions(+), 579 deletions(-) create mode 100644 dbt_platform_helper/utils/platform_config_schema.py diff --git a/dbt_platform_helper/constants.py b/dbt_platform_helper/constants.py index f6b1b13b5..9c6499f84 100644 --- a/dbt_platform_helper/constants.py +++ b/dbt_platform_helper/constants.py @@ -1,6 +1,10 @@ +# Todo: Move to Config provider PLATFORM_CONFIG_FILE = "platform-config.yml" +# Todo: Can we get rid of this yet? PLATFORM_HELPER_VERSION_FILE = ".platform-helper-version" +# Todo: Move to ??? DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION = "5" +# Todo: Move to cache provider PLATFORM_HELPER_CACHE_FILE = ".platform-helper-config-cache.yml" # Keys diff --git a/dbt_platform_helper/utils/platform_config_schema.py b/dbt_platform_helper/utils/platform_config_schema.py new file mode 100644 index 000000000..7f1618aa9 --- /dev/null +++ b/dbt_platform_helper/utils/platform_config_schema.py @@ -0,0 +1,604 @@ +import ipaddress +import re + +from schema import Optional +from schema import Or +from schema import Regex +from schema import Schema +from schema import SchemaError + + +def _string_matching_regex(regex_pattern: str): + def validate(string): + if not re.match(regex_pattern, string): + # Todo: Raise suitable PlatformException? + raise SchemaError( + f"String '{string}' does not match the required pattern '{regex_pattern}'." 
+ ) + return string + + return validate + + +def _is_integer_between(lower_limit, upper_limit): + def validate(value): + if isinstance(value, int) and lower_limit <= value <= upper_limit: + return True + # Todo: Raise suitable PlatformException? + raise SchemaError(f"should be an integer between {lower_limit} and {upper_limit}") + + return validate + + +_valid_schema_key = Regex( + r"^([a-z][a-zA-Z0-9_-]*|\*)$", + error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore", +) + +# Todo: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format +_valid_branch_name = _string_matching_regex(r"^((?!\*).)*(\*)?$") + +_valid_deletion_policy = Or("Delete", "Retain") + +_valid_postgres_deletion_policy = Or("Delete", "Retain", "Snapshot") + +_valid_environment_name = Regex( + r"^([a-z][a-zA-Z0-9]*|\*)$", + error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment", + # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason. 
+) + + +def _valid_kms_key_arn(key): + return Regex( + r"^arn:aws:kms:.*:\d{12}:(key|alias).*", + error=f"{key} must contain a valid ARN for a KMS key", + ) + + +def _valid_iam_role_arn(key): + return Regex( + r"^arn:aws:iam::\d{12}:role/.*", + error=f"{key} must contain a valid ARN for an IAM role", + ) + + +def _valid_dbt_email_address(key): + return Regex( + r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$", + error=f"{key} must contain a valid DBT email address", + ) + + +_cross_environment_service_access_schema = { + "application": str, + "environment": _valid_environment_name, + "account": str, + "service": str, + "read": bool, + "write": bool, + "cyber_sign_off_by": _valid_dbt_email_address("cyber_sign_off_by"), +} + + +def _no_configuration_required_schema(schema_type): + return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])}) + + +# Application load balancer.... +_valid_alb_cache_policy = { + "min_ttl": int, + "max_ttl": int, + "default_ttl": int, + "cookies_config": Or("none", "whitelist", "allExcept", "all"), + "header": Or("none", "whitelist"), + "query_string_behavior": Or("none", "whitelist", "allExcept", "all"), + Optional("cookie_list"): list, + Optional("headers_list"): list, + Optional("cache_policy_query_strings"): list, +} + +_valid_alb_paths_definition = { + Optional("default"): { + "cache": str, + "request": str, + }, + Optional("additional"): list[ + { + "path": str, + "cache": str, + "request": str, + } + ], +} + +_alb_schema = { + "type": "alb", + Optional("environments"): { + _valid_environment_name: Or( + { + Optional("additional_address_list"): list, + Optional("allowed_methods"): list, + Optional("cached_methods"): list, + Optional("cdn_compress"): bool, + Optional("cdn_domains_list"): dict, + Optional("cdn_geo_locations"): list, + Optional("cdn_geo_restriction_type"): str, + Optional("cdn_logging_bucket"): str, + Optional("cdn_logging_bucket_prefix"): str, + Optional("cdn_timeout_seconds"): int, + 
Optional("default_waf"): str, + Optional("domain_prefix"): str, + Optional("enable_logging"): bool, + Optional("env_root"): str, + Optional("forwarded_values_forward"): str, + Optional("forwarded_values_headers"): list, + Optional("forwarded_values_query_string"): bool, + Optional("origin_protocol_policy"): str, + Optional("origin_ssl_protocols"): list, + Optional("slack_alert_channel_alb_secret_rotation"): str, + Optional("viewer_certificate_minimum_protocol_version"): str, + Optional("viewer_certificate_ssl_support_method"): str, + Optional("viewer_protocol_policy"): str, + Optional("cache_policy"): dict({str: _valid_alb_cache_policy}), + Optional("origin_request_policy"): dict({str: {}}), + Optional("paths"): dict({str: _valid_alb_paths_definition}), + }, + None, + ) + }, +} + +# Monitoring... +_monitoring_schema = { + "type": "monitoring", + Optional("environments"): { + _valid_environment_name: { + Optional("enable_ops_center"): bool, + } + }, +} + + +# Opensearch... +class ConditionalOpensSearchSchema(Schema): + def validate(self, data, _is_conditional_schema=True): + data = super(ConditionalOpensSearchSchema, self).validate( + data, _is_conditional_schema=False + ) + if _is_conditional_schema: + default_plan = None + default_volume_size = None + + default_environment_config = data["environments"].get( + "*", data["environments"].get("default", None) + ) + if default_environment_config: + default_plan = default_environment_config.get("plan", None) + default_volume_size = default_environment_config.get("volume_size", None) + + for env in data["environments"]: + volume_size = data["environments"][env].get("volume_size", default_volume_size) + plan = data["environments"][env].get("plan", default_plan) + + if volume_size: + if not plan: + # Todo: Raise suitable PlatformException? + raise SchemaError(f"Missing key: 'plan'") + + if volume_size < _valid_opensearch_min_volume_size: + # Todo: Raise suitable PlatformException? 
+ raise SchemaError( + f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {_valid_opensearch_min_volume_size}" + ) + + for key in _valid_opensearch_max_volume_size: + if ( + plan == key + and not volume_size <= _valid_opensearch_max_volume_size[key] + ): + # Todo: Raise suitable PlatformException? + raise SchemaError( + f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {_valid_opensearch_min_volume_size} and {_valid_opensearch_max_volume_size[key]} for plan {plan}" + ) + + return data + + +# Todo: Move to OpenSearch provider? +_valid_opensearch_plans = Or( + "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha" +) +# Todo: Move to OpenSearch provider? +_valid_opensearch_min_volume_size = 10 +# Todo: Move to OpenSearch provider? +_valid_opensearch_max_volume_size = { + "tiny": 100, + "small": 200, + "small-ha": 200, + "medium": 512, + "medium-ha": 512, + "large": 1000, + "large-ha": 1000, + "x-large": 1500, + "x-large-ha": 1500, +} + +_opensearch_schema = { + "type": "opensearch", + Optional("environments"): { + _valid_environment_name: { + Optional("engine"): str, + Optional("deletion_policy"): _valid_deletion_policy, + Optional("plan"): _valid_opensearch_plans, + Optional("volume_size"): int, + Optional("ebs_throughput"): int, + Optional("ebs_volume_type"): str, + Optional("instance"): str, + Optional("instances"): int, + Optional("master"): bool, + Optional("es_app_log_retention_in_days"): int, + Optional("index_slow_log_retention_in_days"): int, + Optional("audit_log_retention_in_days"): int, + Optional("search_slow_log_retention_in_days"): int, + Optional("password_special_characters"): str, + Optional("urlencode_password"): bool, + } + }, +} + +# Prometheus... 
+_prometheus_policy_schema = { + "type": "prometheus-policy", + Optional("services"): Or("__all__", [str]), + Optional("environments"): { + _valid_environment_name: { + "role_arn": str, + } + }, +} + +# Postgres... +# Todo: Move to Postgres provider? +_valid_postgres_plans = Or( + "tiny", + "small", + "small-ha", + "small-high-io", + "medium", + "medium-ha", + "medium-high-io", + "large", + "large-ha", + "large-high-io", + "x-large", + "x-large-ha", + "x-large-high-io", +) + +# Todo: Move to Postgres provider? +_valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2") + +_valid_postgres_database_copy = { + "from": _valid_environment_name, + "to": _valid_environment_name, + Optional("from_account"): str, + Optional("to_account"): str, + Optional("pipeline"): {Optional("schedule"): str}, +} + +_postgres_schema = { + "type": "postgres", + "version": (Or(int, float)), + Optional("deletion_policy"): _valid_postgres_deletion_policy, + Optional("environments"): { + _valid_environment_name: { + Optional("plan"): _valid_postgres_plans, + Optional("volume_size"): _is_integer_between(20, 10000), + Optional("iops"): _is_integer_between(1000, 9950), + Optional("snapshot_id"): str, + Optional("deletion_policy"): _valid_postgres_deletion_policy, + Optional("deletion_protection"): bool, + Optional("multi_az"): bool, + Optional("storage_type"): _valid_postgres_storage_types, + Optional("backup_retention_days"): _is_integer_between(1, 35), + } + }, + Optional("database_copy"): [_valid_postgres_database_copy], + Optional("objects"): [ + { + "key": str, + Optional("body"): str, + } + ], +} + +# Redis... +# Todo move to Redis provider? 
+_valid_redis_plans = Or( + "micro", + "micro-ha", + "tiny", + "tiny-ha", + "small", + "small-ha", + "medium", + "medium-ha", + "large", + "large-ha", + "x-large", + "x-large-ha", +) + +_redis_schema = { + "type": "redis", + Optional("environments"): { + _valid_environment_name: { + Optional("plan"): _valid_redis_plans, + Optional("engine"): str, + Optional("replicas"): _is_integer_between(0, 5), + Optional("deletion_policy"): _valid_deletion_policy, + Optional("apply_immediately"): bool, + Optional("automatic_failover_enabled"): bool, + Optional("instance"): str, + Optional("multi_az_enabled"): bool, + } + }, +} + + +# S3 Bucket... +def _valid_s3_bucket_name(name: str): + errors = [] + if not (2 < len(name) < 64): + errors.append("Length must be between 3 and 63 characters inclusive.") + + if not re.match(r"^[a-z0-9].*[a-z0-9]$", name): + errors.append("Names must start and end with 0-9 or a-z.") + + if not re.match(r"^[a-z0-9.-]*$", name): + errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.") + + if ".." in name: + errors.append("Names cannot contain two adjacent periods.") + + try: + ipaddress.ip_address(name) + errors.append("Names cannot be IP addresses.") + except ValueError: + pass + + for prefix in ("xn--", "sthree-"): + if name.startswith(prefix): + errors.append(f"Names cannot be prefixed '{prefix}'.") + + for suffix in ("-s3alias", "--ol-s3"): + if name.endswith(suffix): + errors.append(f"Names cannot be suffixed '{suffix}'.") + + if errors: + # Todo: Raise suitable PlatformException? 
+        raise SchemaError(
+            "Bucket name '{}' is invalid:\n{}".format(name, "\n".join(f"  {e}" for e in errors))
+        )
+
+    return True
+
+
+def _valid_s3_bucket_arn(key):
+    return Regex(
+        r"^arn:aws:s3::.*",
+        error=f"{key} must contain a valid ARN for an S3 bucket",
+    )
+
+
+_valid_s3_data_migration = {
+    "import": {
+        Optional("source_kms_key_arn"): _valid_kms_key_arn("source_kms_key_arn"),
+        "source_bucket_arn": _valid_s3_bucket_arn("source_bucket_arn"),
+        "worker_role_arn": _valid_iam_role_arn("worker_role_arn"),
+    },
+}
+
+_valid_s3_bucket_retention_policy = Or(
+    None,
+    {
+        "mode": Or("GOVERNANCE", "COMPLIANCE"),
+        Or("days", "years", only_one=True): int,
+    },
+)
+
+_valid_s3_bucket_lifecycle_rule = {
+    Optional("filter_prefix"): str,
+    "expiration_days": int,
+    "enabled": bool,
+}
+
+_valid_s3_bucket_external_role_access = {
+    "role_arn": _valid_iam_role_arn("role_arn"),
+    "read": bool,
+    "write": bool,
+    "cyber_sign_off_by": _valid_dbt_email_address("cyber_sign_off_by"),
+}
+
+_valid_s3_bucket_external_role_access_name = Regex(
+    r"^([a-z][a-zA-Z0-9_-]*)$",
+    error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hyphen or underscore",
+)
+
+_valid_s3_base_definition = dict(
+    {
+        Optional("readonly"): bool,
+        Optional("serve_static_content"): bool,
+        Optional("services"): Or("__all__", [str]),
+        Optional("environments"): {
+            _valid_environment_name: {
+                "bucket_name": _valid_s3_bucket_name,
+                Optional("deletion_policy"): _valid_deletion_policy,
+                Optional("retention_policy"): _valid_s3_bucket_retention_policy,
+                Optional("versioning"): bool,
+                Optional("lifecycle_rules"): [_valid_s3_bucket_lifecycle_rule],
+                Optional("data_migration"): _valid_s3_data_migration,
+                Optional("external_role_access"): {
+                    _valid_schema_key: _valid_s3_bucket_external_role_access
+                },
+                Optional("cross_environment_service_access"): {
+                    _valid_schema_key: _cross_environment_service_access_schema
+                },
+            },
+        },
+    }
+)
+
+_s3_bucket_schema = _valid_s3_base_definition | { + "type": "s3", + Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}], +} + +_s3_bucket_policy_schema = _valid_s3_base_definition | {"type": "s3-policy"} + +_default_versions_schema = { + Optional("terraform-platform-modules"): str, + Optional("platform-helper"): str, +} + +_valid_environment_specific_version_overrides = { + Optional("terraform-platform-modules"): str, +} + +_valid_pipeline_specific_version_overrides = { + Optional("platform-helper"): str, +} + +_environments_schema = { + str: Or( + None, + { + Optional("accounts"): { + "deploy": { + "name": str, + "id": str, + }, + "dns": { + "name": str, + "id": str, + }, + }, + # Todo: Is requires_approval relevant? + Optional("requires_approval"): bool, + Optional("versions"): _valid_environment_specific_version_overrides, + Optional("vpc"): str, + }, + ) +} + +# Codebase pipelines... +_codebase_pipelines_schema = [ + { + "name": str, + "repository": str, + Optional("additional_ecr_repository"): str, + Optional("deploy_repository_branch"): str, + "services": list[str], + "pipelines": [ + Or( + { + "name": str, + "branch": _valid_branch_name, + "environments": [ + { + "name": str, + Optional("requires_approval"): bool, + } + ], + }, + { + "name": str, + "tag": bool, + "environments": [ + { + "name": str, + Optional("requires_approval"): bool, + } + ], + }, + ), + ], + }, +] + +# Environment pipelines... 
+_environment_pipelines_schema = { + str: { + Optional("account"): str, + Optional("branch", default="main"): _valid_branch_name, + Optional("pipeline_to_trigger"): str, + Optional("versions"): _valid_pipeline_specific_version_overrides, + "slack_channel": str, + "trigger_on_push": bool, + "environments": { + str: Or( + None, + { + Optional("accounts"): { + "deploy": { + "name": str, + "id": str, + }, + "dns": { + "name": str, + "id": str, + }, + }, + Optional("requires_approval"): bool, + Optional("versions"): _valid_environment_specific_version_overrides, + Optional("vpc"): str, + }, + ) + }, + } +} + + +# Used outside this file by validate_platform_config() +PLATFORM_CONFIG_SCHEMA = Schema( + { + # The following line is for the AWS Copilot version, will be removed under DBTP-1002 + "application": str, + Optional("legacy_project", default=False): bool, + Optional("default_versions"): _default_versions_schema, + Optional("accounts"): list[str], + Optional("environments"): _environments_schema, + Optional("codebase_pipelines"): _codebase_pipelines_schema, + Optional("environment_pipelines"): _environment_pipelines_schema, + Optional("extensions"): { + str: Or( + _alb_schema, + _monitoring_schema, + _opensearch_schema, + _postgres_schema, + _prometheus_policy_schema, + _redis_schema, + _s3_bucket_schema, + _s3_bucket_policy_schema, + ) + }, + } +) + +# This is used outside this file by validate_addons() +EXTENSION_SCHEMAS = { + "alb": Schema(_alb_schema), + "appconfig-ipfilter": _no_configuration_required_schema("appconfig-ipfilter"), + "opensearch": ConditionalOpensSearchSchema(_opensearch_schema), + "postgres": Schema(_postgres_schema), + "prometheus-policy": Schema(_prometheus_policy_schema), + "redis": Schema(_redis_schema), + "s3": Schema(_s3_bucket_schema), + "s3-policy": Schema(_s3_bucket_policy_schema), + "subscription-filter": _no_configuration_required_schema("subscription-filter"), + # Todo: We think the next three are no longer relevant? 
+ "monitoring": Schema(_monitoring_schema), + "vpc": _no_configuration_required_schema("vpc"), + "xray": _no_configuration_required_schema("xray"), +} diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index aaaf62e98..3416bd6e9 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -1,14 +1,9 @@ -import ipaddress import os import re from pathlib import Path import click import yaml -from schema import Optional -from schema import Or -from schema import Regex -from schema import Schema from schema import SchemaError from yaml.parser import ParserError from yamllint import config @@ -22,60 +17,8 @@ from dbt_platform_helper.utils.aws import get_supported_redis_versions from dbt_platform_helper.utils.files import apply_environment_defaults from dbt_platform_helper.utils.messages import abort_with_error - - -def validate_string(regex_pattern: str): - def validator(string): - if not re.match(regex_pattern, string): - raise SchemaError( - f"String '{string}' does not match the required pattern '{regex_pattern}'. For more details on valid string patterns see: https://aws.github.io/copilot-cli/docs/manifest/lb-web-service/" - ) - return string - - return validator - - -S3_BUCKET_NAME_ERROR_TEMPLATE = "Bucket name '{}' is invalid:\n{}" -AVAILABILITY_UNCERTAIN_TEMPLATE = ( - "Warning: Could not determine the availability of bucket name '{}'." -) -BUCKET_NAME_IN_USE_TEMPLATE = "Warning: Bucket name '{}' is already in use. Check your AWS accounts to see if this is a problem." - - -def validate_s3_bucket_name(name: str): - errors = [] - if not (2 < len(name) < 64): - errors.append("Length must be between 3 and 63 characters inclusive.") - - if not re.match(r"^[a-z0-9].*[a-z0-9]$", name): - errors.append("Names must start and end with 0-9 or a-z.") - - if not re.match(r"^[a-z0-9.-]*$", name): - errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.") - - if ".." 
in name: - errors.append("Names cannot contain two adjacent periods.") - - try: - ipaddress.ip_address(name) - errors.append("Names cannot be IP addresses.") - except ValueError: - pass - - for prefix in ("xn--", "sthree-"): - if name.startswith(prefix): - errors.append(f"Names cannot be prefixed '{prefix}'.") - - for suffix in ("-s3alias", "--ol-s3"): - if name.endswith(suffix): - errors.append(f"Names cannot be suffixed '{suffix}'.") - - if errors: - raise SchemaError( - S3_BUCKET_NAME_ERROR_TEMPLATE.format(name, "\n".join(f" {e}" for e in errors)) - ) - - return True +from dbt_platform_helper.utils.platform_config_schema import EXTENSION_SCHEMAS +from dbt_platform_helper.utils.platform_config_schema import PLATFORM_CONFIG_SCHEMA def validate_addons(addons: dict): @@ -90,7 +33,7 @@ def validate_addons(addons: dict): if not addon_type: errors[addon_name] = f"Missing addon type in addon '{addon_name}'" continue - schema = SCHEMA_MAP.get(addon_type, None) + schema = EXTENSION_SCHEMAS.get(addon_type, None) if not schema: errors[addon_name] = ( f"Unsupported addon type '{addon_type}' in addon '{addon_name}'" @@ -116,15 +59,6 @@ def validate_addons(addons: dict): return errors -def int_between(lower, upper): - def is_between(value): - if isinstance(value, int) and lower <= value <= upper: - return True - raise SchemaError(f"should be an integer between {lower} and {upper}") - - return is_between - - def float_between_with_halfstep(lower, upper): def is_between(value): is_number = isinstance(value, int) or isinstance(value, float) @@ -137,434 +71,6 @@ def is_between(value): return is_between -ENV_NAME = Regex( - r"^([a-z][a-zA-Z0-9]*|\*)$", - error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment", - # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason. 
-) - -range_validator = validate_string(r"^\d+-\d+$") -seconds_validator = validate_string(r"^\d+s$") -branch_wildcard_validator = validate_string(r"^((?!\*).)*(\*)?$") - -NUMBER = Or(int, float) -DELETION_POLICY = Or("Delete", "Retain") -DB_DELETION_POLICY = Or("Delete", "Retain", "Snapshot") -DELETION_PROTECTION = bool - -REDIS_PLANS = Or( - "micro", - "micro-ha", - "tiny", - "tiny-ha", - "small", - "small-ha", - "medium", - "medium-ha", - "large", - "large-ha", - "x-large", - "x-large-ha", -) - -REDIS_ENGINE_VERSIONS = str - -REDIS_DEFINITION = { - "type": "redis", - Optional("environments"): { - ENV_NAME: { - Optional("plan"): REDIS_PLANS, - Optional("engine"): REDIS_ENGINE_VERSIONS, - Optional("replicas"): int_between(0, 5), - Optional("deletion_policy"): DELETION_POLICY, - Optional("apply_immediately"): bool, - Optional("automatic_failover_enabled"): bool, - Optional("instance"): str, - Optional("multi_az_enabled"): bool, - } - }, -} - -POSTGRES_PLANS = Or( - "tiny", - "small", - "small-ha", - "small-high-io", - "medium", - "medium-ha", - "medium-high-io", - "large", - "large-ha", - "large-high-io", - "x-large", - "x-large-ha", - "x-large-high-io", -) -POSTGRES_STORAGE_TYPES = Or("gp2", "gp3", "io1", "io2") - -RETENTION_POLICY = Or( - None, - { - "mode": Or("GOVERNANCE", "COMPLIANCE"), - Or("days", "years", only_one=True): int, - }, -) - -DATABASE_COPY = { - "from": ENV_NAME, - "to": ENV_NAME, - Optional("from_account"): str, - Optional("to_account"): str, - Optional("pipeline"): {Optional("schedule"): str}, -} - -POSTGRES_DEFINITION = { - "type": "postgres", - "version": NUMBER, - Optional("deletion_policy"): DB_DELETION_POLICY, - Optional("environments"): { - ENV_NAME: { - Optional("plan"): POSTGRES_PLANS, - Optional("volume_size"): int_between(20, 10000), - Optional("iops"): int_between(1000, 9950), - Optional("snapshot_id"): str, - Optional("deletion_policy"): DB_DELETION_POLICY, - Optional("deletion_protection"): DELETION_PROTECTION, - 
Optional("multi_az"): bool, - Optional("storage_type"): POSTGRES_STORAGE_TYPES, - Optional("backup_retention_days"): int_between(1, 35), - } - }, - Optional("database_copy"): [DATABASE_COPY], - Optional("objects"): [ - { - "key": str, - Optional("body"): str, - } - ], -} - -LIFECYCLE_RULE = { - Optional("filter_prefix"): str, - "expiration_days": int, - "enabled": bool, -} - - -def kms_key_arn_regex(key): - return Regex( - r"^arn:aws:kms:.*:\d{12}:(key|alias).*", - error=f"{key} must contain a valid ARN for a KMS key", - ) - - -def s3_bucket_arn_regex(key): - return Regex( - r"^arn:aws:s3::.*", - error=f"{key} must contain a valid ARN for an S3 bucket", - ) - - -def iam_role_arn_regex(key): - return Regex( - r"^arn:aws:iam::\d{12}:role/.*", - error=f"{key} must contain a valid ARN for an IAM role", - ) - - -def dbt_email_address_regex(key): - return Regex( - r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$", - error=f"{key} must contain a valid DBT email address", - ) - - -EXTERNAL_ROLE_ACCESS = { - "role_arn": iam_role_arn_regex("role_arn"), - "read": bool, - "write": bool, - "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"), -} - -CROSS_ENVIRONMENT_SERVICE_ACCESS = { - "application": str, - "environment": ENV_NAME, - "account": str, - "service": str, - "read": bool, - "write": bool, - "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"), -} - -LOWER_ALPHANUMERIC = Regex( - r"^([a-z][a-zA-Z0-9_-]*|\*)$", - error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore", -) - -DATA_IMPORT = { - Optional("source_kms_key_arn"): kms_key_arn_regex("source_kms_key_arn"), - "source_bucket_arn": s3_bucket_arn_regex("source_bucket_arn"), - "worker_role_arn": iam_role_arn_regex("worker_role_arn"), -} - -DATA_MIGRATION = { - "import": DATA_IMPORT, -} - -S3_BASE = { - Optional("readonly"): bool, - Optional("serve_static_content"): bool, - Optional("services"): Or("__all__", [str]), - 
Optional("environments"): { - ENV_NAME: { - "bucket_name": validate_s3_bucket_name, - Optional("deletion_policy"): DELETION_POLICY, - Optional("retention_policy"): RETENTION_POLICY, - Optional("versioning"): bool, - Optional("lifecycle_rules"): [LIFECYCLE_RULE], - Optional("data_migration"): DATA_MIGRATION, - Optional("external_role_access"): {LOWER_ALPHANUMERIC: EXTERNAL_ROLE_ACCESS}, - Optional("cross_environment_service_access"): { - LOWER_ALPHANUMERIC: CROSS_ENVIRONMENT_SERVICE_ACCESS - }, - }, - }, -} - -S3_POLICY_DEFINITION = dict(S3_BASE) -S3_POLICY_DEFINITION.update({"type": "s3-policy"}) - -S3_DEFINITION = dict(S3_BASE) -S3_DEFINITION.update( - { - "type": "s3", - Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}], - } -) - -MONITORING_DEFINITION = { - "type": "monitoring", - Optional("environments"): { - ENV_NAME: { - Optional("enable_ops_center"): bool, - } - }, -} - -OPENSEARCH_PLANS = Or( - "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha" -) -OPENSEARCH_ENGINE_VERSIONS = str -OPENSEARCH_MIN_VOLUME_SIZE = 10 -OPENSEARCH_MAX_VOLUME_SIZE = { - "tiny": 100, - "small": 200, - "small-ha": 200, - "medium": 512, - "medium-ha": 512, - "large": 1000, - "large-ha": 1000, - "x-large": 1500, - "x-large-ha": 1500, -} - -OPENSEARCH_DEFINITION = { - "type": "opensearch", - Optional("environments"): { - ENV_NAME: { - Optional("engine"): OPENSEARCH_ENGINE_VERSIONS, - Optional("deletion_policy"): DELETION_POLICY, - Optional("plan"): OPENSEARCH_PLANS, - Optional("volume_size"): int, - Optional("ebs_throughput"): int, - Optional("ebs_volume_type"): str, - Optional("instance"): str, - Optional("instances"): int, - Optional("master"): bool, - Optional("es_app_log_retention_in_days"): int, - Optional("index_slow_log_retention_in_days"): int, - Optional("audit_log_retention_in_days"): int, - Optional("search_slow_log_retention_in_days"): int, - Optional("password_special_characters"): str, - 
Optional("urlencode_password"): bool, - } - }, -} - -CACHE_POLICY_DEFINITION = { - "min_ttl": int, - "max_ttl": int, - "default_ttl": int, - "cookies_config": Or("none", "whitelist", "allExcept", "all"), - "header": Or("none", "whitelist"), - "query_string_behavior": Or("none", "whitelist", "allExcept", "all"), - Optional("cookie_list"): list, - Optional("headers_list"): list, - Optional("cache_policy_query_strings"): list, -} - -PATHS_DEFINITION = { - Optional("default"): { - "cache": str, - "request": str, - }, - Optional("additional"): list[ - { - "path": str, - "cache": str, - "request": str, - } - ], -} - -ALB_DEFINITION = { - "type": "alb", - Optional("environments"): { - ENV_NAME: Or( - { - Optional("additional_address_list"): list, - Optional("allowed_methods"): list, - Optional("cached_methods"): list, - Optional("cdn_compress"): bool, - Optional("cdn_domains_list"): dict, - Optional("cdn_geo_locations"): list, - Optional("cdn_geo_restriction_type"): str, - Optional("cdn_logging_bucket"): str, - Optional("cdn_logging_bucket_prefix"): str, - Optional("cdn_timeout_seconds"): int, - Optional("default_waf"): str, - Optional("domain_prefix"): str, - Optional("enable_logging"): bool, - Optional("env_root"): str, - Optional("forwarded_values_forward"): str, - Optional("forwarded_values_headers"): list, - Optional("forwarded_values_query_string"): bool, - Optional("origin_protocol_policy"): str, - Optional("origin_ssl_protocols"): list, - Optional("slack_alert_channel_alb_secret_rotation"): str, - Optional("viewer_certificate_minimum_protocol_version"): str, - Optional("viewer_certificate_ssl_support_method"): str, - Optional("viewer_protocol_policy"): str, - Optional("cache_policy"): dict({str: CACHE_POLICY_DEFINITION}), - Optional("origin_request_policy"): dict({str: {}}), - Optional("paths"): dict({str: PATHS_DEFINITION}), - }, - None, - ) - }, -} - -PROMETHEUS_POLICY_DEFINITION = { - "type": "prometheus-policy", - Optional("services"): Or("__all__", [str]), - 
Optional("environments"): { - ENV_NAME: { - "role_arn": str, - } - }, -} - -_DEFAULT_VERSIONS_DEFINITION = { - Optional("terraform-platform-modules"): str, - Optional("platform-helper"): str, -} -_ENVIRONMENTS_VERSIONS_OVERRIDES = { - Optional("terraform-platform-modules"): str, -} -_PIPELINE_VERSIONS_OVERRIDES = { - Optional("platform-helper"): str, -} - -_ENVIRONMENTS_PARAMS = { - Optional("accounts"): { - "deploy": { - "name": str, - "id": str, - }, - "dns": { - "name": str, - "id": str, - }, - }, - Optional("requires_approval"): bool, - Optional("versions"): _ENVIRONMENTS_VERSIONS_OVERRIDES, - Optional("vpc"): str, -} - -ENVIRONMENTS_DEFINITION = {str: Or(None, _ENVIRONMENTS_PARAMS)} - -CODEBASE_PIPELINES_DEFINITION = [ - { - "name": str, - "repository": str, - Optional("additional_ecr_repository"): str, - Optional("deploy_repository_branch"): str, - "services": list[str], - "pipelines": [ - Or( - { - "name": str, - "branch": branch_wildcard_validator, - "environments": [ - { - "name": str, - Optional("requires_approval"): bool, - } - ], - }, - { - "name": str, - "tag": bool, - "environments": [ - { - "name": str, - Optional("requires_approval"): bool, - } - ], - }, - ), - ], - }, -] - -ENVIRONMENT_PIPELINES_DEFINITION = { - str: { - Optional("account"): str, - Optional("branch", default="main"): str, - Optional("pipeline_to_trigger"): str, - Optional("versions"): _PIPELINE_VERSIONS_OVERRIDES, - "slack_channel": str, - "trigger_on_push": bool, - "environments": {str: Or(None, _ENVIRONMENTS_PARAMS)}, - } -} - -PLATFORM_CONFIG_SCHEMA = Schema( - { - # The following line is for the AWS Copilot version, will be removed under DBTP-1002 - "application": str, - Optional("legacy_project", default=False): bool, - Optional("default_versions"): _DEFAULT_VERSIONS_DEFINITION, - Optional("accounts"): list[str], - Optional("environments"): ENVIRONMENTS_DEFINITION, - Optional("codebase_pipelines"): CODEBASE_PIPELINES_DEFINITION, - Optional("extensions"): { - str: Or( - 
REDIS_DEFINITION, - POSTGRES_DEFINITION, - S3_DEFINITION, - S3_POLICY_DEFINITION, - MONITORING_DEFINITION, - OPENSEARCH_DEFINITION, - ALB_DEFINITION, - PROMETHEUS_POLICY_DEFINITION, - ) - }, - Optional("environment_pipelines"): ENVIRONMENT_PIPELINES_DEFINITION, - } -) - - def validate_platform_config(config): PLATFORM_CONFIG_SCHEMA.validate(config) enriched_config = apply_environment_defaults(config) @@ -846,71 +352,3 @@ def config_file_check(path=PLATFORM_CONFIG_FILE): if errors: click.secho("\n".join(errors), bg="red", fg="white") exit(1) - - -S3_SCHEMA = Schema(S3_DEFINITION) -S3_POLICY_SCHEMA = Schema(S3_POLICY_DEFINITION) -POSTGRES_SCHEMA = Schema(POSTGRES_DEFINITION) -REDIS_SCHEMA = Schema(REDIS_DEFINITION) - - -class ConditionalSchema(Schema): - def validate(self, data, _is_conditional_schema=True): - data = super(ConditionalSchema, self).validate(data, _is_conditional_schema=False) - if _is_conditional_schema: - default_plan = None - default_volume_size = None - - default_environment_config = data["environments"].get( - "*", data["environments"].get("default", None) - ) - if default_environment_config: - default_plan = default_environment_config.get("plan", None) - default_volume_size = default_environment_config.get("volume_size", None) - - for env in data["environments"]: - volume_size = data["environments"][env].get("volume_size", default_volume_size) - plan = data["environments"][env].get("plan", default_plan) - - if volume_size: - if not plan: - raise SchemaError(f"Missing key: 'plan'") - - if volume_size < OPENSEARCH_MIN_VOLUME_SIZE: - raise SchemaError( - f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {OPENSEARCH_MIN_VOLUME_SIZE}" - ) - - for key in OPENSEARCH_MAX_VOLUME_SIZE: - if plan == key and not volume_size <= OPENSEARCH_MAX_VOLUME_SIZE[key]: - raise SchemaError( - f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between 
{OPENSEARCH_MIN_VOLUME_SIZE} and {OPENSEARCH_MAX_VOLUME_SIZE[key]} for plan {plan}" - ) - - return data - - -OPENSEARCH_SCHEMA = ConditionalSchema(OPENSEARCH_DEFINITION) -MONITORING_SCHEMA = Schema(MONITORING_DEFINITION) -ALB_SCHEMA = Schema(ALB_DEFINITION) -PROMETHEUS_POLICY_SCHEMA = Schema(PROMETHEUS_POLICY_DEFINITION) - - -def no_param_schema(schema_type): - return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])}) - - -SCHEMA_MAP = { - "s3": S3_SCHEMA, - "s3-policy": S3_POLICY_SCHEMA, - "postgres": POSTGRES_SCHEMA, - "redis": REDIS_SCHEMA, - "opensearch": OPENSEARCH_SCHEMA, - "monitoring": MONITORING_SCHEMA, - "appconfig-ipfilter": no_param_schema("appconfig-ipfilter"), - "subscription-filter": no_param_schema("subscription-filter"), - "vpc": no_param_schema("vpc"), - "xray": no_param_schema("xray"), - "alb": ALB_SCHEMA, - "prometheus-policy": PROMETHEUS_POLICY_SCHEMA, -} diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index 42e27ea1a..fdd1f18b2 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -8,18 +8,17 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE -from dbt_platform_helper.utils.validation import S3_BUCKET_NAME_ERROR_TEMPLATE +from dbt_platform_helper.utils.platform_config_schema import _is_integer_between +from dbt_platform_helper.utils.platform_config_schema import _string_matching_regex +from dbt_platform_helper.utils.platform_config_schema import _valid_s3_bucket_name from dbt_platform_helper.utils.validation import _validate_extension_supported_versions from dbt_platform_helper.utils.validation import config_file_check from dbt_platform_helper.utils.validation import float_between_with_halfstep -from dbt_platform_helper.utils.validation import int_between from dbt_platform_helper.utils.validation import 
lint_yaml_for_duplicate_keys from dbt_platform_helper.utils.validation import load_and_validate_platform_config from dbt_platform_helper.utils.validation import validate_addons from dbt_platform_helper.utils.validation import validate_database_copy_section from dbt_platform_helper.utils.validation import validate_platform_config -from dbt_platform_helper.utils.validation import validate_s3_bucket_name -from dbt_platform_helper.utils.validation import validate_string from tests.platform_helper.conftest import FIXTURES_DIR from tests.platform_helper.conftest import UTILS_FIXTURES_DIR @@ -35,6 +34,7 @@ def load_addons(addons_file): (r"^\d+-\d+$", ["1-10"], ["20-21-23"]), (r"^\d+s$", ["10s"], ["10seconds"]), ( + # Todo: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format r"^((?!\*).)*(\*)?$", ["test/valid/branch", "test/valid/branch*", "test/valid/branch-other"], ["test*invalid/branch", "test*invalid/branch*"], @@ -42,7 +42,7 @@ def load_addons(addons_file): ], ) def test_validate_string(regex_pattern, valid_strings, invalid_strings): - validator = validate_string(regex_pattern) + validator = _string_matching_regex(regex_pattern) for valid_string in valid_strings: assert validator(valid_string) == valid_string @@ -53,9 +53,7 @@ def test_validate_string(regex_pattern, valid_strings, invalid_strings): assert ( err.value.args[0] - == f"String '{invalid_string}' does not match the required pattern '{regex_pattern}'. For " - "more details on valid string patterns see: " - "https://aws.github.io/copilot-cli/docs/manifest/lb-web-service/" + == f"String '{invalid_string}' does not match the required pattern '{regex_pattern}'." 
) @@ -306,13 +304,13 @@ def test_validate_addons_missing_type(): @pytest.mark.parametrize("value", [5, 1, 9]) def test_between_success(value): - assert int_between(1, 9)(value) + assert _is_integer_between(1, 9)(value) @pytest.mark.parametrize("value", [-1, 10]) def test_between_raises_error(value): try: - int_between(1, 9)(value) + _is_integer_between(1, 9)(value) assert False, f"testing that {value} is between 1 and 9 failed to raise an error." except SchemaError as ex: assert ex.code == "should be an integer between 1 and 9" @@ -336,7 +334,7 @@ def test_between_with_step_raises_error(value): @pytest.mark.parametrize("bucket_name", ["abc", "a" * 63, "abc-123.xyz", "123", "257.2.2.2"]) def test_validate_s3_bucket_name_success_cases(bucket_name): - assert validate_s3_bucket_name(bucket_name) + assert _valid_s3_bucket_name(bucket_name) @pytest.mark.parametrize( @@ -359,9 +357,9 @@ def test_validate_s3_bucket_name_success_cases(bucket_name): ], ) def test_validate_s3_bucket_name_failure_cases(bucket_name, error_message): - exp_error = S3_BUCKET_NAME_ERROR_TEMPLATE.format(bucket_name, f" {error_message}") + exp_error = f"Bucket name '{bucket_name}' is invalid:\n {error_message}" with pytest.raises(SchemaError) as ex: - validate_s3_bucket_name(bucket_name) + _valid_s3_bucket_name(bucket_name) assert exp_error in str(ex.value) @@ -369,7 +367,7 @@ def test_validate_s3_bucket_name_failure_cases(bucket_name, error_message): def test_validate_s3_bucket_name_multiple_failures(): bucket_name = "xn--one-two..THREE" + "z" * 50 + "--ol-s3" with pytest.raises(SchemaError) as ex: - validate_s3_bucket_name(bucket_name) + _valid_s3_bucket_name(bucket_name) exp_errors = [ "Length must be between 3 and 63 characters inclusive.", From cb0832de64efc556f3c812a3f4291f8e45fccb70 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:33:02 +0000 Subject: [PATCH 36/38] docs: Correct entry in changelog (#674) Co-authored-by: Kate 
Sugden <107400614+ksugden@users.noreply.github.com> --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 666123c8f..91133bfde 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,7 +26,7 @@ * DBTP-1299 - Cross account database copy ([#657](https://github.com/uktrade/platform-tools/issues/657)) ([7d35599](https://github.com/uktrade/platform-tools/commit/7d35599533b55f15fb08801c50ce538a8a32b847)) -### Reverts +### Refactor * Improving provider structure and exception handling" ([#670](https://github.com/uktrade/platform-tools/issues/670)) ([331e8b8](https://github.com/uktrade/platform-tools/commit/331e8b89d60fec4e29a9ea4473ffa44cba8e92c7)) From 58aca89a46207d4494a3981e0b678d0a43fef960 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Mon, 9 Dec 2024 14:25:43 +0000 Subject: [PATCH 37/38] fix: DBTP-1509 Correct link to maintenance pages instructions (#686) --- dbt_platform_helper/domain/maintenance_page.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt_platform_helper/domain/maintenance_page.py b/dbt_platform_helper/domain/maintenance_page.py index fc749498c..d8b90aca1 100644 --- a/dbt_platform_helper/domain/maintenance_page.py +++ b/dbt_platform_helper/domain/maintenance_page.py @@ -261,7 +261,7 @@ def add_maintenance_page( ) click.secho( - f"\nUse a browser plugin to add `Bypass-Key` header with value {bypass_value} to your requests. For more detail, visit https://platform.readme.trade.gov.uk/activities/holding-and-maintenance-pages/", + f"\nUse a browser plugin to add `Bypass-Key` header with value {bypass_value} to your requests. 
For more detail, visit https://platform.readme.trade.gov.uk/next-steps/put-a-service-under-maintenance/", fg="green", ) From 9878d9bc0240509731d1a69c25a3af0c16c4b0a7 Mon Sep 17 00:00:00 2001 From: Will Gibson <8738245+WillGibson@users.noreply.github.com> Date: Mon, 9 Dec 2024 14:27:26 +0000 Subject: [PATCH 38/38] chore: DBTP-1596 Move platform_config_schema.py to providers (#687) --- .../{utils => providers}/platform_config_schema.py | 0 dbt_platform_helper/utils/validation.py | 4 ++-- tests/platform_helper/utils/test_validation.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) rename dbt_platform_helper/{utils => providers}/platform_config_schema.py (100%) diff --git a/dbt_platform_helper/utils/platform_config_schema.py b/dbt_platform_helper/providers/platform_config_schema.py similarity index 100% rename from dbt_platform_helper/utils/platform_config_schema.py rename to dbt_platform_helper/providers/platform_config_schema.py diff --git a/dbt_platform_helper/utils/validation.py b/dbt_platform_helper/utils/validation.py index 3416bd6e9..0b518ad75 100644 --- a/dbt_platform_helper/utils/validation.py +++ b/dbt_platform_helper/utils/validation.py @@ -13,12 +13,12 @@ from dbt_platform_helper.constants import ENVIRONMENTS_KEY from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE +from dbt_platform_helper.providers.platform_config_schema import EXTENSION_SCHEMAS +from dbt_platform_helper.providers.platform_config_schema import PLATFORM_CONFIG_SCHEMA from dbt_platform_helper.utils.aws import get_supported_opensearch_versions from dbt_platform_helper.utils.aws import get_supported_redis_versions from dbt_platform_helper.utils.files import apply_environment_defaults from dbt_platform_helper.utils.messages import abort_with_error -from dbt_platform_helper.utils.platform_config_schema import EXTENSION_SCHEMAS -from dbt_platform_helper.utils.platform_config_schema import 
PLATFORM_CONFIG_SCHEMA def validate_addons(addons: dict): diff --git a/tests/platform_helper/utils/test_validation.py b/tests/platform_helper/utils/test_validation.py index fdd1f18b2..6e14f4f26 100644 --- a/tests/platform_helper/utils/test_validation.py +++ b/tests/platform_helper/utils/test_validation.py @@ -8,9 +8,9 @@ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE -from dbt_platform_helper.utils.platform_config_schema import _is_integer_between -from dbt_platform_helper.utils.platform_config_schema import _string_matching_regex -from dbt_platform_helper.utils.platform_config_schema import _valid_s3_bucket_name +from dbt_platform_helper.providers.platform_config_schema import _is_integer_between +from dbt_platform_helper.providers.platform_config_schema import _string_matching_regex +from dbt_platform_helper.providers.platform_config_schema import _valid_s3_bucket_name from dbt_platform_helper.utils.validation import _validate_extension_supported_versions from dbt_platform_helper.utils.validation import config_file_check from dbt_platform_helper.utils.validation import float_between_with_halfstep