refactor: DBTP-1643 - Adding exceptions in ConfigValidator #745

Merged · 10 commits · Jan 31, 2025

15 changes: 10 additions & 5 deletions dbt_platform_helper/domain/config_validator.py
@@ -2,11 +2,16 @@

import boto3

from dbt_platform_helper.platform_exception import PlatformException
from dbt_platform_helper.providers.io import ClickIOProvider
from dbt_platform_helper.providers.opensearch import OpensearchProvider
from dbt_platform_helper.providers.redis import RedisProvider


class ConfigValidatorError(PlatformException):
pass


class ConfigValidator:

def __init__(
@@ -110,7 +115,7 @@ def validate_environment_pipelines(self, config):
envs = detail["bad_envs"]
acc = detail["account"]
message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n"
self.io.abort_with_error(message)
raise ConfigValidatorError(message)

def validate_environment_pipelines_triggers(self, config):
errors = []
@@ -134,7 +139,7 @@ def validate_environment_pipelines_triggers(self, config):

if errors:
error_message = "The following pipelines are misconfigured: \n"
self.io.abort_with_error(error_message + "\n ".join(errors))
raise ConfigValidatorError(error_message + "\n ".join(errors))

def validate_database_copy_section(self, config):
extensions = config.get("extensions", {})
@@ -220,7 +225,7 @@ def validate_database_copy_section(self, config):
)

if errors:
self.io.abort_with_error("\n".join(errors))
raise ConfigValidatorError("\n".join(errors))

def validate_database_migration_input_sources(self, config: dict):
extensions = config.get("extensions", {})
@@ -247,7 +252,7 @@
)
if "import" not in data_migration and "import_sources" not in data_migration:
errors.append(
f"Error in '{extension_name}.environments.{env}.data_migration': 'import_sources' property is missing."
f"'import_sources' property in '{extension_name}.environments.{env}.data_migration' is missing."
)
if errors:
self.io.abort_with_error("\n".join(errors))
raise ConfigValidatorError("\n".join(errors))
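Note: the change above replaces the `self.io.abort_with_error(...)` calls with `raise ConfigValidatorError(...)`, keeping the existing collect-all-errors-then-fail-once behaviour. A minimal sketch of that pattern is below; the `PlatformException` base class is stubbed (the real one lives in `dbt_platform_helper/platform_exception.py`) and the single check shown is illustrative only, not taken from the validator.

```python
# Minimal sketch of the collect-then-raise pattern used above.
# PlatformException is stubbed here; the real base class lives in
# dbt_platform_helper/platform_exception.py.
class PlatformException(Exception):
    pass


class ConfigValidatorError(PlatformException):
    pass


def validate_database_copy_section(config: dict) -> None:
    """Collect every problem found, then raise once with the combined report."""
    errors: list[str] = []

    for name, extension in config.get("extensions", {}).items():
        for copy in extension.get("database_copy", []):
            # Illustrative check only; the real validator applies its own rules.
            if copy.get("from") == copy.get("to"):
                errors.append(
                    f"Error in '{name}.database_copy': 'from' and 'to' cannot be the same environment."
                )

    if errors:
        # The caller decides how this is surfaced (see the ConfigProvider change below).
        raise ConfigValidatorError("\n".join(errors))
```

Calling it with a `database_copy` entry such as `{"from": "prod", "to": "prod"}` raises a single `ConfigValidatorError` carrying all collected messages.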
7 changes: 6 additions & 1 deletion dbt_platform_helper/providers/config.py
@@ -5,6 +5,7 @@

from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
from dbt_platform_helper.domain.config_validator import ConfigValidator
from dbt_platform_helper.domain.config_validator import ConfigValidatorError
from dbt_platform_helper.providers.io import ClickIOProvider
from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema
from dbt_platform_helper.providers.yaml_file import FileNotFoundException
@@ -35,7 +36,11 @@ def _validate_platform_config(self):
# also, we apply defaults but discard that data. Should we just apply
# defaults to config returned by load_and_validate
enriched_config = ConfigProvider.apply_environment_defaults(self.config)
self.validator.run_validations(enriched_config)
try:
self.validator.run_validations(enriched_config)
except ConfigValidatorError as exc:
print(exc)
self.io.abort_with_error(f"Config validation has failed.\n{str(exc)}")

def load_and_validate_platform_config(self, path=PLATFORM_CONFIG_FILE):
try:
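The hunk above is the provider side of the refactor: `_validate_platform_config` now catches `ConfigValidatorError` and turns it into a user-facing abort. A simplified sketch of that boundary follows; `ClickIOProvider` and `ConfigValidator` are replaced by stand-ins, and the config is passed in as a parameter only to keep the example self-contained (the real method reads `self.config`).

```python
# Simplified sketch of the boundary shown above: the validator raises,
# and the provider converts the failure into a CLI abort. The IO provider
# and validator here are stand-ins, not the real classes.
class ConfigValidatorError(Exception):
    pass


class StubIO:
    def abort_with_error(self, message: str) -> None:
        raise SystemExit(f"Error: {message}")


class FailingValidator:
    def run_validations(self, config: dict) -> None:
        raise ConfigValidatorError(
            "'import_sources' property in 'bucket.environments.dev.data_migration' is missing."
        )


class ConfigProvider:
    def __init__(self, validator, io=None):
        self.validator = validator
        self.io = io or StubIO()

    def _validate_platform_config(self, enriched_config: dict) -> None:
        try:
            self.validator.run_validations(enriched_config)
        except ConfigValidatorError as exc:
            # The single place where a validation failure becomes a user-facing abort.
            self.io.abort_with_error(f"Config validation has failed.\n{exc}")
```

With these stand-ins, `ConfigProvider(FailingValidator())._validate_platform_config({})` exits with the combined "Config validation has failed." message, which is the behaviour the updated tests below assert against.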
213 changes: 162 additions & 51 deletions platform-config.yml
@@ -1,69 +1,180 @@
# This file is just here to support the tests.
# In particular, testing the Choices for the click commands as these are evaluated at compile-time (being decorators)
# and so the functions that populate the Choice objects are evaluated before tests can patch them.
# e.g. dbt_platform_helper.utils.platform_config.get_environment_pipeline_names
application: test-app

codebase_pipelines:
application:
additional_ecr_repository: public.ecr.aws/my-public-repo/test-app/application
deploy_repository_branch: feature-branch
pipelines:
- branch: main
environments:
- name: dev
name: main
- environments:
- name: staging
requires_approval: true
name: tagged
tag: true
repository: uktrade/test-app
services:
- run_order_1:
- celery-worker
- celery-beat
- web
slack_channel: OTHER_SLACK_CHANNEL_ID
default_versions:
platform-helper: 10.2.0
terraform-platform-modules: 1.2.3
environment_pipelines:
main:
account: non-prod-acc
environments:
dev: null
staging: null
pipeline_to_trigger: prod-main
slack_channel: /codebuild/notification_channel
trigger_on_push: true
prod-main:
account: prod-acc
branch: main
environments:
prod:
requires_approval: true
slack_channel: /codebuild/slack_oauth_channel
trigger_on_push: false
versions:
platform-helper: 9.0.9
test:
branch: my-feature-branch
environments:
test:
accounts:
deploy:
id: '9999999999'
name: prod-acc
dns:
id: '7777777777'
name: prod-dns-acc
requires_approval: true
vpc: testing_vpc
slack_channel: /codebuild/notification_channel
trigger_on_push: false
versions:
platform-helper: main
environments:
"*":
'*':
accounts:
deploy:
name: "non-prod-acc"
id: "1122334455"
id: '1122334455'
name: non-prod-acc
dns:
name: "non-prod-dns-acc"
id: "6677889900"
id: '6677889900'
name: non-prod-dns-acc
requires_approval: false
vpc: non-prod-vpc
dev:
test:
versions:
terraform-platform-modules: 1.2.3
staging:
invalid-key: 1.2.3
vpc: non-prod-vpc
dev: null
hotfix:
accounts:
deploy:
id: '9999999999'
name: prod-acc
dns:
id: '6677889900'
name: non-prod-dns-acc
vpc: hotfix-vpc
prod:
accounts:
deploy:
name: "prod-acc"
id: "9999999999"
id: '9999999999'
name: prod-acc
dns:
name: "prod-dns-acc"
id: "7777777777"
id: '7777777777'
name: prod-dns-acc
requires_approval: true
vpc: prod-vpc

environment_pipelines:
main:
account: non-prod-acc
slack_channel: "/codebuild/notification_channel"
trigger_on_push: true
pipeline_to_trigger: "prod-main"
environments:
dev:
staging:
staging: null
test:
branch: my-feature-branch
slack_channel: "/codebuild/notification_channel"
trigger_on_push: false
versions:
platform-helper: main
terraform-platform-modules: 1.2.3
extensions:
test-app-alb:
environments:
test:
requires_approval: true
vpc: testing_vpc
accounts:
deploy:
name: "prod-acc"
id: "9999999999"
dns:
name: "prod-dns-acc"
id: "7777777777"
prod-main:
account: prod-acc
branch: main
slack_channel: "/codebuild/slack_oauth_channel"
trigger_on_push: false
versions:
platform-helper: 9.0.9
dev:
cdn_domains_list:
dev.test-app.uktrade.digital: test-app.uktrade.digital
type: alb
test-app-monitoring:
environments:
'*':
enable_ops_center: false
type: monitoring
test-app-opensearch:
environments:
'*':
engine: '1.3'
password_special_characters: -_.,
plan: small
urlencode_password: false
volume_size: 40
type: opensearch
test-app-postgres:
database_copy:
- from: prod
to: hotfix
environments:
dev:
deletion_protection: true
hotfix:
backup_retention_days: 10
prod:
requires_approval: true
backup_retention_days: 10
staging:
deletion_policy: Retain
deletion_protection: true
type: postgres
version: 16.2
test-app-redis:
environments:
'*':
apply_immediately: true
engine: '7.1'
plan: tiny
type: redis
test-app-s3-bucket:
environments:
dev:
bucket_name: test-app-policy-dev
versioning: false
services:
- web
type: s3-policy
test-app-s3-bucket-data-migration:
environments:
dev:
bucket_name: s3-data-migration
data_migration:
import:
source_bucket_arn: arn:aws:s3:::test-app
source_kms_key_arn: arn:aws:kms::123456789012:key/test-key
worker_role_arn: arn:aws:iam::123456789012:role/test-role
versioning: false
services:
- web
type: s3
test-app-s3-bucket-with-objects:
environments:
dev:
bucket_name: test-app-dev
lifecycle_rules:
- enabled: true
expiration_days: 1
versioning: false
staging:
bucket_name: test-app-staging
versioning: false
objects:
- body: Demodjango is working.
key: healthcheck.txt
services:
- web
type: s3
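The comment at the top of this fixture explains why it has to exist on disk: Click `Choice` options are computed when the command decorators run, i.e. at module import, before any test gets a chance to patch the helper that reads the config. A small sketch of the pattern it is describing, using the `get_environment_pipeline_names` helper named in the comment and an otherwise hypothetical command:

```python
import click

# Sketch of why the fixture must exist on disk: the Choice values are computed
# when the decorator runs (at module import), before any test patching happens.
# get_environment_pipeline_names is the helper named in the fixture's comment;
# the command itself is hypothetical.
from dbt_platform_helper.utils.platform_config import get_environment_pipeline_names


@click.command()
@click.argument("pipeline", type=click.Choice(get_environment_pipeline_names()))
def generate(pipeline):
    """Example command whose choices are read from platform-config.yml at import time."""
    click.echo(f"Generating pipeline: {pipeline}")
```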
26 changes: 10 additions & 16 deletions tests/platform_helper/providers/test_config.py
@@ -109,14 +109,11 @@ def test_validate_data_migration_fails_if_neither_import_nor_import_sources_pres
config_provider.config = config
mock_io = Mock()
config_provider.io = mock_io
config_provider.validator.io = mock_io
config_provider._validate_platform_config()

message = mock_io.abort_with_error.call_args.args[0]
config_provider._validate_platform_config()

assert (
"Error in 'test-s3-bucket.environments.dev.data_migration': 'import_sources' property is missing."
in message
mock_io.abort_with_error.assert_called_with(
"""Config validation has failed.\n'import_sources' property in 'test-s3-bucket.environments.dev.data_migration' is missing."""
)


@@ -155,14 +152,11 @@ def test_validate_data_migration_fails_if_both_import_and_import_sources_present
config_provider.config = config
mock_io = Mock()
config_provider.io = mock_io
config_provider.validator.io = mock_io
config_provider._validate_platform_config()

message = mock_io.abort_with_error.call_args.args[0]
config_provider._validate_platform_config()

assert (
"Error in 'test-s3-bucket.environments.dev.data_migration': only the 'import_sources' property is required - 'import' is deprecated."
in message
mock_io.abort_with_error.assert_called_with(
"""Config validation has failed.\nError in 'test-s3-bucket.environments.dev.data_migration': only the 'import_sources' property is required - 'import' is deprecated."""
)


@@ -273,7 +267,7 @@ def test_validation_fails_if_invalid_default_version_keys_present(
),
)
def test_validation_fails_if_invalid_environment_version_override_keys_present(
invalid_key, fakefs, capsys, valid_platform_config
invalid_key, valid_platform_config
):
valid_platform_config["environments"]["*"]["versions"] = {invalid_key: "1.2.3"}
Path(PLATFORM_CONFIG_FILE).write_text(yaml.dump(valid_platform_config))
@@ -295,7 +289,7 @@ def test_validation_fails_if_invalid_environment_version_override_keys_present(
),
)
def test_validation_fails_if_invalid_pipeline_version_override_keys_present(
invalid_key, fakefs, capsys, valid_platform_config
invalid_key, valid_platform_config
):
valid_platform_config["environment_pipelines"]["test"]["versions"][invalid_key] = "1.2.3"
Path(PLATFORM_CONFIG_FILE).write_text(yaml.dump(valid_platform_config))
@@ -307,7 +301,7 @@ def test_validation_fails_if_invalid_pipeline_version_override_keys_present(
assert f"Wrong key '{invalid_key}'" in str(ex)


def test_load_and_validate_platform_config_fails_with_invalid_yaml(fakefs, capsys):
def test_load_and_validate_platform_config_fails_with_invalid_yaml(capsys):
"""Test that, given the path to an invalid yaml file,
load_and_validate_config aborts and prints an error."""

@@ -318,7 +312,7 @@ def test_load_and_validate_platform_config_fails_with_invalid_yaml(fakefs, capsys):
assert f"{PLATFORM_CONFIG_FILE} is not valid YAML" in capsys.readouterr().err


def test_load_and_validate_platform_config_fails_with_missing_config_file(fakefs, capsys):
def test_load_and_validate_platform_config_fails_with_missing_config_file(capsys):
if Path(PLATFORM_CONFIG_FILE).exists():
os.remove(Path(PLATFORM_CONFIG_FILE))

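A possible follow-up for the test suite: now that `ConfigValidator` raises rather than aborting, individual validations can be asserted with `pytest.raises` instead of inspecting the mocked IO provider. A hedged sketch, assuming `ConfigValidator()` can be constructed with its defaults and guessing at a minimal config shape (the real tests build theirs from fixtures):

```python
import pytest

# Hedged sketch: assert on the raised exception directly rather than on mock_io.
# The config dict below is an illustrative guess at a minimal shape, not the
# project's real fixture.
from dbt_platform_helper.domain.config_validator import (
    ConfigValidator,
    ConfigValidatorError,
)


def test_missing_import_sources_raises_config_validator_error():
    config = {
        "application": "test-app",
        "extensions": {
            "test-s3-bucket": {
                "type": "s3",
                "environments": {"dev": {"data_migration": {}}},
            }
        },
    }

    with pytest.raises(ConfigValidatorError, match="'import_sources' property"):
        ConfigValidator().validate_database_migration_input_sources(config)
```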