From ee22f774214f432aa5efee9e00c1433e539afba4 Mon Sep 17 00:00:00 2001 From: Kai Schlamp Date: Sun, 23 Jun 2024 00:08:57 +0000 Subject: [PATCH 1/4] Switch from Celery to Procrastinate. Also run example project in containers now. --- Dockerfile | 77 +++++ TODO.md | 1 + .../common/management/base/celery_beat.py | 47 --- ...lery_worker.py => procrastinate_worker.py} | 36 +- adit_radis_shared/common/views.py | 13 - docker-compose.yml | 47 +++ example.env | 2 +- .../example_app/management/__init__.py | 0 .../management/commands/__init__.py | 0 .../example_app/management/commands/worker.py | 8 + .../example_project/example_app/tasks.py | 18 + .../templates/example_app/example_list.html | 3 + .../templates/example_app/example_task.html | 13 + .../templates/example_app/example_toasts.html | 1 - .../example_project/example_app/urls.py | 6 + .../example_project/example_app/views.py | 13 +- example_project/example_project/settings.py | 15 +- poetry.lock | 321 ++++-------------- pyproject.toml | 2 +- tasks.py | 23 ++ 20 files changed, 287 insertions(+), 359 deletions(-) create mode 100644 Dockerfile delete mode 100644 adit_radis_shared/common/management/base/celery_beat.py rename adit_radis_shared/common/management/base/{celery_worker.py => procrastinate_worker.py} (59%) create mode 100644 docker-compose.yml create mode 100644 example_project/example_project/example_app/management/__init__.py create mode 100644 example_project/example_project/example_app/management/commands/__init__.py create mode 100644 example_project/example_project/example_app/management/commands/worker.py create mode 100644 example_project/example_project/example_app/tasks.py create mode 100644 example_project/example_project/example_app/templates/example_app/example_task.html diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..6dd05b5 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,77 @@ +FROM python:3.12-bullseye as python-base + +# python +# ENV variables are also available in the later 
build stages +ENV PYTHONUNBUFFERED=1 \ + # prevents python creating .pyc files + PYTHONDONTWRITEBYTECODE=1 \ + \ + # pip + PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + \ + # poetry + # https://python-poetry.org/docs/#installing-with-the-official-installer + # https://python-poetry.org/docs/configuration/#using-environment-variables + POETRY_VERSION=1.8.3 \ + # make poetry install to this location + POETRY_HOME="/opt/poetry" \ + # make poetry create the virtual environment in the project's root + # it gets named `.venv` + POETRY_VIRTUALENVS_IN_PROJECT=true \ + # do not ask any interactive question + POETRY_NO_INTERACTION=1 \ + \ + # paths + # this is where our requirements + virtual environment will live + PYSETUP_PATH="/opt/pysetup" \ + VENV_PATH="/opt/pysetup/.venv" \ + # needed for adit-radis-shared to be found + PYTHONPATH="/app" + +# prepend poetry and venv to path +ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" + +RUN apt-get update \ + && apt-get install --no-install-recommends -y \ + # deps for db management commands + postgresql-client + + +# `builder-base` stage is used to build deps + create our virtual environment +FROM python-base as builder-base +RUN apt-get update \ + && apt-get install --no-install-recommends -y \ + # deps for installing poetry + curl \ + # deps for building python deps + build-essential + +# install poetry - respects $POETRY_VERSION & $POETRY_HOME +RUN curl -sSL https://install.python-poetry.org | python3 - + +# copy project requirement files here to ensure they will be cached. 
+WORKDIR $PYSETUP_PATH +COPY poetry.lock pyproject.toml ./ + +# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally +RUN poetry install --without dev + + +# `development` image is used during development / testing +FROM python-base as development +WORKDIR $PYSETUP_PATH + +# copy in our built poetry + venv +COPY --from=builder-base $POETRY_HOME $POETRY_HOME +COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH + +# quicker install as runtime deps are already installed +RUN poetry install + +# Install requirements for end-to-end testing +RUN playwright install --with-deps chromium + +# will become mountpoint of our code +WORKDIR /app diff --git a/TODO.md b/TODO.md index ab2aca1..e6c9aea 100644 --- a/TODO.md +++ b/TODO.md @@ -20,3 +20,4 @@ - Use django-environ as in the docs (see shared) - Make package.json more minimal (as in shared) - Get rid of static vendor stuff (its in common app now) +- No need to do collectstatics in development container diff --git a/adit_radis_shared/common/management/base/celery_beat.py b/adit_radis_shared/common/management/base/celery_beat.py deleted file mode 100644 index 3a83ab6..0000000 --- a/adit_radis_shared/common/management/base/celery_beat.py +++ /dev/null @@ -1,47 +0,0 @@ -import logging -import shlex -import subprocess -from pathlib import Path -from typing import Literal - -from .server_command import ServerCommand - -logger = logging.getLogger(__name__) - - -class CeleryBeatCommand(ServerCommand): - project: Literal["adit", "radis"] - help = "Starts Celery beat scheduler" - server_name = "Celery beat scheduler" - - def __init__(self, *args, **kwargs): - self.beat_process = None - super().__init__(*args, **kwargs) - - def add_arguments(self, parser): - super().add_arguments(parser) - - # https://docs.celeryproject.org/en/stable/reference/cli.html - parser.add_argument( - "-l", - "--loglevel", - default="INFO", - help="Logging level.", - ) - - def run_server(self, **options): - folder_path = 
Path(f"/var/www/{self.project}/celery/") - folder_path.mkdir(parents=True, exist_ok=True) - schedule_path = folder_path / "celerybeat-schedule" - loglevel = options["loglevel"] - - # --pidfile= disables pidfile creation as we can control the process with subprocess - cmd = f"celery -A {self.project} beat -l {loglevel} -s {str(schedule_path)} --pidfile=" - - self.beat_process = subprocess.Popen(shlex.split(cmd)) - self.beat_process.wait() - - def on_shutdown(self): - assert self.beat_process - self.beat_process.terminate() - self.beat_process.wait() diff --git a/adit_radis_shared/common/management/base/celery_worker.py b/adit_radis_shared/common/management/base/procrastinate_worker.py similarity index 59% rename from adit_radis_shared/common/management/base/celery_worker.py rename to adit_radis_shared/common/management/base/procrastinate_worker.py index 1ef4eb3..0d750e3 100644 --- a/adit_radis_shared/common/management/base/celery_worker.py +++ b/adit_radis_shared/common/management/base/procrastinate_worker.py @@ -1,18 +1,15 @@ import logging import shlex -import socket import subprocess -from typing import Literal from .server_command import ServerCommand logger = logging.getLogger(__name__) -class CeleryWorkerCommand(ServerCommand): - project: Literal["adit", "radis"] - help = "Starts a Celery worker" - server_name = "Celery worker" +class ProcrastinateServerCommand(ServerCommand): + help = "Starts a Procrastinate worker" + server_name = "Procrastinate worker" worker_process: subprocess.Popen | None def __init__(self, *args, **kwargs): @@ -22,37 +19,38 @@ def __init__(self, *args, **kwargs): def add_arguments(self, parser): super().add_arguments(parser) - # https://docs.celeryproject.org/en/stable/reference/cli.html parser.add_argument( - "-Q", - "--queue", - required=True, - help="The celery queue.", + "-q", + "--queues", + help="Comma-separated names of the queues to listen to (empty string for all queues)", ) parser.add_argument( "-l", "--loglevel", - 
default="INFO", + default="warning", + choices=["warning", "info", "debug"], help="Logging level.", ) parser.add_argument( "-c", "--concurrency", type=int, - default=0, + default=1, help="Number of child processes processing the queue (defaults to number of CPUs).", ) def run_server(self, **options): - queue = options["queue"] - loglevel = options["loglevel"] - hostname = f"worker_{queue}_{socket.gethostname()}" + cmd = "./manage.py procrastinate" - cmd = f"celery -A {self.project} worker -Q {queue} -l {loglevel} -n {hostname}" + # https://procrastinate.readthedocs.io/en/stable/howto/basics/command_line.html + if options["loglevel"] == "debug": + cmd += " -v 1" + + cmd += " worker --delete-jobs=always" concurrency = options["concurrency"] - if concurrency >= 1: - cmd += f" -c {concurrency}" + if concurrency > 1: + cmd += f" --concurrency {concurrency}" self.worker_process = subprocess.Popen(shlex.split(cmd)) self.worker_process.wait() diff --git a/adit_radis_shared/common/views.py b/adit_radis_shared/common/views.py index 0735af2..e00f42a 100644 --- a/adit_radis_shared/common/views.py +++ b/adit_radis_shared/common/views.py @@ -1,6 +1,5 @@ from typing import Any, Callable -from django.conf import settings from django.contrib import messages from django.contrib.auth.mixins import ( LoginRequiredMixin, @@ -109,15 +108,3 @@ def test_func(self): @classmethod def as_url(cls): return re_path(rf"^{cls.url_prefix}/(?P.*)$", cls.as_view()) # type: ignore - - -class FlowerProxyView(AdminProxyView): - upstream = f"http://{settings.FLOWER_HOST}:{settings.FLOWER_PORT}" # type: ignore - url_prefix = "flower" - rewrite = ((rf"^/{url_prefix}$", rf"/{url_prefix}/"),) - - @classmethod - def as_url(cls): - # Flower needs a bit different setup then the other proxy views as flower - # uses a prefix itself (see docker compose service) - return re_path(rf"^(?P{cls.url_prefix}.*)$", cls.as_view()) diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 
0000000..8fbbdf6 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,47 @@ +x-app: &default-app + image: adit-radis-shared + volumes: + - .:/app + depends_on: + - postgres + env_file: + - .env + environment: + USE_DOCKER: 1 + DATABASE_URL: "psql://postgres:postgres@postgres.local:5432/postgres" + pull_policy: never + +services: + web: + <<: *default-app + hostname: web.local + build: + target: development + ports: + - 8000:8000 + command: > + bash -c " + cd example_project; + wait-for-it -s postgres.local:5432 -t 60 && + ./manage.py migrate && + ./manage.py populate_users_and_groups --users 20 --groups 3 && + ./manage.py runserver 0.0.0.0:8000 + " + + worker: + <<: *default-app + hostname: worker.local + command: > + bash -c " + cd example_project; + wait-for-it -s postgres.local:5432 -t 60 && + ./manage.py worker -l debug --autoreload + " + + postgres: + image: postgres:latest + hostname: postgres.local + ports: + - 5432:5432 + environment: + POSTGRES_PASSWORD: postgres diff --git a/example.env b/example.env index 0cf2f4f..c3d14c7 100644 --- a/example.env +++ b/example.env @@ -15,7 +15,7 @@ DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1 DJANGO_CSRF_TRUSTED_ORIGINS= DJANGO_EMAIL_HOST=smtp.email.test DJANGO_INTERNAL_IPS=127.0.0.1 -DJANGO_SECRET_KEY="xxr84$n*8ddo5c#fw=2(%ef1*ueq4s*#qvbeey25c0xrvhtfml" +DJANGO_SECRET_KEY="2m^wu_)x-&r%w13h0())g!-t*b^x8**c7=ped7d9*t8w(b#gu4" DJANGO_SERVER_EMAIL=adit.support@example.org FORCE_DEBUG_TOOLBAR=false POSTGRES_PASSWORD=mysecret diff --git a/example_project/example_project/example_app/management/__init__.py b/example_project/example_project/example_app/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/example_project/example_project/example_app/management/commands/__init__.py b/example_project/example_project/example_app/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/example_project/example_project/example_app/management/commands/worker.py 
b/example_project/example_project/example_app/management/commands/worker.py new file mode 100644 index 0000000..66fac61 --- /dev/null +++ b/example_project/example_project/example_app/management/commands/worker.py @@ -0,0 +1,8 @@ +from adit_radis_shared.common.management.base.procrastinate_worker import ProcrastinateServerCommand + + +class Command(ProcrastinateServerCommand): + paths_to_watch = [ + "./example_project", + "../adit_radis_shared", + ] diff --git a/example_project/example_project/example_app/tasks.py b/example_project/example_project/example_app/tasks.py new file mode 100644 index 0000000..a8c89a7 --- /dev/null +++ b/example_project/example_project/example_app/tasks.py @@ -0,0 +1,18 @@ +import procrastinate +from procrastinate.contrib.django import app + + +@app.task(pass_context=True) +def example_task(context: procrastinate.JobContext): + job = context.job + assert job + job_id = job.id + assert job_id is not None + print(f"Hello from job {job_id}") + + +# scheduled every minute +@app.periodic(cron="*/1 * * * *") +@app.task +def periodic_example_task(timestamp: int): + print(f"A periodic hello at {timestamp}!") diff --git a/example_project/example_project/example_app/templates/example_app/example_list.html b/example_project/example_project/example_app/templates/example_app/example_list.html index 8085d2a..4d2a3af 100644 --- a/example_project/example_project/example_app/templates/example_app/example_list.html +++ b/example_project/example_project/example_app/templates/example_app/example_list.html @@ -16,5 +16,8 @@

Examples

  • Async class based view
  • +
  • + Example task +
  • {% endblock content %} diff --git a/example_project/example_project/example_app/templates/example_app/example_task.html b/example_project/example_project/example_app/templates/example_app/example_task.html new file mode 100644 index 0000000..1388068 --- /dev/null +++ b/example_project/example_project/example_app/templates/example_app/example_task.html @@ -0,0 +1,13 @@ +{% extends "example_app/example_app_layout.html" %} +{% block title %} + Task Example +{% endblock title %} +{% block heading %} +

    Task Example

    +{% endblock heading %} +{% block content %} +
    + {% csrf_token %} + +
    +{% endblock content %} diff --git a/example_project/example_project/example_app/templates/example_app/example_toasts.html b/example_project/example_project/example_app/templates/example_app/example_toasts.html index dd1215a..d984e58 100644 --- a/example_project/example_project/example_app/templates/example_app/example_toasts.html +++ b/example_project/example_project/example_app/templates/example_app/example_toasts.html @@ -1,5 +1,4 @@ {% extends "example_app/example_app_layout.html" %} -{% load static from static %} {% block title %} Toast Examples {% endblock title %} diff --git a/example_project/example_project/example_app/urls.py b/example_project/example_project/example_app/urls.py index 705a6b8..874c6b5 100644 --- a/example_project/example_project/example_app/urls.py +++ b/example_project/example_project/example_app/urls.py @@ -6,6 +6,7 @@ HomeView, admin_section, example_messages, + example_task_view, example_toasts, ) @@ -32,4 +33,9 @@ AsyncExampleClassView.as_view(), name="example_async_class_view", ), + path( + "example-task/", + example_task_view, + name="example_task", + ), ] diff --git a/example_project/example_project/example_app/views.py b/example_project/example_project/example_app/views.py index 6a8bec8..12ddfb8 100644 --- a/example_project/example_project/example_app/views.py +++ b/example_project/example_project/example_app/views.py @@ -5,13 +5,15 @@ from django.contrib.auth.decorators import login_required from django.core.exceptions import PermissionDenied from django.http import HttpRequest, HttpResponse -from django.shortcuts import render +from django.shortcuts import redirect, render from django.views import View from django.views.generic import TemplateView from adit_radis_shared.accounts.models import User from adit_radis_shared.common.views import BaseHomeView +from .tasks import example_task + class HomeView(BaseHomeView): template_name = "example_app/home.html" @@ -42,6 +44,15 @@ def example_messages(request: HttpRequest) -> 
HttpResponse: return render(request, "example_app/example_messages.html", {}) +def example_task_view(request: HttpRequest) -> HttpResponse: + if request.method == "POST": + job_id = example_task.defer() + messages.info(request, f"Job started with ID {job_id}!") + return redirect("example_task") + + return render(request, "example_app/example_task.html", {}) + + # Cave, LoginRequiredMixin won't work with async views! One has to implement it himself. class AsyncExampleClassView(View): async def get(self, request: HttpRequest) -> HttpResponse: diff --git a/example_project/example_project/settings.py b/example_project/example_project/settings.py index f5a0aea..9f0dcb9 100644 --- a/example_project/example_project/settings.py +++ b/example_project/example_project/settings.py @@ -80,6 +80,7 @@ "django.contrib.staticfiles", "django.contrib.humanize", "django_extensions", + "procrastinate.contrib.django", "loginas", "crispy_forms", "crispy_bootstrap5", @@ -135,13 +136,7 @@ # Database # https://docs.djangoproject.com/en/5.0/ref/settings/#databases - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": BASE_DIR / "db.sqlite3", - } -} +DATABASES = {"default": env.db()} # Password validation @@ -237,9 +232,3 @@ # The salt that is used for hashing new tokens in the token authentication app. # Cave, changing the salt after some tokens were already generated makes them all invalid! TOKEN_AUTHENTICATION_SALT = env("TOKEN_AUTHENTICATION_SALT") - -# We need to define a dummy host and port for the Flower server as we setup a reverse proxy -# to access Flower in ADIT and RADIS behind the Django authentication. But we don't use -# Flower in our example project (as we don't have the Celery stuff in it). -FLOWER_HOST = "localhost" -FLOWER_PORT = 5555 diff --git a/poetry.lock b/poetry.lock index 90c6c1e..4fe1bb2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,18 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
- -[[package]] -name = "amqp" -version = "5.2.0" -description = "Low-level AMQP client for Python (fork of amqplib)." -optional = false -python-versions = ">=3.6" -files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, -] - -[package.dependencies] -vine = ">=5.0.0,<6.0.0" +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "anyio" @@ -142,73 +128,6 @@ six = "*" [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] -[[package]] -name = "billiard" -version = "4.2.0" -description = "Python multiprocessing fork with improvements and bugfixes" -optional = false -python-versions = ">=3.7" -files = [ - {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, - {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, -] - -[[package]] -name = "celery" -version = "5.4.0" -description = "Distributed Task Queue." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, - {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, -] - -[package.dependencies] -billiard = ">=4.2.0,<5.0" -click = ">=8.1.2,<9.0" -click-didyoumean = ">=0.3.0" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.3.4,<6.0" -python-dateutil = ">=2.8.2" -tzdata = ">=2022.7" -vine = ">=5.1.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=2.0.2)"] -auth = ["cryptography (==42.0.5)"] -azureblockblob = ["azure-storage-blob (>=12.15.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (>=3.25.0,<4)"] -consul = ["python-consul2 (==0.1.5)"] -cosmosdbsql = ["pydocumentdb (==2.3.5)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb (==1.14.2)"] -django = ["Django (>=2.2.28)"] -dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elastic-transport (<=8.13.0)", "elasticsearch (<=8.13.0)"] -eventlet = ["eventlet (>=0.32.0)"] -gcs = ["google-cloud-storage (>=2.10.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -memcache = ["pylibmc (==1.6.3)"] -mongodb = ["pymongo[srv] (>=4.0.2)"] -msgpack = ["msgpack (==1.0.8)"] -pymemcache = ["python-memcached (>=1.61)"] -pyro = ["pyro4 (==4.82)"] -pytest = ["pytest-celery[all] (>=1.0.0)"] -redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] -s3 = ["boto3 (>=1.26.143)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem (==4.1.5)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = ["zstandard (==0.22.0)"] - [[package]] name = "certifi" version = "2024.6.2" @@ -416,55 +335,6 @@ files = [ 
[package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "click-didyoumean" -version = "0.3.1" -description = "Enables git-like *did-you-mean* feature in click" -optional = false -python-versions = ">=3.6.2" -files = [ - {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, - {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, -] - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -optional = false -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.3.0" -description = "REPL plugin for Click" -optional = false -python-versions = ">=3.6" -files = [ - {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, - {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, -] - -[package.dependencies] -click = ">=7.0" -prompt-toolkit = ">=3.0.36" - -[package.extras] -testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] - [[package]] name = "colorama" version = "0.4.6" @@ -586,6 +456,21 @@ django-crispy-forms = ">=2" [package.extras] test = ["pytest", "pytest-django"] +[[package]] +name = "croniter" +version = "2.0.5" +description = "croniter provides 
iteration for datetime object with cron like format" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6" +files = [ + {file = "croniter-2.0.5-py2.py3-none-any.whl", hash = "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed"}, + {file = "croniter-2.0.5.tar.gz", hash = "sha256:f1f8ca0af64212fbe99b1bee125ee5a1b53a9c1b433968d8bca8817b79d237f3"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = ">2021.1" + [[package]] name = "cryptography" version = "42.0.7" @@ -986,28 +871,6 @@ PyYAML = ">=6.0,<7.0" regex = ">=2023.0.0,<2024.0.0" tqdm = ">=4.62.2,<5.0.0" -[[package]] -name = "docker" -version = "7.1.0" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.8" -files = [ - {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, - {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, -] - -[package.dependencies] -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" - -[package.extras] -dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] -docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] -ssh = ["paramiko (>=2.4.3)"] -websockets = ["websocket-client (>=1.3.0)"] - [[package]] name = "docopt" version = "0.6.2" @@ -1422,38 +1285,6 @@ traitlets = ">=5.3" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] -[[package]] -name = "kombu" -version = "5.3.7" -description = "Messaging library for Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "kombu-5.3.7-py3-none-any.whl", hash = "sha256:5634c511926309c7f9789f1433e9ed402616b56836ef9878f01bd59267b4c7a9"}, - {file = "kombu-5.3.7.tar.gz", hash = "sha256:011c4cd9a355c14a1de8d35d257314a1d2456d52b7140388561acac3cf1a97bf"}, -] - -[package.dependencies] -amqp = ">=5.1.1,<6.0.0" -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.10.0)"] -azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] -confluentkafka = ["confluent-kafka (>=2.2.0)"] -consul = ["python-consul2"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=2.8.0)"] - [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -1603,6 +1434,32 @@ files = [ {file = "priority-1.3.0.tar.gz", hash = "sha256:6bc1961a6d7fcacbfc337769f1a382c8e746566aaa365e78047abe9f66b2ffbe"}, ] +[[package]] +name = "procrastinate" +version = "2.6.0" +description = "Postgres-based distributed task processing library" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "procrastinate-2.6.0-py3-none-any.whl", hash = "sha256:074e47b2b402dfa2261a8dc0560cd790bc50ce895edf56740b66d1f892a4f226"}, + {file = "procrastinate-2.6.0.tar.gz", hash = "sha256:461674d280bc17d4c8a04812f0b70723c8169240f98853b3199fb6856ca5c024"}, +] + +[package.dependencies] +anyio = "*" +asgiref = "*" +attrs = "*" +croniter = "*" +django = {version = ">=2.2", optional = true, markers = "extra == \"django\""} +psycopg = {version = ">=3.1.13,<4.0.0", extras = ["pool"]} +python-dateutil = "*" + +[package.extras] +aiopg = ["aiopg", "psycopg2-binary"] 
+django = ["django (>=2.2)"] +psycopg2 = ["psycopg2-binary"] +sqlalchemy = ["sqlalchemy (>=2.0,<3.0)"] + [[package]] name = "prompt-toolkit" version = "3.0.45" @@ -1658,6 +1515,7 @@ files = [ [package.dependencies] psycopg-binary = {version = "3.1.19", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -1741,6 +1599,20 @@ files = [ {file = "psycopg_binary-3.1.19-cp39-cp39-win_amd64.whl", hash = "sha256:76fcd33342f38e35cd6b5408f1bc117d55ab8b16e5019d99b6d3ce0356c51717"}, ] +[[package]] +name = "psycopg-pool" +version = "3.2.2" +description = "Connection Pool for Psycopg" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg_pool-3.2.2-py3-none-any.whl", hash = "sha256:273081d0fbfaced4f35e69200c89cb8fbddfe277c38cc86c235b90a2ec2c8153"}, + {file = "psycopg_pool-3.2.2.tar.gz", hash = "sha256:9e22c370045f6d7f2666a5ad1b0caf345f9f1912195b0b25d0d3bcc4f3a7389c"}, +] + +[package.dependencies] +typing-extensions = ">=4.4" + [[package]] name = "ptyprocess" version = "0.7.0" @@ -1766,17 +1638,6 @@ files = [ [package.extras] tests = ["pytest"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - [[package]] name = "pyasn1" version = "0.6.0" @@ -1936,31 +1797,6 @@ requests = ">=2.9" [package.extras] test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "pytest-localserver (>=0.7.1)", "tox (>=3.24.5)"] -[[package]] 
-name = "pytest-celery" -version = "1.0.0" -description = "Pytest plugin for Celery" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "pytest_celery-1.0.0-py3-none-any.whl", hash = "sha256:c10bc7d16daa3ae4a5784efcbd1855d610c0e087c21d185e52fa018b3a6c4249"}, - {file = "pytest_celery-1.0.0.tar.gz", hash = "sha256:17a066b1554d4fa8797d4928e8b8cda1bfb441dae4688ca29fdbde28ffa49ff7"}, -] - -[package.dependencies] -celery = "*" -debugpy = ">=1.8.1,<2.0.0" -docker = ">=7.0.0,<8.0.0" -psutil = ">=5.9.7" -pytest-docker-tools = ">=3.1.3" -retry = ">=0.9.2" -setuptools = ">=69.1.0" - -[package.extras] -all = ["python-memcached", "redis"] -memcached = ["python-memcached"] -redis = ["redis"] - [[package]] name = "pytest-cov" version = "5.0.0" @@ -1997,21 +1833,6 @@ pytest = ">=7.0.0" docs = ["sphinx", "sphinx-rtd-theme"] testing = ["Django", "django-configurations (>=2.0)"] -[[package]] -name = "pytest-docker-tools" -version = "3.1.3" -description = "Docker integration tests for pytest" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "pytest_docker_tools-3.1.3-py3-none-any.whl", hash = "sha256:63e659043160f41d89f94ea42616102594bcc85682aac394fcbc14f14cd1b189"}, - {file = "pytest_docker_tools-3.1.3.tar.gz", hash = "sha256:c7e28841839d67b3ac80ad7b345b953701d5ae61ffda97586114244292aeacc0"}, -] - -[package.dependencies] -docker = ">=4.3.1" -pytest = ">=6.0.1" - [[package]] name = "pytest-mock" version = "3.14.0" @@ -2481,21 +2302,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "retry" -version = "0.9.2" -description = "Easy to use retry decorator." 
-optional = false -python-versions = "*" -files = [ - {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, - {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, -] - -[package.dependencies] -decorator = ">=3.4.2" -py = ">=1.4.26,<2.0.0" - [[package]] name = "ruff" version = "0.4.7" @@ -2908,17 +2714,6 @@ files = [ {file = "vermin-1.6.0.tar.gz", hash = "sha256:6266ca02f55d1c2aa189a610017c132eb2d1934f09e72a955b1eb3820ee6d4ef"}, ] -[[package]] -name = "vine" -version = "5.1.0" -description = "Python promises." -optional = false -python-versions = ">=3.6" -files = [ - {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, - {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, -] - [[package]] name = "wait-for-it" version = "2.2.2" @@ -3148,4 +2943,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "556fdbdc11d158639e6df5f55dc5c0328568957154724f33dadab66954d3c875" +content-hash = "8ce71edb4c16d8d3588bd27c60b5642b976855ce23d6b53f931bfa865b661e3f" diff --git a/pyproject.toml b/pyproject.toml index 7c3a9cb..e2cad48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ django-registration-redux = "^2.10" django-revproxy = "^0.12.0" django-tables2 = "^2.3.3" djangorestframework = "^3.13.1" +procrastinate = { extras = ["django"], version = "^2.6.0" } psycopg = { extras = ["binary"], version = "^3.1.12" } python = "^3.11" pytz = "^2024.1" @@ -47,7 +48,6 @@ ipython = "^8.1.1" pyright = "^1.1.351" pytest = "^8.1.1" pytest-asyncio = "^0.23.6" -pytest-celery = "^1.0.0" pytest-cov = "^5.0.0" pytest-django = "^4.5.2" pytest-mock = "^3.10.0" diff --git a/tasks.py b/tasks.py index 458f8a8..bbfb0b9 100644 --- a/tasks.py +++ 
b/tasks.py @@ -14,6 +14,29 @@ manage_cmd = (project_dir / "example_project" / "manage.py").as_posix() +@task +def compose_up( + ctx: Context, + no_build: bool = False, +): + """Start example project containers""" + build_opt = "--no-build" if no_build else "--build" + cmd = f"docker compose up {build_opt} --detach" + ctx.run(cmd, pty=True) + + +@task +def compose_down( + ctx: Context, + cleanup: bool = False, +): + """Stop example project containers""" + cmd = "docker compose down" + if cleanup: + cmd += " --remove-orphans --volumes" + ctx.run(cmd, pty=True) + + @task def startdev(ctx: Context): migrate(ctx) From c816f82a8316538c8bf0c34533efb64fb5621542 Mon Sep 17 00:00:00 2001 From: Kai Schlamp Date: Sat, 29 Jun 2024 14:33:13 +0000 Subject: [PATCH 2/4] Use multi compose and settings files for example project (similar to ADIT and RADIS itself) --- .vscode/settings.json | 2 +- Dockerfile | 18 + adit_radis_shared/conftest.py | 49 ++- adit_radis_shared/invoke_tasks.py | 328 ++++++++++++++++++ compose/docker-compose.base.yml | 40 +++ .../docker-compose.dev.yml | 26 +- compose/docker-compose.prod.yml | 66 ++++ example.env | 2 +- example_project/example_project/asgi.py | 2 +- .../example_project/settings/__init__.py | 0 .../{settings.py => settings/base.py} | 60 +--- .../example_project/settings/development.py | 43 +++ .../example_project/settings/production.py | 21 ++ .../example_project/settings/test.py | 10 + example_project/example_project/wsgi.py | 2 +- example_project/manage.py | 2 +- pyproject.toml | 2 +- tasks.py | 225 +++--------- 18 files changed, 652 insertions(+), 246 deletions(-) create mode 100644 adit_radis_shared/invoke_tasks.py create mode 100644 compose/docker-compose.base.yml rename docker-compose.yml => compose/docker-compose.dev.yml (67%) create mode 100644 compose/docker-compose.prod.yml create mode 100644 example_project/example_project/settings/__init__.py rename example_project/example_project/{settings.py => settings/base.py} (78%) create mode 
100644 example_project/example_project/settings/development.py create mode 100644 example_project/example_project/settings/production.py create mode 100644 example_project/example_project/settings/test.py diff --git a/.vscode/settings.json b/.vscode/settings.json index 59b1f12..2d4ed6c 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -27,7 +27,7 @@ "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[dockercompose]": { - "editor.defaultFormatter": "ms-azuretools.vscode-docker" + "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[dockerfile]": { "editor.defaultFormatter": "ms-azuretools.vscode-docker" diff --git a/Dockerfile b/Dockerfile index 6dd05b5..d68b57b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -73,5 +73,23 @@ RUN poetry install # Install requirements for end-to-end testing RUN playwright install --with-deps chromium +# Required folders for web service +RUN mkdir -p /var/www/web/logs \ + /var/www/web/static \ + /var/www/web/ssl + # will become mountpoint of our code WORKDIR /app + + +# `production` image used for runtime +FROM python-base as production +COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH +COPY . 
/app/ + +# Required folders for web service +RUN mkdir -p /var/www/web/logs \ + /var/www/web/static \ + /var/www/web/ssl + +WORKDIR /app diff --git a/adit_radis_shared/conftest.py b/adit_radis_shared/conftest.py index 186de58..bbeb19e 100644 --- a/adit_radis_shared/conftest.py +++ b/adit_radis_shared/conftest.py @@ -1,7 +1,9 @@ import time -from typing import Callable +from typing import Callable, Generator import pytest +from django.db import connection +from django_test_migrations.migrator import Migrator from playwright.sync_api import Locator, Page, Response from adit_radis_shared.accounts.factories import UserFactory @@ -61,3 +63,48 @@ def _create_and_login_user(server_url: str): return user return _create_and_login_user + + +@pytest.fixture +def migrator(migrator: Migrator) -> Generator[Migrator, None, None]: + yield migrator + + # We have to manually cleanup the Procrastinate tables, functions and types + # as otherwise the reset of django_test_migrations will fail + # See https://github.com/procrastinate-org/procrastinate/issues/1090 + with connection.cursor() as cursor: + cursor.execute(""" + DO $$ + DECLARE + prefix text := 'procrastinate'; + BEGIN + -- Drop tables + EXECUTE ( + SELECT string_agg('DROP TABLE IF EXISTS ' || quote_ident(tablename) + || ' CASCADE;', ' ') + FROM pg_tables + WHERE tablename LIKE prefix || '%' + ); + + -- Drop functions + EXECUTE ( + SELECT string_agg( + 'DROP FUNCTION IF EXISTS ' || quote_ident(n.nspname) || '.' 
+ || quote_ident(p.proname) || '(' + || pg_catalog.pg_get_function_identity_arguments(p.oid) || ') CASCADE;', + ' ' + ) + FROM pg_proc p + LEFT JOIN pg_namespace n ON n.oid = p.pronamespace + WHERE p.proname LIKE prefix || '%' + ); + + -- Drop types + EXECUTE ( + SELECT string_agg('DROP TYPE IF EXISTS ' || quote_ident(typname) + || ' CASCADE;', ' ') + FROM pg_type + WHERE typname LIKE prefix || '%' + ); + END $$; + """) diff --git a/adit_radis_shared/invoke_tasks.py b/adit_radis_shared/invoke_tasks.py new file mode 100644 index 0000000..80c9ced --- /dev/null +++ b/adit_radis_shared/invoke_tasks.py @@ -0,0 +1,328 @@ +import os +import shutil +import sys +from pathlib import Path +from typing import Literal + +from dotenv import set_key +from invoke.context import Context + +Environments = Literal["dev", "prod"] + + +class InvokeTasks: + def __init__(self, project_name: str, project_dir: Path): + self._project_name = project_name + self._project_dir = project_dir + self._compose_dir = project_dir / "compose" + + def _get_compose_base_file(self): + return self._compose_dir / "docker-compose.base.yml" + + def _get_compose_env_file(self, env: Environments): + if env == "dev": + return self._compose_dir / "docker-compose.dev.yml" + elif env == "prod": + return self._compose_dir / "docker-compose.prod.yml" + else: + raise ValueError(f"Unknown environment: {env}") + + def _get_stack_name(self, env: Environments): + if env == "dev": + return f"{self._project_name}_dev" + elif env == "prod": + return f"{self._project_name}_prod" + else: + raise ValueError(f"Unknown environment: {env}") + + def _build_compose_cmd(self, env: Environments, profile: str | None = None): + cmd = "docker compose" + cmd += f" -f {self._get_compose_base_file()}" + cmd += f" -f {self._get_compose_env_file(env)}" + cmd += f" -p {self._get_stack_name(env)}" + if profile: + cmd += f" --profile {profile}" + return cmd + + def _check_compose_up(self, ctx: Context, env: Environments): + stack_name = 
self._get_stack_name(env) + result = ctx.run("docker compose ls", hide=True, warn=True) + assert result and result.ok + for line in result.stdout.splitlines(): + if line.startswith(stack_name) and line.find("running") != -1: + return True + return False + + def _find_running_container_id(self, ctx: Context, env: Environments, name: str): + stack_name = self._get_stack_name(env) + sep = "-" if env == "dev" else "_" + cmd = f"docker ps -q -f name={stack_name}{sep}{name} -f status=running" + cmd += " | head -n1" + result = ctx.run(cmd, hide=True, warn=True) + if result and result.ok: + container_id = result.stdout.strip() + if container_id: + return container_id + return None + + def _confirm(self, question: str) -> bool: + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + while True: + sys.stdout.write(f"{question} [y/N] ") + choice = input().lower() + if choice == "": + return False + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + def compose_up( + self, + ctx: Context, + env: Environments = "dev", + no_build: bool = False, + profile: str | None = None, + ): + """Start containers in specified environment""" + build_opt = "--no-build" if no_build else "--build" + cmd = f"{self._build_compose_cmd(env, profile)} up {build_opt} --detach" + ctx.run(cmd, pty=True) + + def compose_down( + self, + ctx: Context, + env: Environments = "dev", + profile: str | None = None, + cleanup: bool = False, + ): + """Stop containers in specified environment""" + cmd = f"{self._build_compose_cmd(env, profile)} --profile {profile} down" + if cleanup: + cmd += " --remove-orphans --volumes" + ctx.run(cmd, pty=True) + + def stack_deploy2(self, ctx: Context, env: Environments = "prod", build: bool = False): + if build: + cmd = f"{self._build_compose_cmd(env)} build" + ctx.run(cmd, pty=True) + + cmd = ( + f"docker stack deploy -c {self._get_compose_env_file(env)} 
{self._get_stack_name(env)}" + ) + ctx.run(cmd, pty=True) + + def stack_deploy(self, ctx: Context, env: Environments = "prod", build: bool = False): + """Deploy the stack to Docker Swarm (prod by default!). Optional build it before.""" + if build: + cmd = f"{self._build_compose_cmd(env)} build" + ctx.run(cmd, pty=True) + + cmd = "docker stack deploy --detach " + cmd += f" -c {self._get_compose_base_file()}" + cmd += f" -c {self._get_compose_env_file(env)}" + cmd += f" {self._get_stack_name(env)}" + ctx.run(cmd, pty=True) + + def stack_rm(self, ctx: Context, env: Environments = "prod"): + cmd = f"docker stack rm {self._get_stack_name(env)}" + ctx.run(cmd, pty=True) + + def web_shell(self, ctx: Context, env: Environments = "dev"): + """Open Python shell in web container of specified environment""" + cmd = f"{self._build_compose_cmd(env)} exec web python manage.py shell_plus" + ctx.run(cmd, pty=True) + + def format(self, ctx: Context): + """Format the source code with ruff and djlint""" + # Format Python code + format_code_cmd = "poetry run ruff format ." + ctx.run(format_code_cmd, pty=True) + # Sort Python imports + sort_imports_cmd = "poetry run ruff check . --fix --select I" + ctx.run(sort_imports_cmd, pty=True) + # Format Django templates + format_templates_cmd = "poetry run djlint . --reformat" + ctx.run(format_templates_cmd, pty=True) + + def lint(self, ctx: Context): + """Lint the source code (ruff, djlint, pyright)""" + cmd_ruff = "poetry run ruff check ." + ctx.run(cmd_ruff, pty=True) + cmd_djlint = "poetry run djlint . 
--lint" + ctx.run(cmd_djlint, pty=True) + cmd_pyright = "poetry run pyright" + ctx.run(cmd_pyright, pty=True) + + def test( + self, + ctx: Context, + path: str | None = None, + cov: bool | str = False, + html: bool = False, + keyword: str | None = None, + mark: str | None = None, + stdout: bool = False, + failfast: bool = False, + ): + """Run the test suite""" + if not self._check_compose_up(ctx, "dev"): + sys.exit( + "Integration tests need dev containers running.\nRun 'invoke compose-up' first." + ) + + cmd = ( + f"{self._build_compose_cmd('dev')} exec " + f"--env DJANGO_SETTINGS_MODULE={self._project_name}.settings.test web pytest " + ) + if cov: + cmd += "--cov " + if isinstance(cov, str): + cmd += f"={cov} " + if html: + cmd += "--cov-report=html" + if keyword: + cmd += f"-k {keyword} " + if mark: + cmd += f"-m {mark} " + if stdout: + cmd += "-s " + if failfast: + cmd += "-x " + if path: + cmd += path + ctx.run(cmd, pty=True) + + def reset_dev(self, ctx: Context): + """Reset the dev environment""" + # Wipe the database + flush_cmd = f"{self._build_compose_cmd('dev')} exec web python manage.py flush --noinput" + ctx.run(flush_cmd, pty=True) + # Re-populate the database with users and groups + populate_cmd = ( + f"{self._build_compose_cmd('dev')} exec web python manage.py " + "populate_users_and_groups" + ) + populate_cmd += " --users 20 --groups 3" + ctx.run(populate_cmd, pty=True) + + def init_workspace(self, ctx: Context): + """Initialize workspace for Github Codespaces or Gitpod""" + env_dev_file = f"{self._project_dir}/.env.dev" + if os.path.isfile(env_dev_file): + print("Workspace already initialized (.env.dev file exists).") + return + + shutil.copy(f"{self._project_dir}/example.env", env_dev_file) + + def modify_env_file(domain: str | None = None): + if domain: + url = f"https://{domain}" + hosts = f".localhost,127.0.0.1,[::1],{domain}" + set_key(env_dev_file, "DJANGO_CSRF_TRUSTED_ORIGINS", url, quote_mode="never") + set_key(env_dev_file, 
"DJANGO_ALLOWED_HOSTS", hosts, quote_mode="never") + set_key(env_dev_file, "DJANGO_INTERNAL_IPS", hosts, quote_mode="never") + set_key(env_dev_file, "SITE_BASE_URL", url, quote_mode="never") + set_key(env_dev_file, "SITE_DOMAIN", domain, quote_mode="never") + + set_key(env_dev_file, "FORCE_DEBUG_TOOLBAR", "true", quote_mode="never") + + if os.environ.get("CODESPACE_NAME"): + # Inside GitHub Codespaces + domain = f"{os.environ['CODESPACE_NAME']}-8000.preview.app.github.dev" + modify_env_file(domain) + elif os.environ.get("GITPOD_WORKSPACE_ID"): + # Inside Gitpod + result = ctx.run("gp url 8000", hide=True, pty=True) + assert result and result.ok + domain = result.stdout.strip().removeprefix("https://") + modify_env_file(domain) + else: + # Inside some local environment + modify_env_file() + + def show_outdated(self, ctx: Context): + """Show outdated dependencies""" + print("### Outdated Python dependencies ###") + poetry_cmd = "poetry show --outdated --top-level" + result = ctx.run(poetry_cmd, pty=True) + assert result and result.ok + print(result.stderr.strip()) + + print("### Outdated NPM dependencies ###") + npm_cmd = "npm outdated" + ctx.run(npm_cmd, pty=True) + + def try_github_actions(self, ctx: Context): + """Try Github Actions locally using Act""" + act_path = self._project_dir / "bin" / "act" + if not act_path.exists(): + print("Installing act...") + ctx.run( + "curl https://raw.githubusercontent.com/nektos/act/master/install.sh | sudo bash", + hide=True, + pty=True, + ) + ctx.run(f"{act_path} -P ubuntu-latest=catthehacker/ubuntu:act-latest", pty=True) + + def backup_db(self, ctx: Context, env: Environments = "prod"): + """Backup database + + For backup location see setting DBBACKUP_STORAGE_OPTIONS + For possible commands see: + https://django-dbbackup.readthedocs.io/en/master/commands.html + """ + settings = ( + f"{self._project_name}.settings.production" + if env == "prod" + else f"{self._project_name}.settings.development" + ) + web_container_id = 
self._find_running_container_id(ctx, env, "web") + cmd = ( + f"docker exec --env DJANGO_SETTINGS_MODULE={settings} " + f"{web_container_id} ./manage.py dbbackup --clean -v 2" + ) + ctx.run(cmd, pty=True) + + def restore_db(self, ctx: Context, env: Environments = "prod"): + """Restore database from backup""" + settings = ( + f"{self._project_name}.settings.production" + if env == "prod" + else f"{self._project_name}.settings.development" + ) + web_container_id = self._find_running_container_id(ctx, env, "web") + cmd = ( + f"docker exec --env DJANGO_SETTINGS_MODULE={settings} " + f"{web_container_id} ./manage.py dbrestore" + ) + ctx.run(cmd, pty=True) + + def bump_version(self, ctx: Context, rule: Literal["patch", "minor", "major"]): + """Bump version, create a tag, commit and push to GitHub""" + result = ctx.run("git status --porcelain", hide=True, pty=True) + assert result and result.ok + if result.stdout.strip(): + print("There are uncommitted changes. Aborting.") + sys.exit(1) + + ctx.run(f"poetry version {rule}", pty=True) + ctx.run("git add pyproject.toml", pty=True) + ctx.run("git commit -m 'Bump version'", pty=True) + ctx.run('git tag -a v$(poetry version -s) -m "Release v$(poetry version -s)"', pty=True) + ctx.run("git push --follow-tags", pty=True) + + def upgrade_postgresql(self, ctx: Context, env: Environments, version: str = "latest"): + print(f"Upgrading PostgreSQL database in {env} environment to {version}.") + print("Cave, make sure the whole stack is not stopped. 
Otherwise this will corrupt data!") + if self._confirm("Are you sure you want to proceed?"): + print("Starting docker container that upgrades the database files.") + print("Watch the output if everything went fine or if any further steps are necessary.") + volume = f"{self._get_stack_name(env)}_postgres_data" + ctx.run( + f"docker run -e POSTGRES_PASSWORD=postgres -v {volume}:/var/lib/postgresql/data " + f"pgautoupgrade/pgautoupgrade:{version}", + pty=True, + ) + else: + print("Cancelled") diff --git a/compose/docker-compose.base.yml b/compose/docker-compose.base.yml new file mode 100644 index 0000000..61ffdc8 --- /dev/null +++ b/compose/docker-compose.base.yml @@ -0,0 +1,40 @@ +x-app: &default-app + image: example_project_dev + depends_on: + - postgres + environment: + USE_DOCKER: 1 + ENABLE_REMOTE_DEBUGGING: 0 + DATABASE_URL: "psql://postgres:postgres@postgres.local:5432/postgres" + DJANGO_STATIC_ROOT: "/var/www/web/static/" + +services: + # We have to to define the init servce in base (even if we don't use it + # in development) to set the correct environment variables. + init: + <<: *default-app + hostname: init.local + volumes: + - web_data:/var/www/web + + web: + <<: *default-app + hostname: web.local + build: + context: .. 
+ volumes: + - web_data:/var/www/web + + worker: + <<: *default-app + hostname: worker.local + + postgres: + image: postgres:latest + hostname: postgres.local + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + web_data: + postgres_data: diff --git a/docker-compose.yml b/compose/docker-compose.dev.yml similarity index 67% rename from docker-compose.yml rename to compose/docker-compose.dev.yml index 8fbbdf6..2e8d951 100644 --- a/docker-compose.yml +++ b/compose/docker-compose.dev.yml @@ -1,24 +1,25 @@ x-app: &default-app - image: adit-radis-shared + image: example_project_dev volumes: - - .:/app - depends_on: - - postgres + - ..:/app env_file: - - .env + - ../.env.dev environment: - USE_DOCKER: 1 - DATABASE_URL: "psql://postgres:postgres@postgres.local:5432/postgres" + DJANGO_SETTINGS_MODULE: "example_project.settings.development" pull_policy: never - + services: + init: + profiles: + - never + web: <<: *default-app - hostname: web.local build: target: development ports: - - 8000:8000 + - "${WEB_HOST_PORT:-8000}:8000" + - "${WEB_DEBUG_PORT:-5678}:5678" command: > bash -c " cd example_project; @@ -30,7 +31,6 @@ services: worker: <<: *default-app - hostname: worker.local command: > bash -c " cd example_project; @@ -39,9 +39,5 @@ services: " postgres: - image: postgres:latest - hostname: postgres.local - ports: - - 5432:5432 environment: POSTGRES_PASSWORD: postgres diff --git a/compose/docker-compose.prod.yml b/compose/docker-compose.prod.yml new file mode 100644 index 0000000..0114c94 --- /dev/null +++ b/compose/docker-compose.prod.yml @@ -0,0 +1,66 @@ +x-app: &default-app + image: example_project_prod + env_file: + - ../.env.prod + environment: + DJANGO_SETTINGS_MODULE: "example_project.settings.production" + SSL_CERT_FILE: "/var/www/web/ssl/cert.pem" + SSL_KEY_FILE: "/var/www/web/ssl/key.pem" + +x-deploy: &deploy + replicas: 1 + restart_policy: + condition: on-failure + max_attempts: 3 + +services: + # We can't use those manage commands inside the 
web container in production because + # the web service may have multiple replicas. So we make sure to only run them once + # and wait for it to be finished by the web service containers. + init: + <<: *default-app + command: > + bash -c " + cd example_project; + wait-for-it -s postgres.local:5432 -t 60 && + ./manage.py migrate && + ./manage.py collectstatic --no-input && + ./manage.py create_admin && + ./manage.py generate_cert && + ./manage.py ok_server --host 0.0.0.0 --port 8000 + " + deploy: + <<: *deploy + + web: + <<: *default-app + build: + target: production + ports: + - "${WEB_HTTP_PORT:-80}:80" + - "${WEB_HTTPS_PORT:-443}:443" + command: > + bash -c " + cd example_project; + wait-for-it -s init.local:8000 -t 300 && + echo 'Starting web server ...' + daphne -b 0.0.0.0 -p 80 -e ssl:443:privateKey=/var/www/web/ssl/key.pem:certKey=/var/www/web/ssl/cert.pem example_project.asgi:application + " + deploy: + <<: *deploy + replicas: 3 + + worker: + <<: *default-app + command: > + bash -c " + cd example_project; + wait-for-it -s postgres.local:5432 -t 60 && + ./manage.py worker + " + + postgres: + env_file: + - ../.env.prod + deploy: + <<: *deploy diff --git a/example.env b/example.env index c3d14c7..34f48e8 100644 --- a/example.env +++ b/example.env @@ -21,7 +21,7 @@ FORCE_DEBUG_TOOLBAR=false POSTGRES_PASSWORD=mysecret SITE_BASE_URL=http://localhost:8000 SITE_DOMAIN=localhost -SITE_NAME="ADIT/RADIS Shared" +SITE_NAME="Exampe Project" SSL_HOSTNAME=localhost SSL_IP_ADDRESSES=127.0.0.1 USER_TIME_ZONE=Europe/Berlin diff --git a/example_project/example_project/asgi.py b/example_project/example_project/asgi.py index 34bd61f..480c901 100644 --- a/example_project/example_project/asgi.py +++ b/example_project/example_project/asgi.py @@ -11,6 +11,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings") +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings.development") 
application = get_asgi_application() diff --git a/example_project/example_project/settings/__init__.py b/example_project/example_project/settings/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/example_project/example_project/settings.py b/example_project/example_project/settings/base.py similarity index 78% rename from example_project/example_project/settings.py rename to example_project/example_project/settings/base.py index 9f0dcb9..d0fdd2b 100644 --- a/example_project/example_project/settings.py +++ b/example_project/example_project/settings/base.py @@ -15,25 +15,15 @@ import environ import toml +env = environ.Env() + # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent -env = environ.Env( - DJANGO_ALLOWED_HOSTS=(list, ["localhost"]), - DJANGO_CSRF_TRUSTED_ORIGINS=(list, []), - SITE_BASE_URL=(str, "http://localhost:8000"), - SITE_DOMAIN=(str, "localhost"), - SITE_NAME=(str, "Example Project"), - DJANGO_INTERNAL_IPS=(list, ["127.0.0.1"]), - FORCE_DEBUG_TOOLBAR=(bool, False), - USER_TIME_ZONE=(str, "Europe/Berlin"), - SERVER_EMAIL=(str, "adit.support@example.org"), - SUPPORT_EMAIL=(str, "adit.support@example.org"), - TOKEN_AUTHENTICATION_SALT=(str, "Rn4YNfgAar5dYbPu"), -) - -# Take environment variables from .env file -env.read_env(BASE_DIR / ".." / ".env") +READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=False) # type: ignore +if READ_DOT_ENV_FILE: + # OS environment variables take precedence over variables from .env + env.read_env(str(BASE_DIR / ".env")) # Read pyproject.toml to fetch current version. We do this conditionally as the # ADIT client library uses ADIT for integration tests installed as a package @@ -44,24 +34,17 @@ else: PROJECT_VERSION = "???" -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/5.0/howto/deployment/checklist/ - # SECURITY WARNING: keep the secret key used in production secret! 
SECRET_KEY = "django-insecure-4q3@c!62pzy74p2dck1^=d3dyl_gc#zk1bewa@8ch3(czs3bir" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True -ALLOWED_HOSTS = env("DJANGO_ALLOWED_HOSTS") - -CSRF_TRUSTED_ORIGINS = env("DJANGO_CSRF_TRUSTED_ORIGINS") - -EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" +CSRF_TRUSTED_ORIGINS = env.list("DJANGO_CSRF_TRUSTED_ORIGINS", default=[]) # type: ignore -SITE_BASE_URL = env("SITE_BASE_URL") -SITE_DOMAIN = env("SITE_DOMAIN") -SITE_NAME = env("SITE_NAME") +SITE_BASE_URL = env.str("SITE_BASE_URL", default="http://localhost:8000") # type: ignore +SITE_DOMAIN = env.str("SITE_DOMAIN", default="localhost") # type: ignore +SITE_NAME = env.str("SITE_NAME", default="Example Project") # type: ignore SITE_META_KEYWORDS = "ADIT,RADIS" SITE_META_DESCRIPTION = "Shared apps between ADIT and RADIS" SITE_PROJECT_URL = "https://github.com/openradx/adit-radis-shared" @@ -90,9 +73,6 @@ "adit_radis_shared.accounts.apps.AccountsConfig", "adit_radis_shared.token_authentication.apps.TokenAuthenticationConfig", "example_project.example_app.apps.ExampleAppConfig", - "debug_toolbar", - "debug_permissions", - "django_browser_reload", ] MIDDLEWARE = [ @@ -104,8 +84,6 @@ "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "django_htmx.middleware.HtmxMiddleware", - "debug_toolbar.middleware.DebugToolbarMiddleware", - "django_browser_reload.middleware.BrowserReloadMiddleware", "adit_radis_shared.accounts.middlewares.ActiveGroupMiddleware", "adit_radis_shared.common.middlewares.MaintenanceMiddleware", "adit_radis_shared.common.middlewares.TimezoneMiddleware", @@ -198,20 +176,14 @@ # https://docs.djangoproject.com/en/5.0/howto/static-files/ STATIC_URL = "static/" +STATIC_ROOT = env.str("DJANGO_STATIC_ROOT", default=(BASE_DIR / "staticfiles")) # type: ignore + # Default primary key field type # https://docs.djangoproject.com/en/5.0/ref/settings/#default-auto-field 
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" -INTERNAL_IPS = env("DJANGO_INTERNAL_IPS") - -if env("FORCE_DEBUG_TOOLBAR"): - # https://github.com/jazzband/django-debug-toolbar/issues/1035 - from django.conf import settings - - DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda request: settings.DEBUG} - # A timezone that is used for users of the web interface. -USER_TIME_ZONE = env("USER_TIME_ZONE") +USER_TIME_ZONE = env.str("USER_TIME_ZONE", default="Europe/Berlin") # type: ignore # For crispy forms CRISPY_ALLOWED_TEMPLATE_PACKS = "bootstrap5" @@ -222,13 +194,13 @@ # An Email address used by the ADIT server to notify about finished jobs and # management notifications. -SERVER_EMAIL = env("DJANGO_SERVER_EMAIL") +SERVER_EMAIL = env.str("DJANGO_SERVER_EMAIL", default="support@openradx.test") # type: ignore DEFAULT_FROM_EMAIL = SERVER_EMAIL # A support Email address that is presented to the users where # they can get support. -SUPPORT_EMAIL = env("SUPPORT_EMAIL") +SUPPORT_EMAIL = env.str("SUPPORT_EMAIL", default=SERVER_EMAIL) # type: ignore # The salt that is used for hashing new tokens in the token authentication app. # Cave, changing the salt after some tokens were already generated makes them all invalid! 
-TOKEN_AUTHENTICATION_SALT = env("TOKEN_AUTHENTICATION_SALT") +TOKEN_AUTHENTICATION_SALT = env.str("TOKEN_AUTHENTICATION_SALT", default="Rn4YNfgAar5dYbPu") # type: ignore diff --git a/example_project/example_project/settings/development.py b/example_project/example_project/settings/development.py new file mode 100644 index 0000000..ae9cdef --- /dev/null +++ b/example_project/example_project/settings/development.py @@ -0,0 +1,43 @@ +from .base import * # noqa: F403 +from .base import env + +DEBUG = True + +ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[]) # type: ignore + +INTERNAL_IPS = env.list("DJANGO_INTERNAL_IPS", default=["127.0.0.1"]) # type: ignore + +ENABLE_REMOTE_DEBUGGING = env.bool("ENABLE_REMOTE_DEBUGGING", default=False) # type: ignore + +SECRET_KEY = env.str( + "DJANGO_SECRET_KEY", + default="ug+cbde301nelb)(di0^p21osy3h=t$%2$-8d&0#xlyfj8&==5", # type: ignore +) + +EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" + +INSTALLED_APPS += [ # noqa: F405 + "debug_toolbar", + "debug_permissions", + "django_browser_reload", +] + +MIDDLEWARE += [ # noqa: F405 + "debug_toolbar.middleware.DebugToolbarMiddleware", + "django_browser_reload.middleware.BrowserReloadMiddleware", +] + +if env.bool("FORCE_DEBUG_TOOLBAR", default=False): # type: ignore + # https://github.com/jazzband/django-debug-toolbar/issues/1035 + from django.conf import settings + + DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda request: settings.DEBUG} + +if env.bool("USE_DOCKER", default=False): # type: ignore + import socket + + # For Debug Toolbar to show up on Docker Compose in development mode. + # This only works when browsed from the host where the containers are run. + # If viewed from somewhere else then DJANGO_INTERNAL_IPS must be set. 
+ hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) + INTERNAL_IPS += [".".join(ip.split(".")[:-1] + ["1"]) for ip in ips] diff --git a/example_project/example_project/settings/production.py b/example_project/example_project/settings/production.py new file mode 100644 index 0000000..c4180ac --- /dev/null +++ b/example_project/example_project/settings/production.py @@ -0,0 +1,21 @@ +from .base import * # noqa: F403 +from .base import env + +# Production settings, see +# https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ + +DEBUG = False + +ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS") + +SECRET_KEY = env.str("DJANGO_SECRET_KEY") + +DATABASES["default"]["PASSWORD"] = env.str("POSTGRES_PASSWORD") # noqa: F405 + +EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend" +EMAIL_TIMEOUT = 60 +EMAIL_HOST = env.str("DJANGO_EMAIL_HOST") +EMAIL_PORT = env.int("DJANGO_EMAIL_PORT", default=25) # type: ignore +EMAIL_HOST_USER = env.str("DJANGO_EMAIL_HOST_USER", default="") # type: ignore +EMAIL_HOST_PASSWORD = env.str("DJANGO_EMAIL_HOST_PASSWORD", default="") # type: ignore +EMAIL_USE_TLS = env.bool("DJANGO_EMAIL_USE_TLS", default=False) # type: ignore diff --git a/example_project/example_project/settings/test.py b/example_project/example_project/settings/test.py new file mode 100644 index 0000000..8518ae9 --- /dev/null +++ b/example_project/example_project/settings/test.py @@ -0,0 +1,10 @@ +from .development import * # noqa: F403 + +# We must force the background worker that is started in a integration test +# as a subprocess to use the test database. 
+if not DATABASES["default"]["NAME"].startswith("test_"): # noqa: F405 + test_database = "test_" + DATABASES["default"]["NAME"] # noqa: F405 + DATABASES["default"]["NAME"] = test_database # noqa: F405 + DATABASES["default"]["TEST"] = {"NAME": test_database} # noqa: F405 + +DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda request: False} diff --git a/example_project/example_project/wsgi.py b/example_project/example_project/wsgi.py index 05cfe63..73a0deb 100644 --- a/example_project/example_project/wsgi.py +++ b/example_project/example_project/wsgi.py @@ -11,6 +11,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings") +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings.development") application = get_wsgi_application() diff --git a/example_project/manage.py b/example_project/manage.py index 15cf169..2840d2b 100755 --- a/example_project/manage.py +++ b/example_project/manage.py @@ -7,7 +7,7 @@ def main(): """Run administrative tasks.""" - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings") + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings.development") try: from django.core.management import execute_from_command_line except ImportError as exc: diff --git a/pyproject.toml b/pyproject.toml index e2cad48..f0ce239 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ typeCheckingMode = "basic" reportUnnecessaryTypeIgnoreComment = true [tool.pytest.ini_options] -DJANGO_SETTINGS_MODULE = "example_project.settings" +DJANGO_SETTINGS_MODULE = "example_project.settings.development" pythonpath = ["example_project"] testpaths = ["adit_radis_shared"] python_files = ["tests.py", "test_*.py", "*_tests.py"] diff --git a/tasks.py b/tasks.py index bbfb0b9..0b1b79b 100644 --- a/tasks.py +++ b/tasks.py @@ -1,28 +1,21 @@ import os import shutil -import sys -from os import environ from pathlib import Path from typing 
import Literal -from dotenv import set_key from invoke.context import Context from invoke.tasks import task -project_dir = Path(__file__).resolve().parent +from adit_radis_shared.invoke_tasks import Environments, InvokeTasks -manage_cmd = (project_dir / "example_project" / "manage.py").as_posix() +project_dir = Path(__file__).resolve().parent +invoke_helper = InvokeTasks("example_project", project_dir) @task -def compose_up( - ctx: Context, - no_build: bool = False, -): +def compose_up(ctx: Context, no_build=False, profile: str | None = None): """Start example project containers""" - build_opt = "--no-build" if no_build else "--build" - cmd = f"docker compose up {build_opt} --detach" - ctx.run(cmd, pty=True) + invoke_helper.compose_up(ctx, "dev", no_build=no_build, profile=profile) @task @@ -31,79 +24,31 @@ def compose_down( cleanup: bool = False, ): """Stop example project containers""" - cmd = "docker compose down" - if cleanup: - cmd += " --remove-orphans --volumes" - ctx.run(cmd, pty=True) - - -@task -def startdev(ctx: Context): - migrate(ctx) - populate_db(ctx) - runserver(ctx) - - -@task -def runserver(ctx: Context): - """Run the development server of the example project""" - ctx.run(f"{manage_cmd} runserver", pty=True) - - -@task -def makemigrations(ctx: Context): - """Make Django migrations""" - ctx.run(f"{manage_cmd} makemigrations", pty=True) - - -@task -def migrate(ctx: Context): - """Apply Django migrations""" - ctx.run(f"{manage_cmd} migrate", pty=True) + invoke_helper.compose_down(ctx, "dev", cleanup=cleanup) @task -def reset_db(ctx: Context): - """Reset the database - - Can only be done when dev server is not running and needs django_extensions installed. 
- """ - ctx.run(f"{manage_cmd} reset_db --no-input", pty=True) - ctx.run(f"{manage_cmd} migrate", pty=True) +def stack_deploy(ctx: Context, env: Environments = "prod", build: bool = False): + """Deploy the stack""" + invoke_helper.stack_deploy(ctx, env=env, build=build) @task -def populate_db(ctx: Context): - """Populate database with users and groups""" - cmd = f"{manage_cmd} populate_users_and_groups" - cmd += " --users 30" - cmd += " --groups 5" - ctx.run(cmd, pty=True) +def stack_rm(ctx: Context, env: Environments = "prod"): + """Remove the stack""" + invoke_helper.stack_rm(ctx, env=env) @task def format(ctx: Context): """Format the source code with ruff and djlint""" - # Format Python code - format_code_cmd = "poetry run ruff format ." - ctx.run(format_code_cmd, pty=True) - # Sort Python imports - sort_imports_cmd = "poetry run ruff check . --fix --select I" - ctx.run(sort_imports_cmd, pty=True) - # Format Django templates - format_templates_cmd = "poetry run djlint . --reformat" - ctx.run(format_templates_cmd, pty=True) + invoke_helper.format(ctx) @task def lint(ctx: Context): """Lint the source code (ruff, djlint, pyright)""" - cmd_ruff = "poetry run ruff check ." - ctx.run(cmd_ruff, pty=True) - cmd_djlint = "poetry run djlint . 
--lint" - ctx.run(cmd_djlint, pty=True) - cmd_pyright = "poetry run pyright" - ctx.run(cmd_pyright, pty=True) + invoke_helper.lint(ctx) @task @@ -118,31 +63,40 @@ def test( failfast: bool = False, ): """Run the test suite""" - cmd = "pytest " - if cov: - cmd += "--cov " - if isinstance(cov, str): - cmd += f"={cov} " - if html: - cmd += "--cov-report=html" - if keyword: - cmd += f"-k {keyword} " - if mark: - cmd += f"-m {mark} " - if stdout: - cmd += "-s " - if failfast: - cmd += "-x " - if path: - cmd += path - ctx.run(cmd, pty=True) + invoke_helper.test( + ctx, + path=path, + cov=cov, + html=html, + keyword=keyword, + mark=mark, + stdout=stdout, + failfast=failfast, + ) + + +@task +def init_workspace(ctx: Context): + """Initialize workspace for Github Codespaces or Gitpod""" + invoke_helper.init_workspace(ctx) + + +@task +def show_outdated(ctx: Context): + """Show outdated dependencies""" + invoke_helper.show_outdated(ctx) @task -def ci(ctx: Context): - """Run the continuous integration (linting and tests)""" - lint(ctx) - test(ctx, cov=True) +def try_github_actions(ctx: Context): + """Try Github Actions locally using Act""" + invoke_helper.try_github_actions(ctx) + + +@task +def bump_version(ctx: Context, rule: Literal["patch", "minor", "major"]): + """Bump version, create a tag, commit and push to GitHub""" + invoke_helper.bump_version(ctx, rule) @task @@ -168,92 +122,3 @@ def copy_file(file: str, filename: str | None = None): copy_file("node_modules/htmx.org/dist/htmx.js") copy_file("node_modules/htmx.org/dist/ext/ws.js", "htmx-ws.js") copy_file("node_modules/htmx.org/dist/ext/alpine-morph.js", "htmx-alpine-morph.js") - - -@task -def init_workspace(ctx: Context): - """Initialize workspace for Github Codespaces or Gitpod""" - env_dev_file = f"{project_dir}/.env" - if os.path.isfile(env_dev_file): - print("Workspace already initialized (.env.dev file exists).") - return - - shutil.copy(f"{project_dir}/example.env", env_dev_file) - - def modify_env_file(domain: 
str | None = None): - if domain: - url = f"https://{domain}" - hosts = f".localhost,127.0.0.1,[::1],{domain}" - set_key(env_dev_file, "DJANGO_CSRF_TRUSTED_ORIGINS", url, quote_mode="never") - set_key(env_dev_file, "DJANGO_ALLOWED_HOSTS", hosts, quote_mode="never") - set_key(env_dev_file, "DJANGO_INTERNAL_IPS", hosts, quote_mode="never") - set_key(env_dev_file, "SITE_BASE_URL", url, quote_mode="never") - set_key(env_dev_file, "SITE_DOMAIN", domain, quote_mode="never") - - set_key(env_dev_file, "FORCE_DEBUG_TOOLBAR", "true", quote_mode="never") - - if environ.get("CODESPACE_NAME"): - # Inside GitHub Codespaces - domain = f"{environ['CODESPACE_NAME']}-8000.preview.app.github.dev" - modify_env_file(domain) - elif environ.get("GITPOD_WORKSPACE_ID"): - # Inside Gitpod - result = ctx.run("gp url 8000", pty=True, hide=True) - assert result and result.ok - domain = result.stdout.strip().removeprefix("https://") - modify_env_file(domain) - else: - # Inside some local environment - modify_env_file() - - -@task -def show_outdated(ctx: Context): - """Show outdated dependencies""" - print("### Outdated Python dependencies ###") - poetry_cmd = "poetry show --outdated --top-level" - result = ctx.run(poetry_cmd, pty=True) - assert result - print(result.stderr.strip()) - - print("### Outdated NPM dependencies ###") - npm_cmd = "npm outdated" - ctx.run(npm_cmd, pty=True) - - -@task -def upgrade(ctx: Context): - """Upgrade Python and JS packages""" - ctx.run("poetry update", pty=True) - ctx.run("npm update", pty=True) - copy_statics(ctx) - - -@task -def try_github_actions(ctx: Context): - """Try Github Actions locally using Act""" - act_path = project_dir / "bin" / "act" - if not act_path.exists(): - print("Installing act...") - ctx.run( - "curl https://raw.githubusercontent.com/nektos/act/master/install.sh | sudo bash", - pty=True, - hide=True, - ) - ctx.run(f"{act_path} -P ubuntu-latest=catthehacker/ubuntu:act-latest", pty=True) - - -@task -def bump_version(ctx: Context, rule: 
Literal["patch", "minor", "major"]): - """Bump version, create a tag, commit and push to GitHub""" - result = ctx.run("git status --porcelain", pty=True, hide=True) - assert result - if result.stdout.strip(): - print("There are uncommitted changes. Aborting.") - sys.exit(1) - - ctx.run(f"poetry version {rule}", pty=True) - ctx.run("git add pyproject.toml", pty=True) - ctx.run("git commit -m 'Bump version'", pty=True) - ctx.run('git tag -a v$(poetry version -s) -m "Release v$(poetry version -s)"', pty=True) - ctx.run("git push --follow-tags", pty=True) From 8faa0a717130c63247c7a23873e5620f9c84ef62 Mon Sep 17 00:00:00 2001 From: Kai Schlamp Date: Sat, 29 Jun 2024 15:40:37 +0000 Subject: [PATCH 3/4] Fix Github Actions CI for multi container setup --- .github/workflows/ci.yml | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a500fbe..a2047a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ jobs: fail-fast: false matrix: python-version: ["3.12"] - poetry-version: ["1.7.1"] + poetry-version: ["1.8.3"] os: [ubuntu-latest] runs-on: ${{ matrix.os }} timeout-minutes: 15 @@ -29,8 +29,19 @@ jobs: run: poetry install --with dev - name: Configure environment run: poetry run invoke init-workspace - - name: Setup Playwright - run: poetry run playwright install --with-deps + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Build and cache Docker images + uses: docker/build-push-action@v5 + with: + context: . 
+ target: development + load: true + tags: adit_dev:latest + cache-from: type=gha + cache-to: type=gha,mode=max + - name: Start Docker containers + run: poetry run invoke compose-up --no-build - name: Run linting # https://github.com/actions/runner/issues/241#issuecomment-745902718 shell: 'script -q -e -c "bash {0}"' @@ -38,3 +49,6 @@ jobs: - name: Run tests shell: 'script -q -e -c "bash {0}"' run: poetry run invoke test --cov + - name: Stop Docker containers + if: ${{ always() }} + run: poetry run invoke compose-down From 731a2d8e83fd5384721c83eff16631e97a644b22 Mon Sep 17 00:00:00 2001 From: Kai Schlamp Date: Sat, 29 Jun 2024 16:04:13 +0000 Subject: [PATCH 4/4] Fix CI --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a2047a4..32900e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: context: . target: development load: true - tags: adit_dev:latest + tags: example_project_dev:latest cache-from: type=gha cache-to: type=gha,mode=max - name: Start Docker containers