diff --git a/README.md b/README.md
index 9ca9bbf9..a9c3f4c9 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@
-![create workflow](https://raw.githubusercontent.com/Tauffer-Consulting/domino/main/docs/source/_static/media/7_create_workflow.gif)
+![create-workflow](https://github.com/Tauffer-Consulting/domino/assets/54302847/34d619fa-4b6c-4761-8b24-3ca829cfc28c)
# Table of contents
- [About](#about)
@@ -119,8 +119,8 @@ The Domino frontend service is a React application that provides the GUI for eas
Create Workflows by dragging and dropping Pieces to the canvas, and connecting them.
- ![create workflow](https://raw.githubusercontent.com/Tauffer-Consulting/domino/main/docs/source/_static/media/7_create_workflow.gif)
-
+ ![create-workflow](https://github.com/Tauffer-Consulting/domino/assets/54302847/34d619fa-4b6c-4761-8b24-3ca829cfc28c)
+
@@ -129,7 +129,7 @@ The Domino frontend service is a React application that provides the GUI for eas
Edit Pieces by changing their input. Outputs from upstream Pieces are automatically available as inputs for downstream Pieces. Pieces can pass forward any type of data, from simple strings to heavy files, all handled automatically by Domino's shared storage system.
- ![edit pieces](https://raw.githubusercontent.com/Tauffer-Consulting/domino/main/docs/source/_static/media/8_edit_pieces.gif)
+ ![edit pieces](https://github.com/Tauffer-Consulting/domino/assets/54302847/d453ac81-5485-4159-b2f3-bf57eb969906)
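The hunk above describes how outputs flow between Pieces through the shared storage system. As a rough, non-authoritative sketch of what a Piece looks like on the Python side — assuming the `BasePiece` / `piece_function` convention used in the default pieces repository; the class and field names below are illustrative only:

```python
# Sketch only: assumes the BasePiece / piece_function convention from the
# default_domino_pieces repository; class and field names are illustrative.
from pydantic import BaseModel

from domino.base_piece import BasePiece


class InputModel(BaseModel):
    text: str


class OutputModel(BaseModel):
    uppercased: str


class UppercaseTextPiece(BasePiece):
    def piece_function(self, input_data: InputModel) -> OutputModel:
        # Whatever is returned here is persisted by Domino's shared storage
        # and offered as selectable input to downstream Pieces in the GUI.
        return OutputModel(uppercased=input_data.text.upper())
```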
@@ -139,7 +139,8 @@ The Domino frontend service is a React application that provides the GUI for eas
Schedule Workflows to run periodically, at a specific date/time, or trigger them manually.
- ![schedule workflows](https://raw.githubusercontent.com/Tauffer-Consulting/domino/main/docs/source/_static/media/9_edit_workflow.gif)
+ ![schedule workflows](https://github.com/Tauffer-Consulting/domino/assets/54302847/e881d225-e8e0-4344-bc3f-c170cb820274)
+
@@ -148,7 +149,7 @@ The Domino frontend service is a React application that provides the GUI for eas
Monitor Workflows in real time, including the status of each Piece and the logs and results of each run.
- ![monitor workflow](https://raw.githubusercontent.com/Tauffer-Consulting/domino/main/docs/source/_static/media/10_monitor_workflow.gif)
+ ![monitor workflow](https://github.com/Tauffer-Consulting/domino/assets/54302847/fb5a30c5-0314-4271-bb46-81a159ab2696)
diff --git a/docker-compose-dev.yaml b/docker-compose-dev.yaml
index d19f79a6..c9696f06 100644
--- a/docker-compose-dev.yaml
+++ b/docker-compose-dev.yaml
@@ -225,7 +225,7 @@ services:
# Modified Airflow Scheduler with Domino
airflow-scheduler:
<<: *airflow-common
- image: ghcr.io/tauffer-consulting/domino-airflow-base:latest
+ # image: ghcr.io/tauffer-consulting/domino-airflow-base:latest
build:
context: .
dockerfile: Dockerfile-airflow-domino.dev
@@ -260,7 +260,7 @@ services:
# Modified Airflow Worker with Domino
airflow-worker:
<<: *airflow-common
- image: ghcr.io/tauffer-consulting/domino-airflow-base:latest
+ # image: ghcr.io/tauffer-consulting/domino-airflow-base:latest
build:
context: .
dockerfile: Dockerfile-airflow-domino.dev
@@ -309,7 +309,7 @@ services:
- DOMINO_DB_HOST=domino_postgres
- DOMINO_DB_PORT=5432
- DOMINO_DB_NAME=postgres
- - DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION=0.3.12
+ - DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION=0.3.14
- DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN=${DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN}
- DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS=${DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS}
- DOMINO_GITHUB_WORKFLOWS_REPOSITORY=${DOMINO_GITHUB_WORKFLOWS_REPOSITORY}
diff --git a/frontend/index.html b/frontend/index.html
index 7c77f587..4dc16fa1 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -5,7 +5,6 @@
-
@@ -16,6 +15,7 @@
+
Domino
diff --git a/frontend/package.json b/frontend/package.json
index a3e87d2c..8dbc47b0 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -8,6 +8,7 @@
"dependencies": {
"@emotion/react": "^11.10.5",
"@emotion/styled": "^11.10.5",
+ "@iconify/react": "^4.1.1",
"@import-meta-env/cli": "^0.6.6",
"@import-meta-env/unplugin": "^0.4.10",
"@material-ui/core": "^4.12.4",
diff --git a/frontend/src/components/CodeEditorInput/index.tsx b/frontend/src/components/CodeEditorInput/index.tsx
index 717026a9..d787245d 100644
--- a/frontend/src/components/CodeEditorInput/index.tsx
+++ b/frontend/src/components/CodeEditorInput/index.tsx
@@ -7,26 +7,29 @@ import {
useFormContext,
} from "react-hook-form";
-const CodeEditorItem = React.forwardRef(({ ...register }) => (
-
-));
+const CodeEditorItem = React.forwardRef(
+ ({ ...register }, ref) => (
+
+ ),
+);
CodeEditorItem.displayName = "CodeEditorItem";
diff --git a/frontend/src/components/WorkflowPanel/DefaultNode/index.tsx b/frontend/src/components/WorkflowPanel/DefaultNode/index.tsx
index 2cf2d826..420dcee7 100644
--- a/frontend/src/components/WorkflowPanel/DefaultNode/index.tsx
+++ b/frontend/src/components/WorkflowPanel/DefaultNode/index.tsx
@@ -1,3 +1,4 @@
+import { Icon } from "@iconify/react";
import { Paper, Typography } from "@mui/material";
import theme from "providers/theme.config";
import React, { type CSSProperties, memo, useMemo } from "react";
@@ -74,6 +75,8 @@ export const CustomNode = memo(({ id, data, selected }) => {
alignItems: "center",
position: "relative",
+ padding: 1,
+ textAlign: "center",
width: 150,
height: 70,
lineHeight: "60px",
@@ -103,6 +106,20 @@ export const CustomNode = memo(({ id, data, selected }) => {
[data],
);
+ const icon = useMemo(() => {
+ if (data.style.useIcon) {
+ const name = data.style.iconClassName;
+ return {
+ name,
+ style: {
+ width: "20px",
+ height: "20px",
+ ...data.style.iconStyle,
+ },
+ };
+ }
+ }, [data]);
+
return (
<>
{nodeTypeRenderHandleMap[extendedClassExt].renderSourceHandle && (
@@ -122,6 +139,7 @@ export const CustomNode = memo(({ id, data, selected }) => {
/>
)}
+ {icon && <Icon icon={icon.name} style={icon.style} />}
(({ id, data, selected }) => {
[data],
);
+ const icon = useMemo(() => {
+ if (data.style.useIcon) {
+ const name = data.style.iconClassName;
+ return {
+ name,
+ style: {
+ width: "20px",
+ height: "20px",
+ ...data.style.iconStyle,
+ },
+ };
+ }
+ }, [data]);
+
return (
<>
{nodeTypeRenderHandleMap[extendedClassExt].renderSourceHandle && (
@@ -136,6 +151,7 @@ const RunNode = memo(({ id, data, selected }) => {
/>
)}
+ {icon && <Icon icon={icon.name} style={icon.style} />}
= ({
const authenticate = useCallback(
async (email: string, password: string) => {
setAuthLoading(true);
- postAuthLogin({ email, password })
+ void postAuthLogin({ email, password })
.then((res) => {
if (res.status === 200) {
login(res.data.access_token, res.data.user_id);
}
})
- .catch((e) => {
- if (e instanceof AxiosError) {
- toast.error(
- e.response?.data?.detail ??
- "Error on login, please review your inputs and try again",
- );
- }
- })
.finally(() => {
setAuthLoading(false);
});
diff --git a/frontend/src/features/workflowEditor/components/WorkflowEditor.tsx b/frontend/src/features/workflowEditor/components/WorkflowEditor.tsx
index 7673001a..58c21f8e 100644
--- a/frontend/src/features/workflowEditor/components/WorkflowEditor.tsx
+++ b/frontend/src/features/workflowEditor/components/WorkflowEditor.tsx
@@ -162,7 +162,7 @@ export const WorkflowsEditorComponent: React.FC = () => {
} catch (err) {
setBackdropIsOpen(false);
if (err instanceof AxiosError) {
- toast.error(JSON.stringify(err?.response?.data));
+ console.log(err);
} else if (err instanceof Error) {
console.log(err);
toast.error(
diff --git a/frontend/src/features/workflowEditor/context/types/workflowPieceData.ts b/frontend/src/features/workflowEditor/context/types/workflowPieceData.ts
index 6046a3e1..0423aebc 100644
--- a/frontend/src/features/workflowEditor/context/types/workflowPieceData.ts
+++ b/frontend/src/features/workflowEditor/context/types/workflowPieceData.ts
@@ -36,9 +36,14 @@ interface UiSchema {
interface WorkflowSharedStorageDataModel {
source: StorageSourcesLocal | StorageSourcesAWS;
- base_folder?: string;
+
mode: StorageAccessModes;
- provider_options?: Record<string, any>;
+ provider_options?: ProviderOptionS3;
+}
+
+interface ProviderOptionS3 {
+ bucket?: string;
+ base_folder?: string;
}
interface SystemRequirementsModel {
diff --git a/frontend/src/features/workflowEditor/context/workflowsEditor.tsx b/frontend/src/features/workflowEditor/context/workflowsEditor.tsx
index 713fb351..01188487 100644
--- a/frontend/src/features/workflowEditor/context/workflowsEditor.tsx
+++ b/frontend/src/features/workflowEditor/context/workflowsEditor.tsx
@@ -167,11 +167,10 @@ const WorkflowsEditorProvider: FC<{ children?: React.ReactNode }> = ({
const workflowSharedStorage = {
source: storageSource,
- ...(baseFolder !== "" ? { base_folder: baseFolder } : {}),
...{ mode: elementData?.storage?.storageAccessMode },
provider_options: {
...(providerOptions && providerOptions.bucket !== ""
- ? { bucket: providerOptions.bucket }
+ ? { bucket: providerOptions.bucket, base_folder: baseFolder }
: {}),
},
};
diff --git a/frontend/src/features/workflows/api/workflow/deleteWorkflowId.ts b/frontend/src/features/workflows/api/workflow/deleteWorkflowId.ts
index 0b8ebf20..a3521b4c 100644
--- a/frontend/src/features/workflows/api/workflow/deleteWorkflowId.ts
+++ b/frontend/src/features/workflows/api/workflow/deleteWorkflowId.ts
@@ -44,16 +44,7 @@ export const useAuthenticatedDeleteWorkflowId = () => {
})
.catch((e) => {
if (e instanceof AxiosError) {
- if (e?.response?.status === 403) {
- toast.error("You are not allowed to delete this workflow.");
- } else if (e?.response?.status === 404) {
- toast.error("Workflow not found.");
- } else if (e?.response?.status === 409) {
- toast.error("Workflow is not in a valid state. ");
- } else {
- console.error(e);
- toast.error("Something went wrong. ");
- }
+ console.error(e);
} else {
throw e;
}
diff --git a/frontend/src/features/workflows/api/workflow/postWorkflowRunId.ts b/frontend/src/features/workflows/api/workflow/postWorkflowRunId.ts
index a8a5be4a..e322a05b 100644
--- a/frontend/src/features/workflows/api/workflow/postWorkflowRunId.ts
+++ b/frontend/src/features/workflows/api/workflow/postWorkflowRunId.ts
@@ -49,16 +49,7 @@ export const useAuthenticatedPostWorkflowRunId = () => {
})
.catch((e) => {
if (e instanceof AxiosError) {
- if (e?.response?.status === 403) {
- toast.error("You are not allowed to run this workflow.");
- } else if (e?.response?.status === 404) {
- toast.error("Workflow not found.");
- } else if (e?.response?.status === 409) {
- toast.error("Workflow is not in a valid state. ");
- } else {
- console.error(e);
- toast.error("Something went wrong. ");
- }
+ console.error(e);
} else {
throw e;
}
diff --git a/frontend/src/features/workflows/components/WorkflowDetail/CustomTabMenu/TaskResult.tsx b/frontend/src/features/workflows/components/WorkflowDetail/CustomTabMenu/TaskResult.tsx
index 4096a22f..0ef46f1f 100644
--- a/frontend/src/features/workflows/components/WorkflowDetail/CustomTabMenu/TaskResult.tsx
+++ b/frontend/src/features/workflows/components/WorkflowDetail/CustomTabMenu/TaskResult.tsx
@@ -26,19 +26,7 @@ export const TaskResult = (props: ITaskResultProps) => {
}
if (!base64_content || !file_type) {
- return (
-
- No content
-
- );
+ return No content ;
}
switch (file_type) {
@@ -106,6 +94,7 @@ export const TaskResult = (props: ITaskResultProps) => {
height: "90%",
width: "100%",
display: "flex",
+ flexDirection: "column",
alignItems: "center",
justifyContent: "center",
}}
diff --git a/frontend/src/services/clients/domino.client.ts b/frontend/src/services/clients/domino.client.ts
index 086fafa3..b8f7ad76 100644
--- a/frontend/src/services/clients/domino.client.ts
+++ b/frontend/src/services/clients/domino.client.ts
@@ -1,6 +1,7 @@
import axios from "axios";
import { environment } from "config/environment.config";
import { dispatchLogout } from "context/authentication";
+import { toast } from "react-toastify";
import { endpoints } from "../config/endpoints.config";
@@ -26,6 +27,19 @@ dominoApiClient.interceptors.response.use(
if (error.response.status === 401) {
dispatchLogout();
}
+
+ const message =
+ error.response?.data?.detail ||
+ error.response?.data?.message ||
+ error?.message ||
+ "Something went wrong";
+
+ if (Array.isArray(message)) {
+ toast.error(message[0].msg);
+ } else {
+ toast.error(message);
+ }
+
return await Promise.reject(error);
},
);
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index ad67ca5c..9216bb06 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -525,6 +525,18 @@
resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
+"@iconify/react@^4.1.1":
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/@iconify/react/-/react-4.1.1.tgz#da1bf03cdca9427f07cf22cf5b63fa8f02db4722"
+ integrity sha512-jed14EjvKjee8mc0eoscGxlg7mSQRkwQG3iX3cPBCO7UlOjz0DtlvTqxqEcHUJGh+z1VJ31Yhu5B9PxfO0zbdg==
+ dependencies:
+ "@iconify/types" "^2.0.0"
+
+"@iconify/types@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@iconify/types/-/types-2.0.0.tgz#ab0e9ea681d6c8a1214f30cd741fe3a20cc57f57"
+ integrity sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==
+
"@import-meta-env/cli@^0.6.6":
version "0.6.6"
resolved "https://registry.yarnpkg.com/@import-meta-env/cli/-/cli-0.6.6.tgz#bab4d574f940caacd85d9879a0931d653dfbd610"
diff --git a/helm/domino/templates/domino-database-deployment.yml b/helm/domino/templates/domino-database-deployment.yml
index ea875c76..b6dd3a5c 100644
--- a/helm/domino/templates/domino-database-deployment.yml
+++ b/helm/domino/templates/domino-database-deployment.yml
@@ -1,4 +1,5 @@
# Domino Database resource
+{{- if .Values.database.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
@@ -37,9 +38,10 @@ spec:
- hostPath:
path: "/home/docker/pgdata"
name: pgdata
-
---
+{{- end }}
# Domino Postgres Service
+{{- if .Values.database.enabled }}
apiVersion: v1
kind: Service
metadata:
@@ -52,3 +54,4 @@ spec:
- protocol: "TCP"
port: 5432
targetPort: 5432
+{{- end }}
\ No newline at end of file
diff --git a/helm/domino/templates/domino-rest-deployment.yml b/helm/domino/templates/domino-rest-deployment.yml
index 2853de67..b1fcdb34 100644
--- a/helm/domino/templates/domino-rest-deployment.yml
+++ b/helm/domino/templates/domino-rest-deployment.yml
@@ -35,7 +35,7 @@ spec:
- name: DOMINO_DEPLOY_MODE
value: {{ .Values.rest.deployMode }}
- name: DOMINO_DB_HOST
- value: {{ .Release.Name }}-postgres-service
+ value: {{ if .Values.database.enabled }}"{{ .Release.Name }}-postgres-service"{{ else }}{{ .Values.database.host }}{{ end }}
- name: DOMINO_DB_NAME
value: {{ .Values.database.name }}
- name: DOMINO_DB_USER
@@ -43,7 +43,7 @@ spec:
- name: DOMINO_DB_PASSWORD
value: {{ .Values.database.password }}
- name: DOMINO_DB_PORT
- value: "5432"
+ value: "{{ .Values.database.port }}"
- name: "DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN"
valueFrom:
secretKeyRef:
diff --git a/helm/domino/templates/jobs/domino-migrations.yml b/helm/domino/templates/jobs/domino-migrations.yml
index d11a5f42..0c3545c7 100644
--- a/helm/domino/templates/jobs/domino-migrations.yml
+++ b/helm/domino/templates/jobs/domino-migrations.yml
@@ -1,3 +1,4 @@
+{{- if .Values.database.enabled }}
apiVersion: batch/v1
kind: Job
metadata:
@@ -26,4 +27,4 @@ spec:
value: {{ .Values.database.password }}
- name: DOMINO_DB_PORT
value: "5432"
-
+{{- end }}
\ No newline at end of file
diff --git a/helm/domino/values.yaml b/helm/domino/values.yaml
index 2f59187b..5f9f3a89 100644
--- a/helm/domino/values.yaml
+++ b/helm/domino/values.yaml
@@ -20,7 +20,9 @@ rest:
# Change this if using external Database
database:
+ enabled: true
image: postgres:13
name: postgres
user: postgres
password: postgres
+ port: "5432"
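With the new `database.enabled` flag, switching the chart to an external database becomes a plain values override. A minimal sketch — the hostname, credentials, and release name below are placeholders:

```yaml
# values-external-db.yaml (placeholder values)
database:
  enabled: false            # skip the bundled Postgres Deployment/Service and migrations Job
  host: my-postgres.internal
  name: postgres
  user: postgres
  password: postgres
  port: "5433"
```

Applied with, for example, `helm upgrade --install domino ./helm/domino -f values-external-db.yaml`.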
diff --git a/media/diagram/domino b/media/diagram/domino
new file mode 100644
index 00000000..955cbf67
--- /dev/null
+++ b/media/diagram/domino
@@ -0,0 +1,167 @@
+ [167 added lines of diagram file content not captured in this diff]
diff --git a/rest/core/settings.py b/rest/core/settings.py
index cee9db72..a093a293 100644
--- a/rest/core/settings.py
+++ b/rest/core/settings.py
@@ -50,7 +50,7 @@ class Settings(BaseSettings):
# Default domino pieces repository
DOMINO_DEFAULT_PIECES_REPOSITORY = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY', "Tauffer-Consulting/default_domino_pieces")
- DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION', "0.3.13")
+ DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION', "0.3.14")
DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE', "github")
DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN: EmptyStrToNone = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN', "")
DOMINO_DEFAULT_PIECES_REPOSITORY_URL: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_URL', 'https://github.com/Tauffer-Consulting/default_domino_pieces')
diff --git a/rest/schemas/requests/workflow.py b/rest/schemas/requests/workflow.py
index 1cc2221f..671c1cbe 100644
--- a/rest/schemas/requests/workflow.py
+++ b/rest/schemas/requests/workflow.py
@@ -27,10 +27,6 @@ class UiSchema(BaseModel):
edges: List[Dict]
-class WorkflowStorage(BaseModel):
- storage_source: Optional[str] # TODO use enum ?
- base_folder: Optional[str]
-
class SelectEndDate(str, Enum):
never = "never"
user_defined = "User defined"
@@ -108,7 +104,6 @@ class WorkflowSharedStorageModeEnum(str, Enum):
class WorkflowSharedStorageDataModel(BaseModel):
source: Optional[WorkflowSharedStorageSourceEnum]
- base_folder: Optional[str]
mode: Optional[WorkflowSharedStorageModeEnum]
provider_options: Optional[Dict]
diff --git a/src/domino/VERSION b/src/domino/VERSION
index dc2b74e6..b35b9dd0 100644
--- a/src/domino/VERSION
+++ b/src/domino/VERSION
@@ -1 +1 @@
-0.5.7
\ No newline at end of file
+0.5.8
\ No newline at end of file
diff --git a/src/domino/cli/utils/config-domino-local.toml b/src/domino/cli/utils/config-domino-local.toml
index bf522305..ad7c1a9d 100644
--- a/src/domino/cli/utils/config-domino-local.toml
+++ b/src/domino/cli/utils/config-domino-local.toml
@@ -14,6 +14,7 @@ DOMINO_GITHUB_WORKFLOWS_SSH_PRIVATE_KEY = ""
DOMINO_GITHUB_WORKFLOWS_SSH_PUBLIC_KEY = ""
[domino_db]
+DOMINO_CREATE_DATABASE = true # Set to false to use an existing external database defined by the variables below
DOMINO_DB_HOST = "postgres"
DOMINO_DB_PORT = "5432"
DOMINO_DB_USER = "postgres"
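The same switch exists for local compose runs: setting `DOMINO_CREATE_DATABASE = false` makes the CLI copy `docker-compose-without-database.yaml` and export the connection variables below to the REST service. A sketch with placeholder credentials:

```toml
[domino_db]
DOMINO_CREATE_DATABASE = false
# Using localhost / 127.0.0.1 also switches the compose services to host network mode
DOMINO_DB_HOST = "localhost"
DOMINO_DB_PORT = "5432"
DOMINO_DB_USER = "postgres"
DOMINO_DB_PASSWORD = "postgres"
DOMINO_DB_NAME = "postgres"
```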
diff --git a/src/domino/cli/utils/docker-compose-without-database.yaml b/src/domino/cli/utils/docker-compose-without-database.yaml
new file mode 100644
index 00000000..8d5a03e6
--- /dev/null
+++ b/src/domino/cli/utils/docker-compose-without-database.yaml
@@ -0,0 +1,338 @@
+# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
+# WARNING: This configuration is for local development. Do not use it in a production deployment.
+---
+version: '3.8'
+x-airflow-common:
+ &airflow-common
+ image: apache/airflow:2.6.3-python3.9
+ environment:
+ &airflow-common-env
+ AIRFLOW__CORE__EXECUTOR: CeleryExecutor
+ AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
+ AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
+ AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
+ AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
+ AIRFLOW__CORE__FERNET_KEY: ''
+ AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
+ AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
+ AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
+ AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
+ AIRFLOW__SCHEDULER__DAG_DIR_LIST_INTERVAL: 10
+ _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
+ volumes:
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/dags:/opt/airflow/dags
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/logs:/opt/airflow/logs
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/plugins:/opt/airflow/plugins
+ user: "${AIRFLOW_UID:-50000}:0"
+ depends_on:
+ &airflow-common-depends-on
+ redis:
+ condition: service_healthy
+ postgres:
+ condition: service_healthy
+
+services:
+ postgres:
+ image: postgres:13
+ container_name: airflow-postgres
+ environment:
+ POSTGRES_USER: airflow
+ POSTGRES_PASSWORD: airflow
+ POSTGRES_DB: airflow
+ volumes:
+ - postgres-db-volume:/var/lib/postgresql/data
+ healthcheck:
+ test: [ "CMD", "pg_isready", "-U", "airflow" ]
+ interval: 10s
+ retries: 5
+ start_period: 5s
+ restart: always
+
+ redis:
+ image: redis:latest
+ container_name: airflow-redis
+ expose:
+ - 6379
+ healthcheck:
+ test: [ "CMD", "redis-cli", "ping" ]
+ interval: 10s
+ timeout: 30s
+ retries: 50
+ start_period: 30s
+ restart: always
+
+ airflow-webserver:
+ <<: *airflow-common
+ container_name: airflow-webserver
+ command: webserver
+ ports:
+ - "8080:8080"
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "curl",
+ "--fail",
+ "http://localhost:8080/health"
+ ]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ start_period: 30s
+ restart: always
+ depends_on:
+ <<: *airflow-common-depends-on
+ airflow-init:
+ condition: service_completed_successfully
+
+ airflow-triggerer:
+ <<: *airflow-common
+ container_name: airflow-triggerer
+ command: triggerer
+ healthcheck:
+ test:
+ [
+ "CMD-SHELL",
+ 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"'
+ ]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ start_period: 30s
+ restart: always
+ depends_on:
+ <<: *airflow-common-depends-on
+ airflow-init:
+ condition: service_completed_successfully
+
+ airflow-init:
+ <<: *airflow-common
+ container_name: airflow-init
+ entrypoint: /bin/bash
+ # yamllint disable rule:line-length
+ command:
+ - -c
+ - |
+ function ver() {
+ printf "%04d%04d%04d%04d" $${1//./ }
+ }
+ airflow_version=$$(AIRFLOW__LOGGING__LOGGING_LEVEL=INFO && gosu airflow airflow version)
+ airflow_version_comparable=$$(ver $${airflow_version})
+ min_airflow_version=2.2.0
+ min_airflow_version_comparable=$$(ver $${min_airflow_version})
+ if (( airflow_version_comparable < min_airflow_version_comparable )); then
+ echo
+ echo -e "\033[1;31mERROR!!!: Too old Airflow version $${airflow_version}!\e[0m"
+ echo "The minimum Airflow version supported: $${min_airflow_version}. Only use this or higher!"
+ echo
+ exit 1
+ fi
+ if [[ -z "${AIRFLOW_UID}" ]]; then
+ echo
+ echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
+ echo "If you are on Linux, you SHOULD follow the instructions below to set "
+ echo "AIRFLOW_UID environment variable, otherwise files will be owned by root."
+ echo "For other operating systems you can get rid of the warning with manually created .env file:"
+ echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
+ echo
+ fi
+ one_meg=1048576
+ mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
+ cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
+ disk_available=$$(df / | tail -1 | awk '{print $$4}')
+ warning_resources="false"
+ if (( mem_available < 4000 )) ; then
+ echo
+ echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
+ echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
+ echo
+ warning_resources="true"
+ fi
+ if (( cpus_available < 2 )); then
+ echo
+ echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
+ echo "At least 2 CPUs recommended. You have $${cpus_available}"
+ echo
+ warning_resources="true"
+ fi
+ if (( disk_available < one_meg * 10 )); then
+ echo
+ echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
+ echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
+ echo
+ warning_resources="true"
+ fi
+ if [[ $${warning_resources} == "true" ]]; then
+ echo
+ echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m"
+ echo "Please follow the instructions to increase amount of resources available:"
+ echo " https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin"
+ echo
+ fi
+ mkdir -p /sources/logs /sources/dags /sources/plugins
+ chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
+ exec /entrypoint airflow version
+ # yamllint enable rule:line-length
+ environment:
+ <<: *airflow-common-env
+ _AIRFLOW_DB_UPGRADE: 'true'
+ _AIRFLOW_WWW_USER_CREATE: 'true'
+ _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
+ _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
+ _PIP_ADDITIONAL_REQUIREMENTS: ''
+ user: "0:0"
+ volumes:
+ - ${AIRFLOW_PROJ_DIR:-./airflow}:/sources
+
+ airflow-cli:
+ <<: *airflow-common
+ container_name: airflow-cli
+ profiles:
+ - debug
+ environment:
+ <<: *airflow-common-env
+ CONNECTION_CHECK_MAX_COUNT: "0"
+ # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
+ command:
+ - bash
+ - -c
+ - airflow
+
+ # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up
+ # or by explicitly targeted on the command line e.g. docker-compose up flower.
+ # See: https://docs.docker.com/compose/profiles/
+ flower:
+ <<: *airflow-common
+ container_name: airflow-flower
+ command: celery flower
+ profiles:
+ - flower
+ ports:
+ - "5555:5555"
+ healthcheck:
+ test: [ "CMD", "curl", "--fail", "http://localhost:5555/" ]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ start_period: 30s
+ restart: always
+ depends_on:
+ <<: *airflow-common-depends-on
+ airflow-init:
+ condition: service_completed_successfully
+
+ # Modified Airflow Scheduler with Domino
+ airflow-scheduler:
+ <<: *airflow-common
+ image: ghcr.io/tauffer-consulting/domino-airflow-base:latest${DOMINO_COMPOSE_DEV}
+ container_name: airflow-domino-scheduler
+ command: scheduler
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "curl",
+ "--fail",
+ "http://localhost:8974/health"
+ ]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ start_period: 30s
+ restart: always
+ environment:
+ <<: *airflow-common-env
+ DOMINO_DEPLOY_MODE: local-compose
+ volumes:
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/dags:/opt/airflow/dags
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/logs:/opt/airflow/logs
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/plugins:/opt/airflow/plugins
+ depends_on:
+ <<: *airflow-common-depends-on
+ airflow-init:
+ condition: service_completed_successfully
+
+ # Modified Airflow Worker with Domino
+ airflow-worker:
+ <<: *airflow-common
+ image: ghcr.io/tauffer-consulting/domino-airflow-base:latest${DOMINO_COMPOSE_DEV}
+ container_name: airflow-domino-worker
+ command: celery worker
+ healthcheck:
+ test:
+ - "CMD-SHELL"
+ - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ start_period: 30s
+ environment:
+ <<: *airflow-common-env
+ # Required to handle warm shutdown of the celery workers properly
+ # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
+ DUMB_INIT_SETSID: "0"
+ DOMINO_DEPLOY_MODE: local-compose
+ LOCAL_DOMINO_SHARED_DATA_PATH: ${PWD}/domino_data
+ restart: always
+
+ volumes:
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/dags:/opt/airflow/dags
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/logs:/opt/airflow/logs
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/plugins:/opt/airflow/plugins
+ - ${PWD}/domino_data:/home/shared_storage
+ depends_on:
+ <<: *airflow-common-depends-on
+ airflow-init:
+ condition: service_completed_successfully
+
+ # Domino REST Api
+ domino_rest:
+ image: ghcr.io/tauffer-consulting/domino-rest:latest${DOMINO_COMPOSE_DEV}
+ container_name: domino-rest
+ command: bash -c "uvicorn main:app --reload --workers 1 --host 0.0.0.0 --port 8000"
+ ports:
+ - 8000:8000
+ environment:
+ - DOMINO_DB_USER=${DOMINO_DB_USER}
+ - DOMINO_DB_PASSWORD=${DOMINO_DB_PASSWORD}
+ - DOMINO_DB_HOST=${DOMINO_DB_HOST}
+ - DOMINO_DB_PORT=${DOMINO_DB_PORT}
+ - DOMINO_DB_NAME=${DOMINO_DB_NAME}
+ - DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN=${DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN}
+ - DOMINO_DEPLOY_MODE=local-compose
+ - AIRFLOW_ADMIN_USERNAME=airflow
+ - AIRFLOW_ADMIN_PASSWORD=airflow
+ network_mode: ${NETWORK_MODE}
+ volumes:
+ - ${AIRFLOW_PROJ_DIR:-./airflow}/dags:/opt/airflow/dags
+
+ # Domino Frontend
+ domino_frontend:
+ image: ghcr.io/tauffer-consulting/domino-frontend:compose${DOMINO_COMPOSE_DEV}
+ container_name: domino-frontend
+ ports:
+ - "3000:80"
+ depends_on:
+ domino_rest:
+ condition: service_started
+ environment:
+ - API_ENV=local
+ - DOMINO_DEPLOY_MODE=local-compose
+
+ # Domino Docker proxy
+ docker-proxy:
+ image: bobrik/socat
+ container_name: domino-docker-proxy
+ command: "TCP4-LISTEN:2375,fork,reuseaddr UNIX-CONNECT:/var/run/docker.sock"
+ ports:
+ - "2376:2375"
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+
+volumes:
+ postgres-db-volume: null
+ domino-postgres-volume: null
+
+networks:
+ domino-postgres-network:
+ driver: host
diff --git a/src/domino/cli/utils/platform.py b/src/domino/cli/utils/platform.py
index e9a660a4..9fe9d157 100644
--- a/src/domino/cli/utils/platform.py
+++ b/src/domino/cli/utils/platform.py
@@ -274,8 +274,10 @@ def create_platform(install_airflow: bool = True, use_gpu: bool = False) -> None
# We don't need driver as we are using kind and our host machine already has nvidia driver that is why we are disabling it.
nvidia_plugis_install_command = "helm install --wait --generate-name -n gpu-operator --create-namespace nvidia/gpu-operator --set driver.enabled=false"
subprocess.run(nvidia_plugis_install_command, shell=True)
-
+
+
# Override values for Domino Helm chart
+ db_enabled = platform_config['domino_db'].get("DOMINO_CREATE_DATABASE", True)
token_pieces = platform_config["github"]["DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN"]
token_workflows = platform_config["github"]["DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS"]
domino_values_override_config = {
@@ -292,8 +294,28 @@ def create_platform(install_airflow: bool = True, use_gpu: bool = False) -> None
"image": domino_rest_image,
"workflowsRepository": platform_config['github']['DOMINO_GITHUB_WORKFLOWS_REPOSITORY'],
},
+ "database": {
+ "enabled": db_enabled,
+ "image": "postgres:13",
+ "name": "postgres",
+ "user": "postgres",
+ "password": "postgres",
+ "port": "5432"
+ }
}
+ # Override the database values with the external database settings when the
+ # internal database is disabled; otherwise the default values above are used
+ if not db_enabled:
+ domino_values_override_config['database'] = {
+ **domino_values_override_config['database'],
+ "host": platform_config['domino_db']["DOMINO_DB_HOST"],
+ "name": platform_config['domino_db']["DOMINO_DB_NAME"],
+ "user": platform_config['domino_db']["DOMINO_DB_USER"],
+ "password": platform_config['domino_db']["DOMINO_DB_PASSWORD"],
+ "port": str(platform_config['domino_db'].get("DOMINO_DB_PORT", 5432))
+ }
+
# Override values for Airflow Helm chart
airflow_ssh_config = dict(
gitSshKey=f"{platform_config['github']['DOMINO_GITHUB_WORKFLOWS_SSH_PRIVATE_KEY']}",
@@ -413,8 +435,6 @@ def create_platform(install_airflow: bool = True, use_gpu: bool = False) -> None
"helm",
"pull",
DOMINO_HELM_PATH,
- "--version",
- DOMINO_HELM_VERSION,
"--untar",
"-d",
tmp_dir
@@ -614,13 +634,28 @@ def destroy_platform() -> None:
def run_platform_compose(detached: bool = False, use_config_file: bool = False, dev: bool = False) -> None:
+ # Database default settings
+ create_database = True
if use_config_file:
console.print("Using config file...")
with open("config-domino-local.toml", "rb") as f:
platform_config = tomli.load(f)
token_pieces = platform_config["github"].get("DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN")
os.environ['DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN'] = token_pieces
+ create_database = platform_config['domino_db'].get('DOMINO_CREATE_DATABASE', True)
+ if not create_database:
+ os.environ['DOMINO_DB_HOST'] = platform_config['domino_db'].get("DOMINO_DB_HOST", 'postgres')
+ os.environ['DOMINO_DB_PORT'] = str(platform_config['domino_db'].get("DOMINO_DB_PORT", 5432))
+ os.environ['DOMINO_DB_USER'] = platform_config['domino_db'].get("DOMINO_DB_USER", 'postgres')
+ os.environ['DOMINO_DB_PASSWORD'] = platform_config['domino_db'].get("DOMINO_DB_PASSWORD", 'postgres')
+ os.environ['DOMINO_DB_NAME'] = platform_config['domino_db'].get("DOMINO_DB_NAME", 'postgres')
+ os.environ['NETWORK_MODE'] = 'bridge'
+
+ # If running database in an external local container, set network mode to host
+ if platform_config['domino_db'].get('DOMINO_DB_HOST') in ['localhost', '0.0.0.0', '127.0.0.1']:
+ os.environ['NETWORK_MODE'] = 'host'
+
# Create local directories
local_path = Path(".").resolve()
domino_dir = local_path / "domino_data"
@@ -635,9 +670,11 @@ def run_platform_compose(detached: bool = False, use_config_file: bool = False,
subprocess.run(["chmod", "-R", "777", "airflow"])
# Copy docker-compose.yaml file from package to local path
- docker_compose_path = Path(__file__).resolve().parent / "docker-compose.yaml"
- subprocess.run(["cp", str(docker_compose_path), "."])
-
+ if create_database:
+ docker_compose_path = Path(__file__).resolve().parent / "docker-compose.yaml"
+ else:
+ docker_compose_path = Path(__file__).resolve().parent / "docker-compose-without-database.yaml"
+ subprocess.run(["cp", str(docker_compose_path), "./docker-compose.yaml"])
# Run docker-compose up
cmd = [
"docker",
diff --git a/src/domino/custom_operators/sidecar/Dockerfile b/src/domino/custom_operators/sidecar/Dockerfile
index dcaffcb3..07d5c743 100644
--- a/src/domino/custom_operators/sidecar/Dockerfile
+++ b/src/domino/custom_operators/sidecar/Dockerfile
@@ -1,11 +1,14 @@
-FROM bitnami/python:3.8
+FROM bitnami/python:3.10
RUN apt update \
&& apt -y install fuse3 \
&& apt install curl -y \
&& apt install p7zip-full -y \
&& apt-get install psmisc -y \
- && curl https://rclone.org/install.sh | bash
+ && apt-get install -y man-db \
+ && curl -O https://downloads.rclone.org/v1.64.2/rclone-v1.64.2-linux-amd64.zip && unzip rclone-v1.64.2-linux-amd64.zip && cd rclone-*-linux-amd64 && cp rclone /usr/bin/ \
+ && chown root:root /usr/bin/rclone && chmod 755 /usr/bin/rclone \
+ && mkdir -p /usr/local/share/man/man1 && cp rclone.1 /usr/local/share/man/man1/ && mandb
RUN mkdir -p /.config/rclone
COPY rclone.conf /.config/rclone/rclone.conf
@@ -13,6 +16,4 @@ COPY fuse.conf /etc/fuse.conf
COPY sidecar_lifecycle.sh .
RUN chmod u+x sidecar_lifecycle.sh
COPY logger.py .
-COPY mount.py .
-
-CMD ["bash", "-c", "./sidecar_lifecycle.sh"]
\ No newline at end of file
+COPY mount.py .
\ No newline at end of file