# NOTE(review): the lines below are GitHub web-UI text captured along with the
# workflow file (run "new workflow #8"); they are not YAML and are kept here
# as comments so the file parses.
# Skip to content
# new workflow
# new workflow #8
# Workflow file for this run
name: Our first dbt PR job
on:
  # Run CI on PRs when they are opened, reopened, updated, or marked ready.
  pull_request:
    types:
      - opened
      - reopened
      - synchronize
      - ready_for_review
  # Run on pushes to every branch except main.
  # NOTE: the original used `branches: ['!main']` — a filter that contains
  # only a negated pattern matches nothing (GitHub requires at least one
  # positive pattern for `!` to subtract from), so the push trigger never
  # fired. `branches-ignore` is the supported way to exclude a branch.
  push:
    branches-ignore:
      - main
jobs:
  # Single CI job: tunnel to the production Postgres host over SSH, then
  # build and lint the dbt project against it.
  dbt_ci:
    runs-on: ubuntu-latest
    steps:
      # Step 1: Checkout the code
      - name: Checkout Code
        uses: actions/checkout@v4  # v2 runs on the deprecated Node 12 runtime

      # Step 2: Set up Python
      - name: Set Up Python
        uses: actions/setup-python@v5  # v2 is deprecated
        with:
          python-version: '3.8'  # quoted so 3.10-style versions don't truncate

      # Step 3: Add SSH private key and configure known_hosts.
      # The key is passed through `env:` instead of being interpolated
      # directly into the shell script, so its contents never appear in the
      # command text (avoids injection/quoting issues with ${{ }} in run:).
      - name: Add SSH private key
        env:
          SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
        run: |
          mkdir -p ~/.ssh
          printf '%s\n' "$SSH_PRIVATE_KEY" > ~/.ssh/production_ddp
          chmod 600 ~/.ssh/production_ddp

      - name: Add remote host to known_hosts
        run: ssh-keyscan -H ${{ secrets.SSH_HOST }} >> ~/.ssh/known_hosts

      # Step 4: Debug SSH connection.
      # NOTE(review): StrictHostKeyChecking=no bypasses the known_hosts entry
      # added above; kept for parity with the original, but consider removing
      # it so the keyscan result is actually verified.
      - name: Debug SSH connection
        run: |
          ssh -i ~/.ssh/production_ddp -o StrictHostKeyChecking=no ${{ secrets.DB_USER }}@${{ secrets.SSH_HOST }} echo "SSH connection successful"

      # Step 5: Establish SSH tunnel (-f backgrounds it, -N runs no remote
      # command) forwarding local port 5432 to the Postgres host.
      - name: Establish SSH tunnel
        run: |
          ssh -fN \
            -L 5432:${{ secrets.POSTGRES_HOST }}:5432 \
            -i ~/.ssh/production_ddp \
            ${{ secrets.DB_USER }}@${{ secrets.SSH_HOST }}

      # Step 6: Install Python dependencies
      - name: Install requirements
        run: pip install -r requirements.txt

      # Step 7: Install dbt package dependencies (packages.yml)
      - name: Install dbt dependencies
        run: dbt deps

      # Step 8: Run dbt build against the tunneled database
      - name: dbt build
        run: dbt build --full-refresh --profiles-dir ./
        env:
          POSTGRES_DBNAME: ${{ secrets.POSTGRES_DBNAME }}
          POSTGRES_USER: ${{ secrets.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
          POSTGRES_HOST: 127.0.0.1  # localhost — the SSH tunnel forwards the port

      # Step 9: Benchmark models using sqlfluff
      - name: Benchmarking models
        run: sqlfluff lint models --bench

      # Step 10: Lint models using sqlfluff
      - name: Lint models
        run: sqlfluff lint models -f human