Add smoke tests for terra-jupyter-python and terra-jupyter-aou (DataBiosphere#199)

* Initial commit of terra-jupyter-python smoke tests.

* Fix test volume.

* Remove flag for BigQuery magic; it isn't needed until the next PR.

* Disable tests that are expected to fail.

* Remove debugging code from workflow.

* Capture results from all notebook-based tests as a workflow artifact, regardless of success or failure of those tests.

* Add smoke tests for terra-jupyter-aou.

* Build base images, if needed.

Also move tests into a subdir.

* Add errexit.

* Remove the workflow triggers added for testing purposes.
deflaux authored Feb 25, 2021
1 parent ac907b6 commit a86cb30
Showing 8 changed files with 727 additions and 1 deletion.
106 changes: 106 additions & 0 deletions .github/workflows/test-terra-jupyter-aou.yml
@@ -0,0 +1,106 @@
name: Test terra-jupyter-aou
# Perform smoke tests on the terra-jupyter-aou Docker image to have some amount of confidence that
# Python package versions are compatible.
#
# To configure the minimal auth needed for these tests to be able to read public data from Google Cloud Platform:
# Step 1: Create a service account per these instructions:
#   https://github.com/google-github-actions/setup-gcloud/blob/master/setup-gcloud/README.md
# Step 2: Give the service account the following permissions within the project: BigQuery User
# Step 3: Store its key and project id as GitHub repository secrets GCP_SA_KEY and GCP_PROJECT_ID.
#   https://docs.github.com/en/free-pro-team@latest/actions/reference/encrypted-secrets#creating-encrypted-secrets-for-a-repository

on:
  pull_request:
    branches: [ master ]
    paths:
    - 'terra-jupyter-aou/**'
    - '.github/workflows/test-terra-jupyter-aou.yml'
  # Note: secrets are not passed to pull requests from forks, so the dev team will need to use the manual workflow
  # dispatch trigger when receiving community contributions.

  workflow_dispatch:
    # Allows manual triggering of the workflow on a selected branch via the GitHub Actions tab.
    # GitHub blog demo: https://github.blog/changelog/2020-07-06-github-actions-manual-triggers-with-workflow_dispatch/

env:
  GOOGLE_PROJECT: ${{ secrets.GCP_PROJECT_ID }}

jobs:

  test_docker_image:
    runs-on: ubuntu-latest

    steps:
    - name: Checkout
      uses: actions/checkout@v2

    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: 3.7

    - name: Set up Cloud SDK
      uses: google-github-actions/setup-gcloud@master
      with:
        project_id: ${{ secrets.GCP_PROJECT_ID }}
        service_account_key: ${{ secrets.GCP_SA_KEY }}
        export_default_credentials: true

    - name: Build Docker image and base images too, if needed
      run: |
        gcloud auth configure-docker
        ./build_smoke_test_image.sh terra-jupyter-aou

    - name: Run Python code specific to notebooks with nbconvert
      # Run all notebooks from start to finish, regardless of error, so that we can capture the
      # result as a workflow artifact.
      # See also https://github.com/marketplace/actions/run-notebook if a more complicated
      # workflow for notebooks is needed in the future.
      run: |
        chmod -R a+w $GITHUB_WORKSPACE
        docker run \
          --env GOOGLE_PROJECT \
          --volume "${{ env.GOOGLE_APPLICATION_CREDENTIALS }}:/tmp/credentials.json:ro" \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-aou:smoke-test \
          /bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-aou/tests}/*ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'

    - name: Upload workflow artifacts
      uses: actions/upload-artifact@v2
      with:
        name: notebook-execution-results
        path: |
          terra-jupyter-python/tests/*.html
          terra-jupyter-aou/tests/*.html
        retention-days: 30

    - name: Test Python code with pytest
      run: |
        docker run \
          --env GOOGLE_PROJECT \
          --volume "${{ env.GOOGLE_APPLICATION_CREDENTIALS }}:/tmp/credentials.json:ro" \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-aou:smoke-test \
          /bin/bash -c 'pip3 install pytest ; pytest terra-jupyter-python/tests/ terra-jupyter-aou/tests/'

    - name: Test Python code specific to notebooks with nbconvert
      # Simply `Cell -> Run All` these notebooks and expect no errors in the case of a successful run of the test suite.
      # If the tests throw any exceptions, execution of the notebooks will halt at that point. Look at the workflow
      # artifacts to understand whether there are more failures than just the one that caused this task to halt.
      run: |
        docker run \
          --env GOOGLE_PROJECT \
          --volume "${{ env.GOOGLE_APPLICATION_CREDENTIALS }}:/tmp/credentials.json:ro" \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-aou:smoke-test \
          /bin/bash -c 'for nb in {terra-jupyter-python/tests,terra-jupyter-aou/tests}/*ipynb ; do jupyter nbconvert --to html --execute "${nb}" ; done'
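For local debugging outside GitHub Actions, the same two-pass nbconvert strategy can be reproduced with plain docker. This is a minimal sketch, assuming the smoke-test image was already built via ./build_smoke_test_image.sh terra-jupyter-aou and that application-default credentials exist at the usual gcloud path; the project id is a placeholder.

  # Hypothetical local run; substitute your own project id.
  export GOOGLE_PROJECT=my-gcp-project
  docker run \
    --env GOOGLE_PROJECT \
    --volume "${HOME}/.config/gcloud/application_default_credentials.json:/tmp/credentials.json:ro" \
    --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
    --volume "$(pwd):/tests" \
    --workdir=/tests \
    --entrypoint="" \
    terra-jupyter-aou:smoke-test \
    /bin/bash -c 'for nb in terra-jupyter-aou/tests/*ipynb ; do
      # Pass 1: tolerate errors so every notebook still yields an HTML report.
      jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}"
      # Pass 2: halt on the first error, mirroring "Cell -> Run All".
      jupyter nbconvert --to html --execute "${nb}"
    done'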
104 changes: 104 additions & 0 deletions .github/workflows/test-terra-jupyter-python.yml
@@ -0,0 +1,104 @@
name: Test terra-jupyter-python
# Perform smoke tests on the terra-jupyter-python Docker image to have some amount of confidence that
# Python package versions are compatible.
#
# To configure the minimal auth needed for these tests to be able to read public data from Google Cloud Platform:
# Step 1: Create a service account per these instructions:
#   https://github.com/google-github-actions/setup-gcloud/blob/master/setup-gcloud/README.md
# Step 2: Give the service account the following permissions within the project: BigQuery User
# Step 3: Store its key and project id as GitHub repository secrets GCP_SA_KEY and GCP_PROJECT_ID.
#   https://docs.github.com/en/free-pro-team@latest/actions/reference/encrypted-secrets#creating-encrypted-secrets-for-a-repository

on:
  pull_request:
    branches: [ master ]
    paths:
    - 'terra-jupyter-python/**'
    - '.github/workflows/test-terra-jupyter-python.yml'
  # Note: secrets are not passed to pull requests from forks, so the dev team will need to use the manual workflow
  # dispatch trigger when receiving community contributions.

  workflow_dispatch:
    # Allows manual triggering of the workflow on a selected branch via the GitHub Actions tab.
    # GitHub blog demo: https://github.blog/changelog/2020-07-06-github-actions-manual-triggers-with-workflow_dispatch/

env:
  GOOGLE_PROJECT: ${{ secrets.GCP_PROJECT_ID }}

jobs:

  test_docker_image:
    runs-on: ubuntu-latest

    steps:
    - name: Checkout
      uses: actions/checkout@v2

    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: 3.7

    - name: Set up Cloud SDK
      uses: google-github-actions/setup-gcloud@master
      with:
        project_id: ${{ secrets.GCP_PROJECT_ID }}
        service_account_key: ${{ secrets.GCP_SA_KEY }}
        export_default_credentials: true

    - name: Build Docker image and base images too, if needed
      run: |
        gcloud auth configure-docker
        ./build_smoke_test_image.sh terra-jupyter-python

    - name: Run Python code specific to notebooks with nbconvert
      # Run all notebooks from start to finish, regardless of error, so that we can capture the
      # result as a workflow artifact.
      # See also https://github.com/marketplace/actions/run-notebook if a more complicated
      # workflow for notebooks is needed in the future.
      run: |
        chmod a+w $GITHUB_WORKSPACE/terra-jupyter-python/tests
        docker run \
          --env GOOGLE_PROJECT \
          --volume "${{ env.GOOGLE_APPLICATION_CREDENTIALS }}:/tmp/credentials.json:ro" \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE/terra-jupyter-python/tests:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-python:smoke-test \
          /bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'

    - name: Upload workflow artifacts
      uses: actions/upload-artifact@v2
      with:
        name: notebook-execution-results
        path: terra-jupyter-python/tests/*.html
        retention-days: 30

    - name: Test Python code with pytest
      run: |
        docker run \
          --env GOOGLE_PROJECT \
          --volume "${{ env.GOOGLE_APPLICATION_CREDENTIALS }}:/tmp/credentials.json:ro" \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE/terra-jupyter-python/tests:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-python:smoke-test \
          /bin/sh -c "pip3 install pytest; pytest"

    - name: Test Python code specific to notebooks with nbconvert
      # Simply `Cell -> Run All` these notebooks and expect no errors in the case of a successful run of the test suite.
      # If the tests throw any exceptions, execution of the notebooks will halt at that point. Look at the workflow
      # artifacts to understand whether there are more failures than just the one that caused this task to halt.
      run: |
        chmod a+w $GITHUB_WORKSPACE/terra-jupyter-python
        docker run \
          --env GOOGLE_PROJECT \
          --volume "${{ env.GOOGLE_APPLICATION_CREDENTIALS }}:/tmp/credentials.json:ro" \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE/terra-jupyter-python/tests:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-python:smoke-test \
          /bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html --execute "${nb}" ; done'
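The service-account setup described in Steps 1 through 3 of both workflow headers can be scripted. The following is a hedged sketch using standard gcloud commands with placeholder names; the contents of key.json and the project id then become the GCP_SA_KEY and GCP_PROJECT_ID repository secrets.

  # Placeholder project and account names.
  PROJECT_ID=my-gcp-project
  SA_NAME=terra-smoke-tests
  gcloud iam service-accounts create "${SA_NAME}" --project "${PROJECT_ID}"
  # Grant only BigQuery User, the minimal permission these tests need.
  gcloud projects add-iam-policy-binding "${PROJECT_ID}" \
    --member "serviceAccount:${SA_NAME}@${PROJECT_ID}.iam.gserviceaccount.com" \
    --role "roles/bigquery.user"
  # Create a JSON key; paste its contents into the GCP_SA_KEY secret.
  gcloud iam service-accounts keys create key.json \
    --iam-account "${SA_NAME}@${PROJECT_ID}.iam.gserviceaccount.com"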
4 changes: 3 additions & 1 deletion .gitignore
@@ -3,4 +3,6 @@
.DS_Store
node_modules/
package-lock.json
.dotty-ide-disabled
.ipynb_checkpoints
*html
43 changes: 43 additions & 0 deletions build_smoke_test_image.sh
@@ -0,0 +1,43 @@
#!/bin/bash
# Perform a local build of an image for smoke-testing purposes. Also build its tree of base images, when missing.
#
# Example: ./build_smoke_test_image.sh terra-jupyter-python
#
# The current working directory must be the directory in which this script resides (one level above all the Dockerfiles).
# The script walks down each relevant image directory, changing the FROM statements,
# as needed, to refer to locally built base images.

set -o errexit
set -o pipefail
set -o nounset
set -o xtrace

build_smoke_test_image() {
  local IMAGE_TYPE=$1
  pushd ${IMAGE_TYPE}
  local BASE_IMAGES=$( egrep '^FROM (\S+)' Dockerfile | tr -s ' ' | cut -d ' ' -f 2 )

  local BASE_IMAGE
  for BASE_IMAGE in ${BASE_IMAGES}; do
    local PULL_RESULT=$( docker pull -q $BASE_IMAGE )

    if [[ -n "${PULL_RESULT}" ]]; then
      echo "${BASE_IMAGE} exists"
    else
      echo "${BASE_IMAGE} does not exist, building it locally"
      local BASE_IMAGE_TYPE=$( echo ${BASE_IMAGE} | cut -d '/' -f 3 | cut -d ':' -f 1 )
      local LOCAL_BASE_IMAGE=${BASE_IMAGE_TYPE}:smoke-test
      # Call this function recursively to build base images for however many levels down we need to go.
      popd
      build_smoke_test_image ${BASE_IMAGE_TYPE}
      pushd ${IMAGE_TYPE}
      # Edit our local copy of the Dockerfile to use the locally built base image.
      sed -i "s;${BASE_IMAGE};${LOCAL_BASE_IMAGE};g" Dockerfile
    fi
  done

  docker build . --file Dockerfile --tag ${IMAGE_TYPE}:smoke-test
  popd
}

build_smoke_test_image $1
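To illustrate the sed substitution in the missing-base-image branch above: given a hypothetical Dockerfile whose FROM line references a published three-segment image path (the registry path here is invented for the example), the rewrite would look like this after terra-jupyter-base is built locally.

  # Before (hypothetical published base image):
  FROM us.gcr.io/example-project/terra-jupyter-base:1.0.0
  # After sed -i "s;${BASE_IMAGE};${LOCAL_BASE_IMAGE};g" Dockerfile:
  FROM terra-jupyter-base:smoke-test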