Publish HDF Data to S3 and MDF #31

Workflow file for this run

name: Publish HDF Data to S3 and MDF

# Credentials and endpoints come from repository secrets; path-style addressing
# is enabled for the S3-compatible object store.
env:
  AWS_ENDPOINT_URL: ${{ secrets.AWS_S3_ENDPOINT }}
  AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
  AWS_S3_BUCKET_PATH: ${{ secrets.AWS_S3_BUCKET_PATH }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  GLOBUS_CLIENT_ID: ${{ secrets.GLOBUS_CLIENT_ID }}
  GLOBUS_CLIENT_SECRET: ${{ secrets.GLOBUS_CLIENT_SECRET }}
  AWS_S3_USE_PATH_STYLE_ENDPOINT: true
  AWS_EC2_METADATA_DISABLED: true

# Run only when a release is published.
on:
  release:
    types: [published]

jobs:
  push_publish_job:
    runs-on: ubuntu-latest
    name: Job to Push Data to Object Store and Publish to MDF
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Print the working directory (debugging aid for the checkout layout).
      - name: Run pwd
        run: |
          pwd

      # Stage the data file in the object store and publish it to MDF.
      - name: Push and Publish Step
        uses: materials-data-facility/publish-action@v1
        with:
          globus_auth_client_id: ${{ env.GLOBUS_CLIENT_ID }}
          globus_auth_secret: ${{ env.GLOBUS_CLIENT_SECRET }}
          files_to_publish: "./O_interlayer_energy/data/qmc.csv"
          mdf_source_id: ""
          mdf_title: "Test"
          mdf_authors: "Shriram, Ben"
          mdf_affiliations: "UIUC, NCSA"
          mdf_publication_year: "2023"
          staging_object_store_url: ${{ env.AWS_ENDPOINT_URL }}
          aws_access_key_id: ${{ env.AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ env.AWS_SECRET_ACCESS_KEY }}
          s3_bucket_id: ${{ env.AWS_S3_BUCKET }}
          s3_bucket_path: ${{ env.AWS_S3_BUCKET_PATH }}
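
For context, a minimal Python sketch (not part of the workflow) of how the staged object could be checked in the S3-compatible store after a run, using the same environment variables the workflow sets. The object key layout used by the publish action under s3_bucket_path is not shown here, so the key below is a hypothetical example.

  # Minimal sketch: verify that a staged file exists in the S3-compatible store.
  import os

  import boto3
  from botocore.config import Config

  s3 = boto3.client(
      "s3",
      endpoint_url=os.environ["AWS_ENDPOINT_URL"],
      aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
      aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
      # Path-style addressing, matching AWS_S3_USE_PATH_STYLE_ENDPOINT in the workflow.
      config=Config(s3={"addressing_style": "path"}),
  )

  # Hypothetical key; adjust to wherever the publish action actually places the file.
  key = os.environ["AWS_S3_BUCKET_PATH"] + "/qmc.csv"
  head = s3.head_object(Bucket=os.environ["AWS_S3_BUCKET"], Key=key)
  print(f"Found {key}: {head['ContentLength']} bytes")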