Merge pull request #914 from zacharyburnett/ci/data_url #17

Workflow file for this run

# downloads the WebbPSF dataset and caches it to the GitHub Actions cache,
# making a new cache for each new data version
# the cache key is in the form `webbpsf-data-mini-1.3.0`,
# `webbpsf-data-full-1.2.6`, etc.
#
# To make the WebbPSF dataset available to your own test workflow,
# use this workflow to set up a cache in your repository,
# then use `retrieve_cache.yml` to retrieve that cache so you
# don't have to download the entire dataset every time
# (a usage sketch follows the example below).
#
# to set up a cache of WebbPSF data in your own repository,
# create a workflow like the following:
#
# # .github/workflows/download_webbpsf.yml
# name: download and cache WebbPSF data
# on:
#   schedule:
#     - cron: "0 0 * * 0"
# jobs:
#   download_webbpsf:
#     uses: spacetelescope/webbpsf/.github/workflows/download_data.yml@develop
#     with:
#       minimal: true
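#
# The following is a hedged sketch only: the `tests` job, the `pip install`
# and `pytest` commands, and the `WEBBPSF_PATH` environment variable are
# illustrative assumptions, and `retrieve_cache.yml` is not reproduced here.
# A consuming test workflow could instead restore the saved cache directly
# with `actions/cache/restore`, using the `cache_path` and `cache_key`
# outputs that this workflow exposes below:
#
# # .github/workflows/tests.yml (hypothetical consumer)
# name: test with cached WebbPSF data
# on: [push]
# jobs:
#   download_webbpsf:
#     uses: spacetelescope/webbpsf/.github/workflows/download_data.yml@develop
#     with:
#       minimal: true
#   tests:
#     needs: [ download_webbpsf ]
#     runs-on: ubuntu-latest
#     steps:
#       - uses: actions/checkout@v4
#       - uses: actions/cache/restore@v4
#         with:
#           path: ${{ needs.download_webbpsf.outputs.cache_path }}
#           key: ${{ needs.download_webbpsf.outputs.cache_key }}
#       - run: pip install ".[test]"
#       - run: pytest
#         env:
#           WEBBPSF_PATH: ${{ needs.download_webbpsf.outputs.cache_path }}webbpsf-data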
name: download and cache data
on:
  workflow_call:
    inputs:
      url:
        description: URL to gzip file
        type: string
        required: false
        default: https://stsci.box.com/shared/static/0dt9z6b927iqgtify2a4cvls9hvapi6k.gz
      minimal:
        description: dataset is minimal (as opposed to full)
        type: boolean
        required: false
        default: true
    outputs:
      version:
        value: ${{ jobs.download.outputs.version }}
      cache_path:
        value: ${{ jobs.download.outputs.cache_path }}
      cache_key:
        value: ${{ jobs.download.outputs.cache_key }}
  workflow_dispatch:
    inputs:
      url:
        description: URL to gzip file
        type: string
        required: false
        default: https://stsci.box.com/shared/static/0dt9z6b927iqgtify2a4cvls9hvapi6k.gz
      minimal:
        description: dataset is minimal (as opposed to full)
        type: boolean
        required: false
        default: true
  schedule:
    - cron: "0 0 * * 0"
  release:
  push:
    branches:
      - develop
env:
  FULL_DATA_URL: https://stsci.box.com/shared/static/qxpiaxsjwo15ml6m4pkhtk36c9jgj70k.gz
  MINIMAL_DATA_URL: https://stsci.box.com/shared/static/0dt9z6b927iqgtify2a4cvls9hvapi6k.gz
jobs:
  download:
    name: download and cache WebbPSF data
    runs-on: ubuntu-latest
    steps:
      # download the requested dataset (the `url` input for dispatched or called
      # runs, otherwise the minimal dataset)
      - run: wget ${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.url || env.MINIMAL_DATA_URL }} -O ${{ runner.temp }}/webbpsf-data.tar.gz
      - run: mkdir ${{ runner.temp }}/data/
      - run: tar -xzvf ${{ runner.temp }}/webbpsf-data.tar.gz -C ${{ runner.temp }}/data/
      - id: cache_path
        run: echo cache_path=${{ runner.temp }}/data/ >> $GITHUB_OUTPUT
      # read the dataset version from the extracted archive
      - id: version
        run: echo "version=$(cat ${{ steps.cache_path.outputs.cache_path }}/webbpsf-data/version.txt)" >> $GITHUB_OUTPUT
      # build a cache key like `webbpsf-data-mini-1.3.0`; scheduled and release
      # runs always cache the minimal dataset
      - id: cache_key
        run: echo "cache_key=webbpsf-data-${{ (github.event_name == 'schedule' || github.event_name == 'release') && 'mini' || inputs.minimal && 'mini' || 'full' }}-${{ steps.version.outputs.version }}" >> $GITHUB_OUTPUT
      - uses: actions/cache/save@v4
        with:
          path: ${{ runner.temp }}/data/
          key: ${{ steps.cache_key.outputs.cache_key }}
    outputs:
      version: ${{ steps.version.outputs.version }}
      cache_path: ${{ steps.cache_path.outputs.cache_path }}
      cache_key: ${{ steps.cache_key.outputs.cache_key }}