name: Deploy development environment
on:
  push:
    # branches:
    #   - main
    # paths:
    #   - 'config/**'
    #   - 'modules/**'
    #   - 'env-dev.yml'
  repository_dispatch:
    types: [prerelease]
env:
  ENV_NAME: env-dev.yml
  VERSION: dev
  DEPENDENCY_REPOS: replace_landsurface era5_grib_parallel
  ARCH: noarch
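  # Illustrative only: an upstream workflow could send this dispatch with a call like the
  # following (OWNER/REPO and the token value are placeholders, not defined in this file):
  #   gh api repos/OWNER/REPO/dispatches \
  #     --input - <<< '{"event_type": "prerelease", "client_payload": {"token": "<REPO_DISPATCH_VERIFICATION_TOKEN>"}}'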
concurrency:
  # Since there is only one development conda environment deployed at a time,
  # we can cancel the previous deployment if a new one is triggered.
  group: deploy-development
  cancel-in-progress: true
jobs:
  check-dispatch-token:
    name: Check dispatch token
    runs-on: ubuntu-latest
    if: ${{ github.event_name == 'repository_dispatch' }}
    steps:
      - name: Check token
        run: |
          if [[ "${{ github.event.client_payload.token }}" != "${{ secrets.REPO_DISPATCH_VERIFICATION_TOKEN }}" ]]; then
            echo "Invalid repo dispatch token"
            exit 1
          fi
  get-deployment-sites:
    name: Get Deployment Sites
    runs-on: ubuntu-latest
    needs: check-dispatch-token
    # Don't run if the event is triggered by a repository dispatch with a non-valid token,
    # but trigger if the event is not a repository dispatch
    if: ${{ !cancelled() && !(github.event_name == 'repository_dispatch' && needs.check-dispatch-token.result != 'success') }}
    outputs:
      deployment-sites: ${{ steps.get-sites.outputs.deployment-sites }}
    steps:
      - name: Checkout config
        uses: actions/checkout@v4
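      # config/deployment-sites.json is assumed to hold a top-level "sites" array,
      # e.g. (illustrative values only): {"sites": ["site-a", "site-b"]}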
      - name: Get sites
        id: get-sites
        run: |
          sites=$(jq --compact-output '.sites' ./config/deployment-sites.json)
          echo "Found the following deployment sites: $sites"
          echo "deployment-sites=$sites" >> $GITHUB_OUTPUT
  test:
    runs-on: ubuntu-latest
    needs: get-deployment-sites
    if: ${{ !cancelled() && needs.get-deployment-sites.result == 'success' }}
    steps:
      - name: test
        run: |
          echo ${{ needs.get-deployment-sites.outputs.deployment-sites }}
# pack:
#   name: Pack environment
#   runs-on: ubuntu-latest
#   needs: check-dispatch-token
#   # Don't run if the event is triggered by a repository dispatch with a non-valid token,
#   # but trigger if the event is not a repository dispatch
#   if: ${{ !cancelled() && !(github.event_name == 'repository_dispatch' && needs.check-dispatch-token.result != 'success') }}
#   outputs:
#     full-name: ${{ steps.get-env-name.outputs.full-name }}
#     dependency-names: ${{ steps.get-dependencies.outputs.dependency-names }}
#     pr-numbers: ${{ steps.get-dependencies.outputs.pr-numbers }}
#   env:
#     GH_TOKEN: ${{ secrets.REPO_CONTENT_READ_TOKEN }}
#     CHANNEL_SUFFIX: _channel
#   steps:
#     - uses: actions/checkout@v4
#     - name: Get environment name
#       id: get-env-name
#       run: echo "full-name=$(yq '.name' < ${{env.ENV_NAME}})-${{env.VERSION}}" >> $GITHUB_OUTPUT
#     # Search for the latest open PR (not a draft) in the replace_landsurface and era5_grib_parallel repos with
#     # changes in specific files. If a PR is found, download the artifact from the PR's latest
#     # (successful) workflow run. If a PR is not found, download the latest released version.
#     - name: Get latest dependencies
#       id: get-dependencies
#       run: |
#         download_latest_release() {
#           gh release download $(gh release view --repo ${{ github.repository_owner }}/$1 --json tagName --jq '.tagName') \
#             --repo ${{ github.repository_owner }}/$1 --pattern *.tar.bz2 -D ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}
#           if [ $? -eq 0 ]; then
#             echo "${1}: Successfully downloaded latest release: \"$(basename ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}/*.tar.bz2)\"."
#           fi
#         }
#         download_run_artifact() {
#           if gh run download $run_id --repo ${{ github.repository_owner }}/$1 -D ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}; then
#             echo "${1}: Successfully downloaded artifact \"$(basename ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}/artifact/*.tar.bz2)\"."
#             mv ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}/artifact/*.tar.bz2 ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}
#           else
#             echo "${1}: No valid artifact found. Dependency will be installed from the latest release."
#             download_latest_release ${1}
#           fi
#         }
#         for dependency_repo in ${{env.DEPENDENCY_REPOS}}; do
#           echo "Getting latest \"${dependency_repo}\" dependency ..."
#           # Get the sha and number of the latest open PR that changes any of the following files:
#           # [src/**, setup.py, pyproject.toml, .conda/**]
#           pr=$(gh pr list --repo ${{ github.repository_owner }}/${dependency_repo} --state open --draft=False \
#             --json headRefOid,files,number,url \
#             --jq '.[] | select(.files[].path |
#               (startswith("src/") or (. == "setup.py") or (. == "pyproject.toml") or (startswith(".conda/"))))' \
#             | head -n 1)
#           # If a PR is found, find the ID of the latest successful workflow run
#           if [[ -n "$pr" ]]; then
#             pr_sha=$(jq -r '.headRefOid' <<< $pr)
#             pr_number=$(jq '.number' <<< $pr)
#             pr_numbers+=($pr_number)
#             pr_url=$(jq -r '.url' <<< $pr)
#             echo "${dependency_repo}: Found PR #${pr_number}."
#             echo "  PR url: $pr_url"
#             echo "  Commit ref: $pr_sha"
#             run=$(gh run list --repo ${{ github.repository_owner }}/${dependency_repo} \
#               --json databaseId,headSha,event,status,url \
#               --jq ".[] | select(.event == \"pull_request\" and .status == \"completed\" and .headSha == \"$pr_sha\")" \
#               | head -n 1)
#             # If a successful workflow run is found, download its artifact
#             if [[ -n "$run" ]]; then
#               run_id=$(jq '.databaseId' <<< $run)
#               run_url=$(jq -r '.url' <<< $run)
#               echo "${dependency_repo}: Found successful workflow run for the PR."
#               echo "  Run ID: $run_id"
#               echo "  Run url: $run_url"
#               download_run_artifact ${dependency_repo}
#             else
#               echo "${dependency_repo}: No successful workflow run found. Dependency will be installed from the latest release."
#               download_latest_release ${dependency_repo}
#             fi
#           else
#             echo "${dependency_repo}: No useful open PR found. Dependency will be installed from the latest release."
#             download_latest_release ${dependency_repo}
#           fi
#           # Get the dependency name as the text after "name = " in the repo's pyproject.toml
#           dependency_names+=($(gh api -X GET -H "Accept: application/vnd.github.v3.raw" \
#             repos/${{github.repository_owner}}/${dependency_repo}/contents/pyproject.toml -F ref=${pr_sha:-main} \
#             | grep -Po "^name\s*=\s*\K(.*)" | tr -d '"'))
#         done
#         echo "dependency-names=${dependency_names[@]}" >> $GITHUB_OUTPUT
#         echo "pr-numbers=$(sed 's/ /,/g' <<< [${pr_numbers[@]}])" >> $GITHUB_OUTPUT
#     - name: Setup Micromamba
#       uses: mamba-org/setup-micromamba@f8b8a1e23a26f60a44c853292711bacfd3eac822 #v1.9.0
#       with:
#         micromamba-version: '1.5.8-0'
#         environment-file: ${{env.ENV_NAME}}
#         environment-name: ${{ steps.get-env-name.outputs.full-name }}
#         generate-run-shell: true
#     # This step is needed to install local conda packages along with their dependencies (https://stackoverflow.com/a/68131606/21024780)
#     - name: Create conda channels
#       shell: micromamba-shell {0}
#       run: |
#         for dependency_repo in ${{env.DEPENDENCY_REPOS}}; do
#           cmd="conda index ${dependency_repo}${{env.CHANNEL_SUFFIX}}"
#           echo "$cmd"
#           eval "$cmd"
#         done
#     - name: Install dependencies
#       shell: micromamba-shell {0}
#       run: |
#         channels=$(for dependency_repo in ${{env.DEPENDENCY_REPOS}}; do \
#           echo -n "-c file://${{github.workspace}}/${dependency_repo}${{env.CHANNEL_SUFFIX}} "; done)
#         cmd="micromamba install ${{steps.get-dependencies.outputs.dependency-names}} \
#           -c accessnri -c conda-forge -c coecms $channels -y"
#         echo "$cmd"
#         eval "$cmd"
#     - name: Create Pack
#       shell: micromamba-shell {0}
#       run: |
#         conda pack -o ${{ steps.get-env-name.outputs.full-name }}.tar.gz
#     - name: Upload Artifact
#       uses: actions/upload-artifact@v4
#       with:
#         name: ${{ steps.get-env-name.outputs.full-name }}
#         if-no-files-found: error
#         path: ${{ steps.get-env-name.outputs.full-name }}.tar.gz
# deploy:
#   runs-on: ubuntu-latest
#   needs:
#     - get-deployment-sites
#     - pack
#   strategy:
#     fail-fast: false
#     matrix:
#       deployment-site: ${{ fromJSON(needs.get-deployment-sites.outputs.deployment-sites) }}
#   environment: ${{ matrix.deployment-site }}
#   permissions:
#     contents: write
#   steps:
#     - uses: actions/download-artifact@v4
#       with:
#         name: ${{ needs.pack.outputs.full-name }}
#     - name: Set up SSH
#       uses: access-nri/actions/.github/actions/setup-ssh@main
#       id: ssh
#       with:
#         hosts: |
#           ${{ secrets.HOST_DATA }}
#           ${{ secrets.HOST }}
#         private-key: ${{ secrets.SSH_KEY }}
#     - name: Copy to ${{ matrix.deployment-site }}
#       run: |
#         rsync -v -e 'ssh -i ${{ steps.ssh.outputs.private-key-path }}' \
#           ${{ needs.pack.outputs.full-name }}.tar.gz \
#           ${{ secrets.USER }}@${{ secrets.HOST_DATA }}:${{ vars.PACK_DIR }}
#     - name: Deploy to ${{ matrix.deployment-site }}
#       env:
#         MODULE_DEPLOYMENT_DIR: ${{ vars.ROOT_DIR_PRERELEASE }}/${{vars.MODULES_DIR}}/${{vars.MODULE_NAME}}/${{env.VERSION}}
#         APP_DEPLOYMENT_DIR: ${{ vars.ROOT_DIR_PRERELEASE }}/${{vars.APPS_DIR}}/${{vars.MODULE_NAME}}/${{env.VERSION}}
#       run: |
#         ssh ${{ secrets.USER }}@${{ secrets.HOST }} -i ${{ steps.ssh.outputs.private-key-path }} /bin/bash <<'EOT'
#         mkdir -p ${{ env.APP_DEPLOYMENT_DIR }}
#         tar -xzf ${{ vars.PACK_DIR }}/${{ needs.pack.outputs.full-name }}.tar.gz -C ${{ env.APP_DEPLOYMENT_DIR }}
#         source ${{ env.APP_DEPLOYMENT_DIR }}/bin/activate
#         conda-unpack
#         source ${{ env.APP_DEPLOYMENT_DIR }}/bin/deactivate
#         ln -sf $(dirname ${{env.MODULE_DEPLOYMENT_DIR}})/.common ${{env.MODULE_DEPLOYMENT_DIR}}
#         EOT