Automatic update and deploy for release and prerelease #7
Workflow file for this run
name: Deploy development environment (workflow to delete)
on:
  pull_request:
env:
  ENV_NAME: env-dev.yml
  VERSION: dev
  DEPENDENCY_REPOS: replace_landsurface era5_grib_parallel
  ARCH: noarch
  CHANNEL_SUFFIX: _channel
concurrency:
  # Since there is only one development conda environment deployed at a time,
  # we can cancel the previous deployment if a new one is triggered.
  group: deploy-development
  cancel-in-progress: true
jobs:
  get-deployment-sites:
    name: Get Deployment Sites
    runs-on: ubuntu-latest
    outputs:
      deployment-sites: ${{ steps.get-deployment-sites.outputs.deployment-sites }}
    steps:
      - name: Checkout config
        uses: actions/checkout@v4
      - name: Get sites
        id: get-deployment-sites
        run: echo "deployment-sites=$(jq --compact-output '.sites' ./config/deployment-sites.json)" >> $GITHUB_OUTPUT
  pack:
    name: Pack environment
    runs-on: ubuntu-latest
    outputs:
      full-name: ${{ steps.get-env-name.outputs.full-name }}
      dependency-names: ${{ steps.get-dependencies.outputs.dependency-names }}
      pr-numbers: ${{ steps.get-dependencies.outputs.pr-numbers }}
    env:
      GH_TOKEN: ${{ secrets.ADMIN_TOKEN }}
    steps:
      - uses: actions/checkout@v4
      - name: Get environment name
        id: get-env-name
        run: echo "full-name=$(yq '.name' < ${{env.ENV_NAME}})-${{env.VERSION}}" >> $GITHUB_OUTPUT
      # Search for the latest open PR (not a draft) in the replace_landsurface and era5_grib_parallel repos with
      # changes in specific files. If a PR is found, download the artifact from the PR's latest
      # (successful) workflow run. If no PR is found, download the latest released version.
      - name: Get latest dependencies
        id: get-dependencies
        run: |
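          # download_latest_release <repo>: fetch the conda package(s) (*.tar.bz2) attached to the latest
          # GitHub release of <repo> into the local channel directory <repo>_channel/noarch (CHANNEL_SUFFIX/ARCH).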
          download_latest_release() {
            gh release download $(gh release view --repo ${{ github.repository_owner }}/$1 --json tagName --jq '.tagName') \
              --repo ${{ github.repository_owner }}/$1 --pattern *.tar.bz2 -D ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}
            if [ $? -eq 0 ]; then
              echo "${1}: Successfully downloaded latest release: \"$(basename ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}/*.tar.bz2)\"."
            fi
          }
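          # download_run_artifact <repo>: fetch the artifact of workflow run $run_id from <repo> into the
          # local channel directory, falling back to the latest release if no artifact can be downloaded.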
          download_run_artifact() {
            if gh run download $run_id --repo ${{ github.repository_owner }}/$1 -D ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}; then
              echo "${1}: Successfully downloaded artifact \"$(basename ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}/artifact/*.tar.bz2)\"."
              mv ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}/artifact/*.tar.bz2 ${1}${{env.CHANNEL_SUFFIX}}/${{env.ARCH}}
            else
              echo "${1}: No valid artifact found. Dependency will be installed from the latest release."
              download_latest_release ${1}
            fi
          }
          for dependency_repo in ${{env.DEPENDENCY_REPOS}}; do
            echo "Getting latest \"${dependency_repo}\" dependency ..."
            # Get sha and number of the latest open PR that changes any of the following files:
            # [src/**, setup.py, pyproject.toml, .conda/**]
            pr=$(gh pr list --repo ${{ github.repository_owner }}/${dependency_repo} --state open --draft=False \
              --json headRefOid,files,number,url \
              --jq '.[] | select(.files[].path |
              (startswith("src/") or (. == "setup.py") or (. == "pyproject.toml") or (startswith(".conda/"))))' \
              | head -n 1)
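            # Illustrative only (hypothetical values): $pr now holds a single JSON object such as
            #   {"headRefOid":"<sha>","files":[...],"number":42,"url":"https://github.com/<owner>/<repo>/pull/42"}
            # or is empty if no matching open, non-draft PR exists.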
            # if a PR is found, find the ID of the latest successful workflow run
            if [[ -n "$pr" ]]; then
              pr_sha=$(jq -r '.headRefOid' <<< $pr)
              pr_number=$(jq '.number' <<< $pr)
              pr_numbers+=($pr_number)
              pr_url=$(jq -r '.url' <<< $pr)
              echo "${dependency_repo}: Found PR #${pr_number}."
              echo " PR url: $pr_url"
              echo " Commit ref: $pr_sha"
              run=$(gh run list --repo ${{ github.repository_owner }}/${dependency_repo} \
                --json databaseId,headSha,event,status,conclusion,url \
                --jq ".[] | select(.event == \"pull_request\" and .status == \"completed\" and .conclusion == \"success\" and .headSha == \"$pr_sha\")"\
                | head -n 1)
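              # Illustrative only (hypothetical values): $run now holds a single JSON object such as
              #   {"databaseId":123456789,"headSha":"<sha>","event":"pull_request","status":"completed","conclusion":"success","url":"https://github.com/<owner>/<repo>/actions/runs/123456789"}
              # or is empty if the PR head commit has no matching run.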
              # if a successful workflow run is found, download its artifact
              if [[ -n "$run" ]]; then
                run_id=$(jq '.databaseId' <<< $run)
                run_url=$(jq -r '.url' <<< $run)
                echo "${dependency_repo}: Found successful workflow run for the PR."
                echo " Run ID: $run_id"
                echo " Run url: $run_url"
                download_run_artifact ${dependency_repo}
              else
                echo "${dependency_repo}: No successful workflow run found. Dependency will be installed from the latest release."
                download_latest_release ${dependency_repo}
              fi
            else
              echo "${dependency_repo}: No useful open PR found. Dependency will be installed from the latest release."
              download_latest_release ${dependency_repo}
            fi
            # Get the dependency's package name from the "name = ..." field of its pyproject.toml
            # (fetched via the GitHub contents API, whose "content" field is base64-encoded)
            dependency_names+=($(gh api repos/${{github.repository_owner}}/${dependency_repo}/contents/pyproject.toml --jq '.content' | \
              base64 --decode | grep -Po "^name\s*=\s*\K(.*)" | tr -d '"'))
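            # Illustrative only (hypothetical value): a pyproject.toml line such as
            #   name = "some_package"
            # yields "some_package" after the grep/tr extraction above.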
          done
          echo "dependency-names=${dependency_names[@]}" >> $GITHUB_OUTPUT
          echo "pr-numbers=$(sed 's/ /,/g' <<< [${pr_numbers[@]}])" >> $GITHUB_OUTPUT
      - name: Setup Micromamba
        uses: mamba-org/setup-micromamba@f8b8a1e23a26f60a44c853292711bacfd3eac822 #v1.9.0
        with:
          micromamba-version: '1.5.8-0'
          environment-file: ${{env.ENV_NAME}}
          environment-name: ${{ steps.get-env-name.outputs.full-name }}
          generate-run-shell: true
      # This step is needed to install local conda packages along with their dependencies (https://stackoverflow.com/a/68131606/21024780)
      - name: Create conda channels
        shell: micromamba-shell {0}
        run: |
          for dependency_repo in ${{env.DEPENDENCY_REPOS}}; do
            cmd="conda index ${dependency_repo}${{env.CHANNEL_SUFFIX}}"
            echo "$cmd"
            eval "$cmd"
          done
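          # conda index writes repodata.json into each architecture subdirectory, turning
          # <repo>_channel into a local file:// channel that micromamba can install from below.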
      - name: Install dependencies
        shell: micromamba-shell {0}
        run: |
          channels=$(for dependency_repo in ${{env.DEPENDENCY_REPOS}}; do \
            echo -n "-c file://${{github.workspace}}/${dependency_repo}${{env.CHANNEL_SUFFIX}} "; done)
          cmd="micromamba install ${{steps.get-dependencies.outputs.dependency-names}} \
            -c accessnri -c conda-forge -c coecms $channels -y"
          echo "$cmd"
          eval "$cmd"
      - name: Create Pack
        shell: micromamba-shell {0}
        run: |
          conda pack -o ${{ steps.get-env-name.outputs.full-name }}.tar.gz
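          # conda pack produces a relocatable tarball of the active environment; after extraction on the
          # target host, conda-unpack must be run once to fix the prefix paths (see the deploy job below).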
      - name: Upload Artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ steps.get-env-name.outputs.full-name }}
          if-no-files-found: error
          path: ${{ steps.get-env-name.outputs.full-name }}.tar.gz
  deploy:
    runs-on: ubuntu-latest
    needs:
      - get-deployment-sites
      - pack
    strategy:
      fail-fast: false
      matrix:
        deployment-sites: ${{ fromJson(needs.get-deployment-sites.outputs.deployment-sites) }}
    environment: ${{ matrix.deployment-sites }}
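    # Each deployment site name is expected to match a GitHub environment of the same name that
    # provides the site-specific secrets (USER, HOST, HOST_DATA, SSH_KEY) and variables
    # (PACK_DIR, ENVIRONMENT_DIR, MODULE_DIR) referenced in the steps below.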
    permissions:
      contents: write
    steps:
      - uses: actions/download-artifact@v4
        with:
          name: ${{ needs.pack.outputs.full-name }}
      - name: Set up SSH
        uses: access-nri/actions/.github/actions/setup-ssh@main
        id: ssh
        with:
          hosts: |
            ${{ secrets.HOST_DATA }}
            ${{ secrets.HOST }}
          private-key: ${{ secrets.SSH_KEY }}
      - name: Copy to ${{ matrix.deployment-sites }}
        run: |
          rsync -v -e 'ssh -i ${{ steps.ssh.outputs.private-key-path }}' \
            ${{ needs.pack.outputs.full-name }}.tar.gz \
            ${{ secrets.USER }}@${{ secrets.HOST_DATA }}:${{ vars.PACK_DIR }}
      - name: Deploy to ${{ matrix.deployment-sites }}
        env:
          DEPLOYMENT_DIR: ${{ vars.ENVIRONMENT_DIR }}/${{ env.VERSION }}
        run: |
          ssh ${{ secrets.USER }}@${{ secrets.HOST }} -i ${{ steps.ssh.outputs.private-key-path }} /bin/bash <<'EOT'
            mkdir -p ${{ env.DEPLOYMENT_DIR }}
            tar -xzf ${{ vars.PACK_DIR }}/${{ needs.pack.outputs.full-name }}.tar.gz -C ${{ env.DEPLOYMENT_DIR }}
            source ${{ env.DEPLOYMENT_DIR }}/bin/activate
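            # conda-unpack (shipped inside the packed environment by conda-pack) rewrites the
            # hard-coded prefix paths so the environment works from its new location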
            conda-unpack
            source ${{ env.DEPLOYMENT_DIR }}/bin/deactivate
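            # Assumption: .common is a shared modulefile template; symlinking it as the "dev" version
            # lets the freshly deployed environment be loaded through the module system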
            ln -sf ${{ vars.MODULE_DIR }}/.common ${{ vars.MODULE_DIR }}/${{ env.VERSION }}
          EOT