Commit
Merge branch 'NOAA-EMC:develop' into csps-rocky8
weihuang-jedi authored Nov 7, 2024
2 parents d5f755c + 152bb45 commit 56353c2
Showing 11 changed files with 52 additions and 40 deletions.
45 changes: 41 additions & 4 deletions .github/workflows/pw_aws_ci.yaml
@@ -15,7 +15,7 @@ on:
workflow_dispatch:
inputs:
pr_number:
description: 'Pull Request Number (use 0 for non-PR)'
description: 'PR Number (use 0 for non-PR)'
required: true
default: '0'
os:
@@ -31,24 +31,60 @@ env:
MACHINE_ID: noaacloud

jobs:

run-start-clusters:
runs-on: ubuntu-latest
env:
PW_PLATFORM_HOST: noaa.parallel.works
steps:
- name: Checkout pw-cluster-automation repository
uses: actions/checkout@v4
with:
repository: TerrenceMcGuinness-NOAA/pw-cluster-automation
path: pw-cluster-automation
ref: pw_cluster_noaa

- name: Run startClusters
run: |
mkdir -p ~/.ssh
echo "${{ secrets.ID_RSA_AWS }}" > ~/.ssh/id_rsa
echo "${{ secrets.PW_API_KEY }}" > ~/.ssh/pw_api.key
chmod 700 ~/.ssh
chmod 600 ~/.ssh/id_rsa
chmod 600 ~/.ssh/pw_api.key
if [ "${{ github.event.inputs.os }}" == "rocky" ]; then
clustername="globalworkflowciplatformrocky8"
elif [ "${{ github.event.inputs.os }}" == "centos" ]; then
clustername="awsemctmcgc7i48xlargeciplatform"
fi
python3 pw-cluster-automation/startClusters.py $clustername
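Note on the run-start-clusters job above: the if/elif in the Run startClusters step leaves clustername unset when the os input is anything other than rocky or centos. A minimal Python sketch (hypothetical helper, not part of this commit) of the same selection with an explicit failure for unsupported values:

    import sys

    # Cluster names taken from the workflow step above.
    CLUSTERS = {
        "rocky": "globalworkflowciplatformrocky8",
        "centos": "awsemctmcgc7i48xlargeciplatform",
    }

    def select_cluster(os_input: str) -> str:
        """Return the Parallel Works cluster name for a CI OS label, or exit loudly."""
        try:
            return CLUSTERS[os_input]
        except KeyError:
            sys.exit(f"FATAL ERROR: unsupported os '{os_input}'; expected one of {sorted(CLUSTERS)}")

    if __name__ == "__main__":
        print(select_cluster(sys.argv[1] if len(sys.argv) > 1 else ""))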
fetch-branch:
needs: run-start-clusters
runs-on: ubuntu-latest
env:
GH_TOKEN: ${{ secrets.GITHUBTOKEN }}
outputs:
branch: ${{ steps.get-branch.outputs.branch }}
repo: ${{ steps.get-branch.outputs.repo }}
steps:
- name: Fetch branch name for PR
- name: Fetch branch name and repo for PR
id: get-branch
run: |
pr_number=${{ github.event.inputs.pr_number }}
repo=${{ github.repository }}
if [ "$pr_number" -eq "0" ]; then
branch=${{ github.event.inputs.ref }}
repo=${{ github.repository }}
else
branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
repo_owner=$(gh pr view $pr_number --repo $repo --json headRepositoryOwner --jq '.headRepositoryOwner.login')
repo_name=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.name')
repo="$repo_owner/$repo_name"
fi
echo "::set-output name=branch::$branch"
{
echo "branch=$branch"
echo "repo=$repo"
} >> $GITHUB_OUTPUT
checkout:
needs: fetch-branch
Expand All @@ -64,6 +100,7 @@ jobs:
with:
path: ${{ github.run_id }}/HOMEgfs
submodules: 'recursive'
repository: ${{ needs.fetch-branch.outputs.repo }}
ref: ${{ needs.fetch-branch.outputs.branch }}

build-link:
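Note on the pw_aws_ci.yaml changes above: the fetch-branch job drops the deprecated ::set-output workflow command in favor of appending key=value lines to the file named by $GITHUB_OUTPUT, and it now also exports a repo output so the checkout job can pull the PR head branch from a fork. A minimal Python sketch of that output mechanism (hypothetical branch/repo values; the workflow itself does this in shell, as shown in the diff):

    import os

    # GitHub Actions collects step outputs from key=value lines appended to the
    # file whose path is supplied in the GITHUB_OUTPUT environment variable.
    branch = "feature/example-branch"    # hypothetical value
    repo = "some-owner/global-workflow"  # hypothetical value

    with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
        fh.write(f"branch={branch}\n")
        fh.write(f"repo={repo}\n")

    # Downstream jobs read these as needs.fetch-branch.outputs.branch and
    # needs.fetch-branch.outputs.repo, as the checkout job above does.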
5 changes: 0 additions & 5 deletions .gitignore
@@ -171,11 +171,6 @@ ush/bufr2ioda_insitu*
versions/build.ver
versions/run.ver

# wxflow checkout and symlinks
ush/python/wxflow
workflow/wxflow
ci/scripts/wxflow

# jcb checkout and symlinks
ush/python/jcb
workflow/jcb
1 change: 1 addition & 0 deletions ci/scripts/wxflow
2 changes: 1 addition & 1 deletion parm/post/oceanice_products_gefs.yaml
@@ -33,7 +33,7 @@ ocean:
{% elif model_grid == 'mx500' %}
ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
{% endif %}
subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'tob', 'so', 'uo', 'vo']
data_in:
copy:
- ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
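Note on the subset change above: tob is added alongside temp, so, uo, and vo; in MOM6/CMOR naming tob is the sea-floor (bottom) potential temperature diagnostic, which is the assumption here. A minimal xarray sketch (not the operational oceanice-products code) of applying the subset to the staged ocean.nc:

    import xarray as xr

    # Variable list from parm/post/oceanice_products_gefs.yaml, including the new 'tob'.
    subset = ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible',
              'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy',
              'temp', 'tob', 'so', 'uo', 'vo']

    ds = xr.open_dataset("ocean.nc")                      # copied to DATA/ocean.nc by the job
    available = [v for v in subset if v in ds.data_vars]  # tolerate variables absent from a file
    ds[available].to_netcdf("ocean_subset.nc")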
1 change: 1 addition & 0 deletions parm/product/gefs.0p25.fFFF.paramlist.a.txt
@@ -19,6 +19,7 @@
:CIN:180-0 mb above ground:
:CIN:surface:
:HLCY:3000-0 m above ground:
:PEVPR:surface:
:TCDC:entire atmosphere (considered as a single layer):
:WEASD:surface:
:SNOD:surface:
1 change: 0 additions & 1 deletion parm/product/gefs.0p25.fFFF.paramlist.b.txt
@@ -151,7 +151,6 @@
:O3MR:5 mb:
:O3MR:70 mb:
:O3MR:7 mb:
:PEVPR:surface:
:PLI:30-0 mb above ground:
:PLPL:255-0 mb above ground:
:POT:0.995 sigma level:
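Note on the two paramlist edits above: the :PEVPR:surface: (potential evaporation rate) record moves from the GEFS 0p25 'b' list into the 'a' list. A small Python sketch (hypothetical check, not part of this commit) to confirm a record ends up in exactly one of the two lists:

    from pathlib import Path

    record = ":PEVPR:surface:"
    files = [Path("parm/product/gefs.0p25.fFFF.paramlist.a.txt"),
             Path("parm/product/gefs.0p25.fFFF.paramlist.b.txt")]

    # Count how many of the two parameter lists contain the record after the move.
    hits = [f for f in files if record in f.read_text().splitlines()]
    assert len(hits) == 1, f"{record} found in {len(hits)} list(s): {[str(f) for f in hits]}"
    print(f"{record} is listed exactly once, in {hits[0]}")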
18 changes: 0 additions & 18 deletions sorc/link_workflow.sh
@@ -86,15 +86,6 @@ esac
# Source fix version file
source "${HOMEgfs}/versions/fix.ver"

# Link python pacakges in ush/python
# TODO: This will be unnecessary when these are part of the virtualenv
packages=("wxflow")
for package in "${packages[@]}"; do
cd "${HOMEgfs}/ush/python" || exit 1
[[ -s "${package}" ]] && rm -f "${package}"
${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" .
done

# Link GDASapp python packages in ush/python
packages=("jcb")
for package in "${packages[@]}"; do
@@ -103,15 +94,6 @@ for package in "${packages[@]}"; do
${LINK} "${HOMEgfs}/sorc/gdas.cd/sorc/${package}/src/${package}" .
done

# Link wxflow in workflow and ci/scripts
# TODO: This will be unnecessary when wxflow is part of the virtualenv
cd "${HOMEgfs}/workflow" || exit 1
[[ -s "wxflow" ]] && rm -f wxflow
${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
cd "${HOMEgfs}/ci/scripts" || exit 1
[[ -s "wxflow" ]] && rm -f wxflow
${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .

# Link fix directories
if [[ -n "${FIX_DIR}" ]]; then
if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi
15 changes: 5 additions & 10 deletions ush/python/pygfs/task/archive.py
@@ -88,11 +88,6 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str
if not os.path.isdir(arch_dict.ROTDIR):
raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!")

if arch_dict.RUN in ["gdas", "gfs"]:

# Copy the cyclone track files and rename the experiments
Archive._rename_cyclone_expt(arch_dict)

# Collect datasets that need to be archived
# Each dataset represents one tarball

@@ -371,14 +366,14 @@ def _rename_cyclone_expt(arch_dict) -> None:

if run == "gfs":
in_track_file = (track_dir_in + "/avno.t" +
cycle_HH + "z.cycle.trackatcfunix")
cycle_HH + "z.cyclone.trackatcfunix")
in_track_p_file = (track_dir_in + "/avnop.t" +
cycle_HH + "z.cycle.trackatcfunixp")
cycle_HH + "z.cyclone.trackatcfunix")
elif run == "gdas":
in_track_file = (track_dir_in + "/gdas.t" +
cycle_HH + "z.cycle.trackatcfunix")
cycle_HH + "z.cyclone.trackatcfunix")
in_track_p_file = (track_dir_in + "/gdasp.t" +
cycle_HH + "z.cycle.trackatcfunixp")
cycle_HH + "z.cyclone.trackatcfunix")

if not os.path.isfile(in_track_file):
# Do not attempt to archive the outputs
@@ -416,7 +411,7 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s
with open("/tmp/track_file", "w") as new_file:
new_file.writelines(out_lines)

shutil.move("tmp/track_file", filename_out)
shutil.move("/tmp/track_file", filename_out)

replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4)
replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4)
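Note on the archive.py changes above: the earlier hunks stop calling _rename_cyclone_expt from configure and switch the expected track-file names from z.cycle.* to z.cyclone.*, and the last hunk fixes a path mismatch in which the helper wrote the edited track file to /tmp/track_file but then moved the relative path tmp/track_file, so the move would fail at runtime. A minimal Python sketch of the replace-and-move helper (same idea, but using tempfile instead of a fixed /tmp name, which is not what the commit does):

    import shutil
    import tempfile

    def replace_string_from_to_file(filename_in, filename_out, search_str, replace_str):
        """Copy filename_in to filename_out, replacing search_str with replace_str."""
        with open(filename_in) as fh:
            out_lines = [line.replace(search_str, replace_str) for line in fh]

        # Write to a uniquely named temporary file, then move it into place.
        with tempfile.NamedTemporaryFile("w", delete=False) as tmp:
            tmp.writelines(out_lines)

        shutil.move(tmp.name, filename_out)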
1 change: 1 addition & 0 deletions ush/python/wxflow
1 change: 1 addition & 0 deletions workflow/wxflow
