Commit 29c2703

Add developmental test cases: idealized baroclinic wave and 2020 July CAPE cases + #2459 (#2461)

* UFSWM - Add tests-dev ATM-only idealized dry baroclinic wave test and a 2020 July CAPE case
* UFSWM - Update modulefile to support Rocky 8 on CSPs, with ParallelWorks

---------

Co-authored-by: Wei Huang <[email protected]>
Co-authored-by: Jong Kim <[email protected]>
3 people authored Nov 12, 2024
1 parent 058f073 commit 29c2703
Showing 25 changed files with 3,010 additions and 2,130 deletions.
modulefiles/ufs_noaacloud.intel.lua (20 changes: 12 additions & 8 deletions)
@@ -4,23 +4,27 @@ loads UFS Model prerequisites for NOAA Parallelworks/Intel

prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")
prepend_path("MODULEPATH", "/apps/modules/modulefiles")
prepend_path("PATH", "/contrib/EPIC/bin")
load("gnu")
load("stack-intel")
load("stack-intel-oneapi-mpi")

stack_intel_ver=os.getenv("stack_intel_ver") or "2021.10.0"
gnu_ver=os.getenv("gnu_ver") or ""
load(pathJoin("gnu", gnu_ver))

stack_intel_ver=os.getenv("stack_intel_ver") or ""
load(pathJoin("stack-intel", stack_intel_ver))

stack_impi_ver=os.getenv("stack_impi_ver") or "2021.10.0"
load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver))
unload("gnu")
stack_intel_oneapi_mpi_ver=os.getenv("stack_intel_oneapi_mpi_ver") or ""
load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver))

gnu_ver=os.getenv("gnu_ver") or ""
unload(pathJoin("gnu", gnu_ver))

cmake_ver=os.getenv("cmake_ver") or "3.23.1"
load(pathJoin("cmake", cmake_ver))

load("ufs_common")

nccmp_ver=os.getenv("nccmp_ver") or "1.9.0.1"
load(pathJoin("nccmp", nccmp_ver))

setenv("CC", "mpiicc")
setenv("CXX", "mpiicpc")
setenv("FC", "mpiifort")
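The rewritten modulefile drops the bare load("gnu")/load("stack-intel")/load("stack-intel-oneapi-mpi") calls in favor of per-tool version variables read from the environment, so a version can be pinned without editing the file. Below is a minimal Python sketch of that pattern, assuming (as Lmod's pathJoin appears to do) that an empty version component is dropped so the site default module gets loaded; the function and variable names here are illustrative only, not part of the modulefile.

import os

def module_spec(name, env_var):
    """Build a module name the way the Lua does: use a version exported in the
    environment if present, otherwise fall back to the site default."""
    version = os.getenv(env_var) or ""                 # mirrors: os.getenv("...") or ""
    return f"{name}/{version}" if version else name    # pathJoin-like: empty part dropped

print(module_spec("stack-intel", "stack_intel_ver"))   # unset -> "stack-intel"
os.environ["stack_intel_ver"] = "2021.10.0"
print(module_spec("stack-intel", "stack_intel_ver"))   # pinned -> "stack-intel/2021.10.0"
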
tests-dev/baseline_setup.yaml (18 changes: 9 additions & 9 deletions)
@@ -54,15 +54,15 @@ jet:
QUEUE: batch
COMPILE_QUEUE: batch
PARTITION: xjet
dprefix: /mnt/lfs4/HFIP/hfv3gfs/${USER}
DISKNM: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT
STMP: /mnt/lfs4/HFIP/hfv3gfs/${USER}/RT_BASELINE
PTMP: /mnt/lfs4/HFIP/hfv3gfs/${USER}/RT_RUNDIRS
dprefix: /lfs5/HFIP/hfv3gfs/${USER}
DISKNM: /lfs5/HFIP/hfv3gfs/role.epic/RT
STMP: /lfs5/HFIP/hfv3gfs/${USER}/RT_BASELINE
PTMP: /lfs5/HFIP/hfv3gfs/${USER}/RT_RUNDIRS
RUNDIR_ROOT:
SCHEDULER: slurm
INPUTDATA_ROOT: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/BM_IC-20220207
INPUTDATA_ROOT: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/BM_IC-20220207
derecho:
QUEUE: main
COMPILE_QUEUE: main
@@ -82,8 +82,8 @@ noaacloud:
PARTITION:
dprefix: /lustre
DISKNM: /contrib/ufs-weather-model/RT
STMP: /lustre/stmp4
PTMP: /lustre/stmp2
STMP: /lustre/stmp
PTMP: /lustre/stmp
RUNDIR_ROOT:
SCHEDULER: slurm
INPUTDATA_ROOT: /contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501
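The Jet entries now point at /lfs5 instead of the retired /mnt/lfs4 mount, and the noaacloud STMP/PTMP directories collapse to /lustre/stmp. For context, a minimal sketch of how a consumer of this YAML might resolve the ${USER} placeholders, assuming PyYAML is available; the actual tests-dev scripts may expand these differently.

import os
import yaml  # PyYAML, assumed available in the tests-dev environment

with open("tests-dev/baseline_setup.yaml") as f:
    machines = yaml.safe_load(f)

jet = machines["jet"]
stmp = os.path.expandvars(jet["STMP"])   # ${USER} expanded the way a shell would
print(jet["DISKNM"])                     # /lfs5/HFIP/hfv3gfs/role.epic/RT
print(stmp)                              # /lfs5/HFIP/hfv3gfs/<user>/RT_BASELINE
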
tests-dev/create_log.py (153 changes: 89 additions & 64 deletions)
@@ -3,10 +3,27 @@
import subprocess
import yaml
from datetime import datetime
#import datetime
from ufs_test_utils import get_testcase, write_logfile, delete_files, machine_check_off

def get_timestamps(path):
"""Obtain experiment starting and ending time marks through file timestamps
Args:
path (str): experiment log directory
Returns:
str: experiment starting and ending time strings
"""
dir_list = os.listdir(path)
dt = []
for f in dir_list:
m_time = os.path.getmtime(path+f)
dt.append(datetime.fromtimestamp(m_time))
dtsort=sorted(dt)
return str(dtsort[0]),str(dtsort[-1])

def finish_log():
"""Collects regression test results and generates log file.
"""Collect regression test results and generate log file.
"""
UFS_TEST_YAML = str(os.getenv('UFS_TEST_YAML'))
PATHRT = os.getenv('PATHRT')
@@ -40,40 +57,46 @@ def finish_log():
COMPILE_ID = apps
COMPILE_LOG = 'compile_'+COMPILE_ID+'.log'
COMPILE_LOG_TIME ='compile_'+COMPILE_ID+'_timestamp.txt'
with open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG) as f:
if "[100%] Linking Fortran executable" in f.read():
COMPILE_PASS += 1
f.seek(0)
for line in f:
if 'export RUNDIR_ROOT=' in line:
RUNDIR_ROOT=line.split("=")[1]
break
compile_err = RUNDIR_ROOT.strip('\n')+'/compile_'+COMPILE_ID+'/err'
with open(compile_err) as ferr:
contents = ferr.read()
count_warning = contents.count(": warning #")
count_remarks = contents.count(": remark #")
ferr.close()
warning_log = ""
if count_warning > 0:
warning_log = "("+str(count_warning)+" warnings"
if count_remarks > 0:
warning_log+= ","+str(count_remarks)+" remarks)"
flog = open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG_TIME)
timing_data = flog.read()
first_line = timing_data.split('\n', 1)[0]
etime = int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip())
btime = int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip())
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
btime_min, btime_sec = divmod(int(btime), 60)
btime_min = f"{btime_min:02}"; btime_sec = f"{btime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+btime_min+':'+btime_sec+"]"
flog.close()
compile_log = "PASS -- COMPILE "+COMPILE_ID+time_log+warning_log+"\n"
else:
compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
f.close()
COMPILE_CHECK1 ='Compile '+COMPILE_ID+' Completed'
COMPILE_CHECK2 ='[100%] Linking Fortran executable'
try:
with open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG) as f:
if COMPILE_CHECK1 in f.read() or COMPILE_CHECK2 in f.read():
COMPILE_PASS += 1
f.seek(0)
for line in f:
if 'export RUNDIR_ROOT=' in line:
RUNDIR_ROOT=line.split("=")[1]
break
compile_err = RUNDIR_ROOT.strip('\n')+'/compile_'+COMPILE_ID+'/err'
with open(compile_err) as ferr:
contents = ferr.read()
count_warning = contents.count(": warning #")
count_remarks = contents.count(": remark #")
ferr.close()
warning_log = ""
if count_warning > 0:
warning_log = "("+str(count_warning)+" warnings"
if count_remarks > 0:
warning_log+= ","+str(count_remarks)+" remarks)"
flog = open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG_TIME)
timing_data = flog.read()
first_line = timing_data.split('\n', 1)[0]
etime = int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip())
btime = int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip())
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
btime_min, btime_sec = divmod(int(btime), 60)
btime_min = f"{btime_min:02}"; btime_sec = f"{btime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+btime_min+':'+btime_sec+"]"
flog.close()
compile_log = "PASS -- COMPILE "+COMPILE_ID+time_log+warning_log+"\n"
else:
compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
f.close()
except FileNotFoundError:
compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
print('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG+': does not exist')
run_logs += compile_log
else:
PASS_TESTS = True
@@ -94,47 +117,49 @@ def finish_log():
PASS_CHECK = 'Test '+TEST_ID+' PASS'
MAXS_CHECK = 'The maximum resident set size (KB)'
pass_flag = False
create_dep_flag = False
if (CREATE_BASELINE == 'true' and not DEP_RUN == ""):
create_dep_flag = True
if not create_dep_flag:
try:
with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
if PASS_CHECK in f.read():
pass_flag = True
f.close()
if pass_flag:
f = open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG_TIME)
timing_data = f.read()
first_line = timing_data.split('\n', 1)[0]
etime = str(int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip()))
rtime = str(int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip()))
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
rtime_min, rtime_sec = divmod(int(rtime), 60)
rtime_min = f"{rtime_min:02}"; rtime_sec = f"{rtime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+rtime_min+':'+rtime_sec+"]"
f.close()
with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
if pass_flag :
f.close()
except FileNotFoundError:
print('./logs/log_'+MACHINE_ID+'/'+TEST_LOG+': does not exist')
if pass_flag:
f = open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG_TIME)
timing_data = f.read()
first_line = timing_data.split('\n', 1)[0]
etime = str(int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip()))
rtime = str(int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip()))
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
rtime_min, rtime_sec = divmod(int(rtime), 60)
rtime_min = f"{rtime_min:02}"; rtime_sec = f"{rtime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+rtime_min+':'+rtime_sec+"]"
f.close()
if pass_flag :
with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
rtlog_file = f.readlines()
for line in rtlog_file:
if MAXS_CHECK in line:
memsize= line.split('=')[1].strip()
test_log = 'PASS -- TEST '+TEST_ID+time_log+' ('+memsize+' MB)\n'
PASS_NR += 1
else:
test_log = 'FAIL -- TEST '+TEST_ID+'\n'
failed_list.append(TEST_NAME+' '+RT_COMPILER)
FAIL_NR += 1
run_logs += test_log
f.close()
f.close()
else:
test_log = 'FAIL -- TEST '+TEST_ID+'\n'
failed_list.append(TEST_NAME+' '+RT_COMPILER)
FAIL_NR += 1
run_logs += test_log
run_logs += '\n'
write_logfile(filename, "a", output=run_logs)

TEST_START_TIME = os.getenv('TEST_START_TIME')
TEST_END_TIME = os.getenv('TEST_END_TIME')
start_time = datetime.strptime(TEST_START_TIME, "%Y%m%d %H:%M:%S")
end_time = datetime.strptime(TEST_END_TIME, "%Y%m%d %H:%M:%S")
TEST_START_TIME, TEST_END_TIME = get_timestamps('./logs/log_'+MACHINE_ID+'/')

clean_START_TIME= TEST_START_TIME.split('.')[0]
start_time = datetime.strptime(clean_START_TIME, "%Y-%m-%d %H:%M:%S")
clean_END_TIME= TEST_END_TIME.split('.')[0]
end_time = datetime.strptime(clean_END_TIME, "%Y-%m-%d %H:%M:%S")

hours, remainder= divmod((end_time - start_time).total_seconds(), 3600)
minutes, seconds= divmod(remainder, 60)
hours = int(hours); minutes=int(minutes); seconds =int(seconds)
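Taken together, the new get_timestamps helper and the rewritten tail of finish_log derive the test window from file modification times in the per-machine log directory instead of the TEST_START_TIME/TEST_END_TIME environment variables. A small self-contained sketch of that flow (the log directory path is hypothetical):

import os
from datetime import datetime

def get_timestamps(path):
    """Oldest and newest file mtimes in a log directory, as strings."""
    stamps = sorted(
        datetime.fromtimestamp(os.path.getmtime(os.path.join(path, name)))
        for name in os.listdir(path)
    )
    return str(stamps[0]), str(stamps[-1])

log_dir = './logs/log_hera/'              # hypothetical MACHINE_ID
start_str, end_str = get_timestamps(log_dir)

# Drop fractional seconds before parsing, as finish_log() does.
start = datetime.strptime(start_str.split('.')[0], "%Y-%m-%d %H:%M:%S")
end = datetime.strptime(end_str.split('.')[0], "%Y-%m-%d %H:%M:%S")

hours, remainder = divmod((end - start).total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)
print(f"Elapsed: {int(hours):02d}:{int(minutes):02d}:{int(seconds):02d}")
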
tests-dev/create_xml.py (6 changes: 4 additions & 2 deletions)
@@ -32,6 +32,7 @@ def rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INP
<!ENTITY INPUTDATA_ROOT "{INPUTDATA_ROOT}">
<!ENTITY INPUTDATA_ROOT_WW3 "{INPUTDATA_ROOT_WW3}">
<!ENTITY INPUTDATA_ROOT_BMIC "{INPUTDATA_ROOT_BMIC}">
<!ENTITY INPUTDATA_LM4 "{INPUTDATA_ROOT}/LM4_input_data">
<!ENTITY RUNDIR_ROOT "{RUNDIR_ROOT}">
<!ENTITY NEW_BASELINE "{NEW_BASELINE}">
]>
@@ -195,6 +196,7 @@ def write_runtest_env():
export INPUTDATA_ROOT={INPUTDATA_ROOT}
export INPUTDATA_ROOT_WW3={INPUTDATA_ROOT_WW3}
export INPUTDATA_ROOT_BMIC={INPUTDATA_ROOT_BMIC}
export INPUTDATA_LM4={INPUTDATA_ROOT}/LM4_input_data
export PATHRT={PATHRT}
export PATHTR={PATHTR}
export NEW_BASELINE={NEW_BASELINE}
@@ -216,8 +218,8 @@ def write_runtest_env():
export RTVERBOSE=false
"""
if ( MACHINE_ID == 'jet' ):
runtest_envs+="export PATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/bin:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/bin:$PATH"
runtest_envs+="export PYTHONPATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/lib/python3.8/site-packages:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/lib/python3.8/site-packages"
runtest_envs += f"export PATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/bin:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/bin:$PATH\n"
runtest_envs += f"export PYTHONPATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/lib/python3.8/site-packages:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/lib/python3.8/site-packages\n"

with open(filename,"w+") as f:
f.writelines(runtest_envs)
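The Jet branch of write_runtest_env previously appended the PATH and PYTHONPATH exports without trailing newlines, so the two statements ran together on one line in the generated environment file; the change terminates each with "\n". A tiny sketch of the failure mode, using placeholder paths rather than the real Jet miniconda locations:

# Old behaviour: no trailing newline, so the exports are glued together.
envs = "export RTVERBOSE=false\n"
envs += "export PATH=/opt/conda/bin:$PATH"
envs += "export PYTHONPATH=/opt/conda/lib"
print(envs)   # ...:$PATHexport PYTHONPATH=...  -> not valid shell

# New behaviour: one export per line.
envs = "export RTVERBOSE=false\n"
envs += "export PATH=/opt/conda/bin:$PATH\n"
envs += "export PYTHONPATH=/opt/conda/lib\n"
print(envs)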