
Feat prms tests #800

Workflow file for this run

name: CI
on:
  push:
    branches:
      - "*"
      - "!v[0-9]+.[0-9]+.[0-9]+*"
  pull_request:
    branches:
      - "*"
      - "!v[0-9]+.[0-9]+.[0-9]+*"
  workflow_dispatch:
    inputs:
      debug_enabled:
        type: boolean
        description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
        required: false
        default: false
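# The push/pull_request filters above run CI for every branch except those whose
# names look like release versions (v<major>.<minor>.<patch>...), presumably to
# skip release branches. The manual workflow_dispatch trigger exposes a
# debug_enabled flag that is read by the tmate step in the test job below to
# open an interactive debugging session.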
jobs:
  pyws_setup:
    name: standard installation
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout repo
        uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Upgrade pip and install build and twine
        run: |
          python -m pip install --upgrade pip
          pip install wheel build twine
      - name: Base installation
        run: |
          pip --verbose install .
      - name: Print pywatershed version
        run: |
          python -c "import pywatershed; print(pywatershed.__version__)"
      - name: Build pywatershed, check dist outputs
        run: |
          python -m build
          twine check --strict dist/*
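  # pyws_setup (above) is a packaging smoke test: it verifies a plain
  # `pip install .`, prints the installed pywatershed version, then builds the
  # sdist/wheel with `python -m build` and validates the distribution metadata
  # with `twine check --strict`.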
  pyws_lint:
    name: linting
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout repo
        uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install dependencies
        run: |
          pip install wheel
          pip install .
          pip install ".[lint]"
      - name: Version info
        run: |
          pip -V
          pip list
      - name: Run isort
        run: |
          echo "if isort check fails update isort using"
          echo " pip install isort --upgrade"
          echo "and run"
          echo " isort ./pywatershed ./autotest"
          echo "and then commit the changes."
          isort --check --diff ./pywatershed ./autotest
      - name: Run black
        run: |
          echo "if black check fails update black using"
          echo " pip install black --upgrade"
          echo "and run"
          echo " black ./pywatershed ./autotest"
          echo "and then commit the changes."
          black --check --diff ./pywatershed ./autotest
      - name: Run flake8
        run: |
          flake8 --count --show-source --exit-zero ./pywatershed ./autotest
      - name: Run pylint
        run: |
          pylint --jobs=2 --errors-only --exit-zero ./pywatershed ./autotest
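  # Note on the lint job above: isort and black run with --check --diff and will
  # fail the job on formatting drift, while flake8 and pylint use --exit-zero and
  # are therefore advisory only (their findings appear in the log but never fail CI).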
  test:
    name: ${{ matrix.os }} py${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash -l {0}
    strategy:
      fail-fast: false
      matrix:
        os: [ "ubuntu-latest", "macos-latest", "windows-latest" ]
        python-version: ["3.9", "3.10"]
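    # The matrix above expands to six jobs (three OSes x two Python versions) and
    # fail-fast: false keeps the remaining combinations running when one fails.
    # The login shell (bash -l {0}) is presumably required so the micromamba
    # environment created below is activated in each run step.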
    steps:
      - name: Checkout repo
        uses: actions/checkout@v3
      - name: Setup tmate session
        uses: mxschmitt/action-tmate@v3
        if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}
      - name: Set environment variables
        run: |
          echo "PYTHON_VERSION=${{ matrix.python-version }}" >> $GITHUB_ENV
          echo "PYWS_FORTRAN=true" >> $GITHUB_ENV
          echo 'SETUPTOOLS_ENABLE_FEATURES="legacy-editable"' >> $GITHUB_ENV
          cat .mf6_ci_ref_remote >> $GITHUB_ENV
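      # PYWS_FORTRAN=true presumably tells the pywatershed build to compile its
      # optional Fortran extensions (gfortran is set up below). The repository
      # file .mf6_ci_ref_remote is assumed to contain MF6_REF=... and
      # MF6_REMOTE=... lines; appending it to $GITHUB_ENV exposes those values as
      # env.MF6_REF / env.MF6_REMOTE for the MODFLOW 6 checkout below.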
      - name: Enforce MF6 ref and remote merge to main
        if: github.base_ref == 'main'
        run: |
          echo Merge commit: GITHUB_BASE_REF = $GITHUB_BASE_REF
          req_ref=feat-snf  # if not develop, submit an issue
          echo $MF6_REF
          if [[ "$MF6_REF" != "$req_ref" ]]; then exit 1; fi
          req_remote=langevin-usgs/modflow6  # if not MODFLOW-USGS/modflow6, submit an issue
          echo $MF6_REMOTE
          if [[ "$MF6_REMOTE" != "$req_remote" ]]; then exit 1; fi
      - name: Setup gfortran
        uses: fortran-lang/setup-fortran@v1
        with:
          compiler: gcc
          version: 11
      - name: Link gfortran dylibs on Mac
        if: runner.os == 'macOS'
        run: .github/scripts/symlink_gfortran_mac.sh
      - name: Install Dependencies via Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: environment.yml
          cache-environment: true
          cache-downloads: true
          create-args: >-
            python=${{ matrix.python-version }}
            conda
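      # gfortran 11 (GCC) supports the Fortran builds enabled by PYWS_FORTRAN
      # above; on macOS an extra script symlinks the gfortran dylibs, presumably
      # so the compiler's runtime libraries can be found at link/run time.
      # Micromamba creates the conda environment from environment.yml, caching the
      # environment and package downloads, and pins the matrix Python version.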
      - name: Checkout MODFLOW 6
        uses: actions/checkout@v3
        with:
          repository: ${{ env.MF6_REMOTE }}
          ref: ${{ env.MF6_REF }}
          path: modflow6
      - name: Update flopy MODFLOW 6 classes
        working-directory: modflow6/autotest
        run: |
          python update_flopy.py
      - name: Install pywatershed
        run: |
          pip install .
      - name: Version info
        run: |
          pip -V
          pip list
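      # MODFLOW 6 is checked out at the ref/remote pinned in .mf6_ci_ref_remote,
      # and update_flopy.py (run from modflow6/autotest) updates flopy's MODFLOW 6
      # classes to match that checkout (per the step name). pywatershed itself is
      # then pip-installed into the micromamba environment.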
      - name: hru_1 - generate and manage test data domain, run PRMS and convert csv output to NetCDF
        working-directory: test_data/generate
        run: |
          pytest -vv -n=2 --durations=0 run_prms_domains.py --domain=hru_1
          pytest -vv -n=auto --durations=0 convert_prms_output_to_nc.py --domain=hru_1
          pytest -vv -n=auto --durations=0 remove_prms_csvs.py
      - name: hru_1 - list netcdf input files
        working-directory: test_data
        run: |
          find hru_1/output/ -name '*.nc'
      - name: hru_1 - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -n=auto
          --domain_yaml=../test_data/hru_1/hru_1.yaml
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_hru_1.xml
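      # Each test domain follows the same pattern: pytest scripts under
      # test_data/generate run PRMS for the domain, convert the PRMS CSV output to
      # NetCDF, and delete the CSVs; the resulting *.nc files are listed for the
      # log, and then the autotest suite runs against the domain's YAML with
      # coverage (coverage.xml) and a per-domain JUnit report.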
      - name: drb_2yr - generate and manage test data
        working-directory: test_data/generate
        run: |
          pytest -vv remove_output_dirs.py --domain=hru_1
          pytest -vv -n=2 run_prms_domains.py --domain=drb_2yr
          pytest -vv -n=auto convert_prms_output_to_nc.py --domain=drb_2yr
          pytest -vv -n=auto remove_prms_csvs.py
      - name: drb_2yr - list netcdf input files
        working-directory: test_data
        run: |
          find drb_2yr/output/ -name '*.nc'
      - name: drb_2yr - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -n=auto
          --domain_yaml=../test_data/drb_2yr/drb_2yr.yaml
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_drb_2yr.xml
      - name: ucb_2yr - generate and manage test data
        working-directory: test_data/generate
        run: |
          pytest -vv remove_output_dirs.py --domain=drb_2yr
          pytest -vv -n=2 run_prms_domains.py --domain=ucb_2yr
          pytest -vv -n=auto convert_prms_output_to_nc.py --domain=ucb_2yr
          pytest -vv -n=auto remove_prms_csvs.py
      - name: ucb_2yr - list netcdf input files
        working-directory: test_data
        run: |
          find ucb_2yr/output/ -name '*.nc'
      - name: ucb_2yr - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -n=auto
          --domain_yaml=../test_data/ucb_2yr/ucb_2yr.yaml
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_ucb_2yr.xml
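      # drb_2yr and ucb_2yr repeat the hru_1 pattern above; each one first runs
      # remove_output_dirs.py for the previous domain, presumably to free runner
      # disk space before generating the next domain's output.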
      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: Test results for ${{ runner.os }}-${{ matrix.python-version }}
          path: |
            ./autotest/pytest_hru_1.xml
            ./autotest/pytest_drb_2yr.xml
            ./autotest/pytest_ucb_2yr.xml
      - name: Upload code coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          file: ./autotest/coverage.xml  # should be just the ucb result
          # flags: unittests
          env_vars: RUNNER_OS,PYTHON_VERSION
          # name: codecov-umbrella
          fail_ci_if_error: false
          version: "v0.1.15"