Merge pull request #312 from jmccreight/feat_3099 #1158

Workflow file for this run

name: CI
on:
  push:
    branches:
      - "*"
      - "!v[0-9]+.[0-9]+.[0-9]+*"
  pull_request:
    branches:
      - "*"
      - "!v[0-9]+.[0-9]+.[0-9]+*"
  workflow_dispatch:
    inputs:
      debug_enabled:
        type: boolean
        description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
        required: false
        default: false
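# The branch filters above run CI on pushes and pull requests for all branches
# except those named like releases (v<major>.<minor>.<patch>...). The
# workflow_dispatch input allows a manual run with a tmate debug session enabled.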
jobs:
  pyws_setup:
    name: standard installation
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Upgrade pip and install build and twine
        run: |
          python -m pip install --upgrade pip
          pip install wheel build 'twine<5.0.0' 'importlib_metadata<=7.0.1' 'setuptools<=72.2.0' 'numpy<2.0'
      - name: Base installation
        run: |
          pip --verbose install .
      - name: Print pywatershed version
        run: |
          python -c "import pywatershed; print(pywatershed.__version__)"
      - name: Build pywatershed, check dist outputs
        run: |
          python -m build
          twine check --strict dist/*
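  # A rough local equivalent of the packaging check above (commands assumed to
  # be run from a clone of the repository, outside CI):
  #   python -m pip install --upgrade pip
  #   pip install build 'twine<5.0.0'
  #   python -m build
  #   twine check --strict dist/*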
  pyws_lint:
    name: linting
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          pip install wheel
          pip install .
          pip install ".[lint]"
      - name: Version info
        run: |
          pip -V
          pip list
      - name: Lint
        run: ruff check .
      - name: Format
        run: ruff format --check .
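  # Linting and formatting are both handled by ruff, installed via the project's
  # "lint" extra. A local equivalent would be roughly:
  #   pip install ".[lint]"
  #   ruff check .
  #   ruff format --check .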
  test:
    name: ${{ matrix.os }} py${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash -l {0}
    strategy:
      fail-fast: false
      matrix:
        os: [ "ubuntu-latest", "macos-latest", "windows-latest" ]
        python-version: ["3.10", "3.11"]
    steps:
      - name: Free Disk Space (Ubuntu)
        if: runner.os == 'Linux'
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: false
          android: true
          dotnet: true
          haskell: true
          large-packages: true
          docker-images: true
          swap-storage: true
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Setup tmate session
        uses: mxschmitt/action-tmate@v3
        if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}
      - name: Set environment variables
        run: |
          echo "PYTHON_VERSION=${{ matrix.python-version }}" >> $GITHUB_ENV
          echo "PYWS_FORTRAN=false" >> $GITHUB_ENV
          cat .mf6_ci_ref_remote >> $GITHUB_ENV
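      # .mf6_ci_ref_remote is appended to $GITHUB_ENV, so it is expected to hold
      # KEY=value lines defining MF6_REF and MF6_REMOTE, e.g. (values shown are
      # the ones required by the guard step below):
      #   MF6_REF=develop
      #   MF6_REMOTE=MODFLOW-USGS/modflow6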
      - name: Enforce MF6 ref and remote merge to main
        if: github.base_ref == 'main'
        run: |
          echo Merge commit: GITHUB_BASE_REF = $GITHUB_BASE_REF
          req_ref=develop # if not develop, submit an issue
          echo $MF6_REF
          if [[ "$MF6_REF" != "$req_ref" ]]; then exit 1; fi
          req_remote=MODFLOW-USGS/modflow6
          echo $MF6_REMOTE
          if [[ "$MF6_REMOTE" != "$req_remote" ]]; then echo "bad mf6 remote in .mf6_ci_ref_remote"; exit 1; fi
      - name: Setup gfortran
        uses: fortran-lang/setup-fortran@v1
        with:
          compiler: gcc
          version: 11
      - name: Link gfortran dylibs on Mac
        if: runner.os == 'macOS'
        run: .github/scripts/symlink_gfortran_mac.sh
      - name: Install Dependencies via Micromamba
        uses: mamba-org/[email protected]
        with:
          micromamba-version: '1.5.10-0'
          log-level: debug
          environment-file: environment.yml
          cache-environment: true
          cache-downloads: true
          create-args: >-
            python=${{matrix.python-version}}
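      # The micromamba action above builds the conda environment from
      # environment.yml, pins the matrix Python version via create-args, and
      # caches both the environment and the package downloads.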
      - name: Checkout MODFLOW 6
        uses: actions/checkout@v4
        with:
          repository: ${{ env.MF6_REMOTE }}
          ref: ${{ env.MF6_REF }}
          path: modflow6
      - name: Update flopy MODFLOW 6 classes
        working-directory: modflow6/autotest
        run: |
          python update_flopy.py
      - name: Install mf6 nightly build binaries
        uses: modflowpy/install-modflow-action@v1
        with:
          repo: modflow6-nightly-build
      - name: Install pywatershed
        run: |
          pip install .
      - name: Version info
        run: |
          pip -V
          pip list
      - name: Get GIS files for tests
        run: |
          python pywatershed/utils/gis_files.py
      - name: domainless - run tests not requiring domain data
        working-directory: autotest
        run: pytest
          -m domainless
          -n=auto
          -vv
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_domainless.xml
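      # Tests marked "domainless" need no domain data. Each block that follows
      # repeats the same pattern for a specific domain: generate the test data
      # (run PRMS and convert its CSV output to NetCDF), list the resulting
      # NetCDF files, then run pytest with -m "not domainless" plus --domain and
      # --control_pattern selecting that domain's control file.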
      - name: sagehen_5yr_no_cascades - generate and manage test data domain, run PRMS and convert csv output to NetCDF
        working-directory: autotest
        run: |
          python generate_test_data.py \
            -n=auto --domain=sagehen_5yr --control_pattern=sagehen_no_cascades.control \
            --remove_prms_csvs --remove_prms_output_dirs
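      # generate_test_data.py drives PRMS for the requested domain and control
      # file and converts the CSV output to NetCDF; the --remove_* flags clean up
      # the intermediate CSVs and PRMS output directories afterwards (-n=auto
      # presumably parallelizes the work, mirroring the pytest-xdist option).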
      - name: sagehen_5yr_no_cascades - list netcdf input files
        working-directory: test_data
        run: |
          find sagehen_5yr/output_no_cascades -name '*.nc'
      - name: sagehen_5yr_no_cascades - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -rs
          -n=auto
          -m "not domainless"
          --domain=sagehen_5yr
          --control_pattern=sagehen_no_cascades.control
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_sagehen_5yr_no_cascades.xml
      - name: hru_1_nhm - generate and manage test data domain, run PRMS and convert csv output to NetCDF
        working-directory: autotest
        run: |
          python generate_test_data.py \
            -n=auto --domain=hru_1 --control_pattern=nhm.control \
            --remove_prms_csvs --remove_prms_output_dirs
      - name: hru_1_nhm - list netcdf input files
        working-directory: test_data
        run: |
          find hru_1/output -name '*.nc'
      - name: hru_1_nhm - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -rs
          -n=auto
          -m "not domainless"
          --domain=hru_1
          --control_pattern=nhm.control
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_hru_1_nhm.xml
      - name: drb_2yr with and without dprst and obsin - generate and manage test data
        working-directory: autotest
        run: |
          python generate_test_data.py \
            -n=auto --domain=drb_2yr \
            --remove_prms_csvs --remove_prms_output_dirs
      - name: drb_2yr_nhm - list netcdf input files
        working-directory: test_data
        run: |
          find drb_2yr/output -name '*.nc'
      - name: drb_2yr_no_dprst - list netcdf input files
        working-directory: test_data
        run: |
          find drb_2yr/output_no_dprst -name '*.nc'
      - name: drb_2yr_nhm - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -rs
          -n=auto
          -m "not domainless"
          --domain=drb_2yr
          --control_pattern=nhm.control
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_drb_2yr_nhm.xml
      # Specific tests not redundant with dprst
      - name: drb_2yr_no_dprst - pywatershed tests
        working-directory: autotest
        run: pytest
          test_prms_runoff.py
          test_prms_soilzone.py
          test_prms_groundwater.py
          test_prms_above_snow.py
          test_prms_below_snow.py
          -vv
          -rs
          -n=auto
          -m "not domainless"
          --domain=drb_2yr
          --control_pattern=no_dprst
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_drb_2yr_no_dprst.xml
      # Specific test for the obsin flow node
      - name: drb_2yr_obsin - pywatershed tests
        working-directory: autotest
        run: pytest
          test_obsin_flow_node.py
          -vv
          -n=0
          -m "not domainless"
          --domain=drb_2yr
          --control_pattern=nhm_obsin.control
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_drb_2yr_obsin.xml
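      # The obsin test runs with -n=0 (no xdist workers, i.e. serially), unlike
      # the other domain test runs, and exercises only test_obsin_flow_node.py.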
      - name: ucb_2yr_nhm - generate and manage test data
        working-directory: autotest
        run: |
          python generate_test_data.py \
            -n=auto --domain=ucb_2yr --control_pattern=nhm.control \
            --remove_prms_csvs --remove_prms_output_dirs
      - name: ucb_2yr_nhm - list netcdf input files
        working-directory: test_data
        run: |
          find ucb_2yr/output -name '*.nc'
      - name: ucb_2yr_nhm - pywatershed tests
        working-directory: autotest
        run: pytest
          -vv
          -n=auto
          -m "not domainless"
          --domain=ucb_2yr
          --control_pattern=nhm.control
          --durations=0
          --cov=pywatershed
          --cov-report=xml
          --junitxml=pytest_ucb_2yr_nhm.xml
      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: Test results for ${{ runner.os }}-${{ matrix.python-version }}
          path: |
            ./autotest/pytest_hru_1_nhm.xml
            ./autotest/pytest_drb_2yr_nhm.xml
            ./autotest/pytest_drb_2yr_no_dprst.xml
            ./autotest/pytest_ucb_2yr_nhm.xml
      - name: Upload code coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          file: ./autotest/coverage.xml # should be just the ucb result
          # flags: unittests
          env_vars: RUNNER_OS,PYTHON_VERSION
          # name: codecov-umbrella
          fail_ci_if_error: false
          version: "v0.1.15"