Fast Matrix Assembly #2577

# This workflow will install Python dependencies and run tests with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Run tests

on:
  push:
    branches: [ devel ]
  pull_request:
    branches: [ devel ]

jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest ]
        python-version: [ 3.9, '3.10', '3.11', '3.12' ]
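        # 'isMerge' is a dummy matrix dimension whose single value is true on a
        # push to devel (i.e. after a merge) and false on a pull request. The
        # 'exclude' rules below use it to skip Python 3.10 and 3.11 on Ubuntu
        # for pull requests, while 'include' always adds the two macOS jobs.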
        isMerge:
          - ${{ github.event_name == 'push' && github.ref == 'refs/heads/devel' }}
        exclude:
          - { isMerge: false, python-version: '3.10' }
          - { isMerge: false, python-version: '3.11' }
        include:
          - os: macos-latest
            python-version: '3.10'
          - os: macos-latest
            python-version: '3.11'

    name: ${{ matrix.os }} / Python ${{ matrix.python-version }}

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'
          cache-dependency-path: |
            pyproject.toml
            requirements.txt
            requirements_extra.txt
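
      # Note: the pip cache set up above is invalidated whenever any of the
      # files listed under 'cache-dependency-path' changes.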

      - name: Install non-Python dependencies on Ubuntu
        if: matrix.os == 'ubuntu-latest'
        uses: awalsh128/cache-apt-pkgs-action@latest
        with:
          packages: gfortran openmpi-bin libopenmpi-dev libhdf5-openmpi-dev
          version: 1.0
          execute_install_scripts: true

      # When the apt packages are restored from the cache, the default MPI
      # compiler is not set. The workaround is to 'reinstall' openmpi-bin and
      # libhdf5-openmpi-dev: since the packages are already present, nothing is
      # actually installed, but `update-alternatives` is rerun, which fixes the
      # symlinks to mpicc/mpif90.
      - name: Set default MPI and HDF5 C compilers on Ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt-get update
          sudo apt-get install --reinstall openmpi-bin libhdf5-openmpi-dev

      - name: Install non-Python dependencies on macOS
        if: matrix.os == 'macos-latest'
        run: |
          brew install open-mpi
          brew install hdf5-mpi
          brew install libomp
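          # The macOS runners typically ship only versioned gfortran binaries
          # (e.g. gfortran-13). If no plain 'gfortran' is on the PATH, find the
          # newest versioned one and symlink it as 'gfortran'.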
          GFORTRAN_HOME=$(which gfortran || true)
          echo "GFORTRAN_HOME : $GFORTRAN_HOME"
          if [[ ! -f "$GFORTRAN_HOME" ]]; then
            gfort=$(find ${PATH//:/\/ } -name 'gfortran-*' -exec basename {} \; | sort | tail -n 1 || true)
            echo "Found $gfort"
            gfort_path=$(which ${gfort})
            folder=$(dirname ${gfort_path})
            ln -s ${gfort_path} ${folder}/gfortran
          fi
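          # '--oversubscribe' lets Open MPI launch more ranks than the number
          # of available cores on the small macOS runners.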
echo "MPI_OPTS=--oversubscribe" >> $GITHUB_ENV

      - name: Print information on MPI and HDF5 libraries
        run: |
          ompi_info
          h5pcc -showconfig -echo || true

      - name: Upgrade pip
        run: |
          python -m pip install --upgrade pip
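
      # h5py's build reads the HDF5_DIR environment variable to locate a
      # parallel HDF5 installation, so the next step exports the directory
      # which contains the HDF5 shared library.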
      - name: Determine directory of parallel HDF5 library
        run: |
          if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
            HDF5_DIR=$(dpkg -L libhdf5-openmpi-dev | grep libhdf5.so | xargs dirname)
          elif [[ "${{ matrix.os }}" == "macos-latest" ]]; then
            HDF5_DIR=$(brew list hdf5-mpi | grep "libhdf5.dylib" | xargs dirname | xargs dirname)
          fi
          echo $HDF5_DIR
          echo "HDF5_DIR=$HDF5_DIR" >> $GITHUB_ENV

      - name: Cache PETSc
        uses: actions/cache@v4
        id: cache-petsc
        env:
          cache-name: cache-PETSc
        with:
          path: "./petsc"
          key: cache-${{ matrix.os }}-${{ matrix.python-version }}

      - if: steps.cache-petsc.outputs.cache-hit != 'true'
        name: Download a specific release of PETSc
        run: |
          git clone --depth 1 --branch v3.21.4 https://gitlab.com/petsc/petsc.git
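
      # '--with-scalar-type=complex' builds PETSc with complex scalars, while
      # '--with-fortran-bindings=0' skips the Fortran bindings to shorten the
      # build.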
      - if: steps.cache-petsc.outputs.cache-hit != 'true'
        name: Install PETSc with complex support
        working-directory: ./petsc
        run: |
          export PETSC_DIR=$(pwd)
          export PETSC_ARCH=petsc-cmplx
          ./configure --with-scalar-type=complex --with-fortran-bindings=0 --have-numpy=1
          make all
          echo "PETSC_DIR=$PETSC_DIR" > petsc.env
          echo "PETSC_ARCH=$PETSC_ARCH" >> petsc.env

      # This step could be merged into the PETSc install step above; however,
      # running it unconditionally also verifies that a cached PETSc
      # installation still works!
      - name: Test PETSc installation
        working-directory: ./petsc
        run: |
          source petsc.env
          make check
          echo "PETSC_DIR=$PETSC_DIR" >> $GITHUB_ENV
          echo "PETSC_ARCH=$PETSC_ARCH" >> $GITHUB_ENV
      - name: Install petsc4py
        working-directory: ./petsc
        run: |
          python -m pip install wheel Cython numpy
          python -m pip install src/binding/petsc4py
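
      # CC=mpicc and HDF5_MPI=ON make pip build MPI-aware packages (notably
      # h5py) against the MPI compiler wrapper and the parallel HDF5 library
      # found in HDF5_DIR.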
      - name: Install Python dependencies
        run: |
          export CC="mpicc"
          export HDF5_MPI="ON"
          python -m pip install -r requirements.txt
          python -m pip install -r requirements_extra.txt --no-build-isolation
          python -m pip list

      - name: Check parallel h5py installation
        run: |
          python -c "
          from mpi4py import MPI
          import h5py
          # This particular instantiation of h5py.File will fail if parallel h5py is not installed
          f = h5py.File('parallel_test.hdf5', 'w', driver='mpio', comm=MPI.COMM_WORLD)
          print(f)"
      - name: Install project
        run: |
          python -m pip install .
          python -m pip freeze

      - name: Test Pyccel optimization flags
        run: |
          pytest --pyargs psydac -m pyccel --capture=no

      - name: Initialize test directory
        run: |
          mkdir pytest
          cp mpi_tester.py pytest
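
      # The tests run from a scratch directory so that pytest imports the
      # installed psydac package rather than the local source tree; with
      # pytest-xdist, '-n auto' spreads the serial tests over all available
      # CPU cores.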
      - name: Run single-process tests with Pytest
        working-directory: ./pytest
        run: |
          export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
          export OMP_NUM_THREADS=2
          python -m pytest -n auto --pyargs psydac -m "not parallel and not petsc"

      #- name: Run MPI tests with Pytest
      #  working-directory: ./pytest
      #  run: |
      #    export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
      #    export OMP_NUM_THREADS=2
      #    python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and not petsc"

      - name: Run single-process PETSc tests with Pytest
        working-directory: ./pytest
        run: |
          export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
          export OMP_NUM_THREADS=2
          python -m pytest -n auto --pyargs psydac -m "not parallel and petsc"

      #- name: Run MPI PETSc tests with Pytest
      #  working-directory: ./pytest
      #  run: |
      #    export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
      #    export OMP_NUM_THREADS=2
      #    python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and petsc"

      - name: Remove test directory
        if: ${{ always() }}
        run: |
          rm -rf pytest