add dispatcher for sqrtm #12

Workflow file for this run

# The name is short because we mostly care how it appears in the pull request
# "checks" dialogue box - it looks like
# Tests / ubuntu-latest, python-3.9, defaults
# or similar.
name: Tests
on:
[push, pull_request]
defaults:
run:
# The slightly odd shell call is to force bash to read .bashrc, which is
# necessary for having conda behave sensibly. We use bash as the shell even
# on Windows, since we don't run anything much complicated, and it makes
# things much simpler.
shell: bash -l -e {0}
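# For reference (not part of the original comment): '-l' starts bash as a
# login shell so the profile files (and hence conda's shell setup) are read,
# '-e' aborts the script on the first failing command, and '{0}' is the
# placeholder that GitHub Actions replaces with the path of the generated
# step script. A rough local equivalent of a step would be:
#   bash -l -e ./step_script.sh   # 'step_script.sh' is a hypothetical name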
jobs:
cases:
name: ${{ matrix.os }}, python${{ matrix.python-version }}, ${{ matrix.case-name }}
runs-on: ${{ matrix.os }}
env:
MPLBACKEND: Agg # Explicitly define matplotlib backend for Windows tests
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
# Test other versions of Python in special cases to avoid exploding the
# matrix size; make sure to test all supported versions in some form.
python-version: ["3.11"]
case-name: [defaults]
numpy-requirement: [">=1.22"]
scipy-requirement: [">=1.8"]
coverage-requirement: ["==6.5"]
# Extra special cases. In these, the new variable defined should always
# be a truth-y value (hence 'nomkl: 1' rather than 'mkl: 0'), because
# the lack of a variable is _always_ false-y, and the defaults lack all
# the special cases.
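# As a sketch of how these flags are consumed: the install and test steps
# below use plain bash truthiness tests, which are false whenever the
# variable is absent from a case, e.g.
#   if [[ "${{ matrix.nomkl }}" ]]; then echo "nomkl case"; fi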
include:
# Test with the upcoming version 2 of numpy.
- case-name: numpy2
os: ubuntu-latest
python-version: "3.12"
scipy-requirement: ""
numpy-requirement: "==2.0.0rc2"
pypi: 1
# Binaries compiled against numpy 2 should remain compatible when
# numpy 1.X is used at runtime.
- case-name: numpy2_to_1
os: ubuntu-latest
python-version: "3.10"
scipy-requirement: ""
numpy-requirement: "==2.0.0rc2"
roll_back_numpy: 1
pypi: 1 # numpy 2 not yet available on conda
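# This case is exercised via the 'roll_back_numpy' flag handled at the end
# of the install step below, which downgrades numpy after qutip has been
# built against numpy 2.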
# Python 3.10, no mkl, scipy 1.9, numpy 1.23
# Scipy 1.9 did not support cython 3.0 yet.
# cython#17234
- case-name: no mkl
os: ubuntu-latest
python-version: "3.10"
scipy-requirement: ">=1.9,<1.10"
numpy-requirement: ">=1.23,<1.24"
condaforge: 1
oldcython: 1
nomkl: 1
pytest-extra-options: "-W ignore:dep_util:DeprecationWarning"
# Python 3.10, no cython, scipy 1.10, numpy 1.24
- case-name: no cython
os: ubuntu-latest
python-version: "3.10"
scipy-requirement: ">=1.10,<1.11"
numpy-requirement: ">=1.24,<1.25"
oldcython: 1
nocython: 1
# Python 3.11 and recent numpy
# Use conda-forge to provide Python 3.11 and latest numpy
# Ignore deprecation of the cgi module in Python 3.11 that is
# still imported by Cython.Tempita. This was addressed in
# https://github.com/cython/cython/pull/5128 but not backported
# to any currently released version.
- case-name: Python 3.11
os: ubuntu-latest
python-version: "3.11"
condaforge: 1
scipy-requirement: ">=1.11,<1.12"
numpy-requirement: ">=1.25,<1.26"
conda-extra-pkgs: "suitesparse" # for compiling cvxopt
# Python 3.12 and latest numpy
# Use conda-forge to provide Python 3.12 and a recent numpy
- case-name: Python 3.12
os: ubuntu-latest
python-version: "3.12"
scipy-requirement: ">=1.12,<1.13"
numpy-requirement: ">=1.26,<1.27"
condaforge: 1
pytest-extra-options: "-W ignore:datetime:DeprecationWarning"
# Install mpi4py to test mpi_pmap
# Should be enough to include this in one of the runs
includempi: 1
# Mac
# Mac has had issues with MKL since September 2022.
- case-name: macos
# setup-miniconda not compatible with macos-latest presently.
# https://github.com/conda-incubator/setup-miniconda/issues/344
os: macos-12
python-version: "3.11"
condaforge: 1
nomkl: 1
# Windows. Once all tests pass without needing special options, this
# can be moved to the main os list in the test matrix. All the tests
# that currently fail seem to do so because mcsolve uses
# multiprocessing under the hood. Windows does not support fork()
# well, which makes transferring objects to the child processes
# error-prone. See, e.g., https://github.com/qutip/qutip/issues/1202
- case-name: Windows
os: windows-latest
python-version: "3.11"
steps:
- uses: actions/checkout@v4
- uses: conda-incubator/setup-miniconda@v3
with:
auto-update-conda: true
python-version: ${{ matrix.python-version }}
channels: ${{ matrix.condaforge == 1 && 'conda-forge' || 'defaults' }}
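# GitHub Actions expressions have no ternary operator; 'cond && a || b' is
# the usual workaround. Here it picks 'conda-forge' when condaforge == 1 and
# 'defaults' otherwise (it relies on 'conda-forge' itself being truthy).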
- name: Install QuTiP and dependencies
# In the run, first we handle any special cases. We do this in bash
# rather than in the GitHub Actions file directly, because bash gives us
# a proper programming language to use.
# We install without build isolation so qutip is compiled with the
# versions of cython, scipy and numpy from the test matrix, not with
# temporary versions used in an isolated build environment.
run: |
# Install the extra requirements. Quote the version specifiers so bash
# does not treat '>' as a redirection.
python -m pip install "pytest>=5.2" pytest-rerunfailures # tests
python -m pip install "matplotlib>=1.2.1" # graphics
python -m pip install "cvxpy>=1.0" cvxopt # semidefinite
python -m pip install ipython # ipython
python -m pip install loky tqdm # extras
python -m pip install "coverage${{ matrix.coverage-requirement }}" chardet
python -m pip install pytest-cov coveralls pytest-fail-slow
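# Pick the numpy/scipy source for this case: explicit 'pypi' cases use pip
# wheels, the default is conda with the MKL BLAS, 'nomkl' cases use pip on
# Windows (conda has no forced nomkl builds there) and conda's 'nomkl'
# builds everywhere else.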
if [[ "${{ matrix.pypi }}" ]]; then
pip install "numpy${{ matrix.numpy-requirement }}" "scipy${{ matrix.scipy-requirement }}"
elif [[ -z "${{ matrix.nomkl }}" ]]; then
conda install blas=*=mkl "numpy${{ matrix.numpy-requirement }}" "scipy${{ matrix.scipy-requirement }}"
elif [[ "${{ matrix.os }}" =~ ^windows.*$ ]]; then
# Conda doesn't supply forced nomkl builds on Windows, so we rely on
# pip not automatically linking to MKL.
pip install "numpy${{ matrix.numpy-requirement }}" "scipy${{ matrix.scipy-requirement }}"
else
conda install nomkl "numpy${{ matrix.numpy-requirement }}" "scipy${{ matrix.scipy-requirement }}"
fi
if [[ -n "${{ matrix.conda-extra-pkgs }}" ]]; then
conda install "${{ matrix.conda-extra-pkgs }}"
fi
if [[ "${{ matrix.includempi }}" ]]; then
# Use openmpi because mpich causes problems. Note: the relevant environment variable names change in openmpi v5.
conda install "openmpi<5" mpi4py
fi
if [[ "${{ matrix.oldcython }}" ]]; then
python -m pip install cython==0.29.36 filelock
else
python -m pip install cython filelock
fi
python -m pip install -e . -v --no-build-isolation
if [[ "${{ matrix.nocython }}" ]]; then
python -m pip uninstall cython -y
fi
if [[ "${{ matrix.roll_back_numpy }}" ]]; then
# Binary compiled with numpy 2.X should be compatible with numpy 1.X
python -m pip install "numpy<1.24"
fi
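# A quick, optional sanity check for the rollback case could be (sketch,
# not required by the workflow):
#   python -c "import numpy; print(numpy.__version__)"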
- name: Package information
run: |
conda list
python -c "import qutip; qutip.about()"
python -c "import qutip; print(qutip.settings)"
- name: Environment information
run: |
uname -a
if [[ "ubuntu-latest" == "${{ matrix.os }}" ]]; then
hostnamectl
lscpu
free -h
fi
- name: Run tests
# If our tests are running for longer than an hour, _something_ is wrong
# somewhere. The GitHub default is 6 hours, which is a bit long to wait
# to see if something hung.
timeout-minutes: 60
run: |
if [[ -n "${{ matrix.openmp }}" ]]; then
# Force OpenMP runs to use more threads, even if there aren't
# actually that many CPUs. We need to check that any dispatch code
# is actually being exercised.
export QUTIP_NUM_PROCESSES=2
fi
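# Note: no case in the current matrix sets 'openmp', so this branch only
# takes effect for OpenMP-enabled builds added later. A manual run would
# look something like (sketch):
#   QUTIP_NUM_PROCESSES=2 pytest qutip/tests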
if [[ "${{ matrix.includempi }}" ]]; then
# By default, openmpi allows at most (number of physical CPU cores) - 1
# worker processes.
# The runner has only 2 physical cores, but we want to test mpi_pmap with 2 workers.
export OMPI_MCA_rmaps_base_oversubscribe=true
fi
pytest -Werror --strict-config --strict-markers --fail-slow=300 --durations=0 --durations-min=1.0 --verbosity=1 --cov=qutip --cov-report= --color=yes ${{ matrix.pytest-extra-options }} qutip/tests
# Above flags are:
# -Werror
# treat warnings as errors
# --strict-config
# error out if the configuration file is not parseable
# --strict-markers
# error out if a marker is used but not defined in the
# configuration file
# --fail-slow=300
# fail any individual test that takes longer than the given time in
# seconds (provided by the pytest-fail-slow plugin)
# --durations=0 --durations-min=1.0
# at the end, show a list of all the tests that took longer than a
# second to run
# --verbosity=1
# turn the verbosity up so pytest prints the names of the tests
# it's currently working on
# --cov=qutip
# limit coverage reporting to code that's within the qutip package
# --cov-report=
# don't print the coverage report to the terminal---it just adds
# cruft, and we're going to upload the .coverage file to Coveralls
# --color=yes
# force coloured output in the terminal
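# To reproduce a run locally (sketch; pin numpy/scipy to the case you want
# to mimic before installing), something along these lines should work:
#   python -m pip install -e . -v --no-build-isolation
#   pytest -Werror --strict-config --strict-markers --verbosity=1 qutip/tests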
- name: Upload to Coveralls
env:
GITHUB_TOKEN: ${{ secrets.github_token }}
COVERALLS_FLAG_NAME: ${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.case-name }}
COVERALLS_PARALLEL: true
run: coveralls --service=github
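# With COVERALLS_PARALLEL set, each matrix job uploads a partial coverage
# report; the 'finalise' job at the bottom of this file calls
# 'coveralls --finish' to merge them into a single Coveralls build.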
towncrier-check:
name: Verify Towncrier entry added
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install Towncrier
run: |
python -m pip install towncrier
- name: Verify Towncrier entry added
if: github.event_name == 'pull_request'
env:
BASE_BRANCH: ${{ github.base_ref }}
run: |
# Fetch the pull request's base branch so towncrier will be able to
# compare the current branch with the base branch.
# Source: https://github.com/actions/checkout/#fetch-all-branches.
git fetch --no-tags origin +refs/heads/${BASE_BRANCH}:refs/remotes/origin/${BASE_BRANCH}
towncrier check --compare-with origin/${BASE_BRANCH}
towncrier build --version "$(cat VERSION)" --draft
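# For contributors: a changelog entry can be created locally with, e.g.
# (sketch; the fragment directory, file extension and allowed types come
# from the towncrier configuration in the repository):
#   towncrier create <pr-number>.<type>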
finalise:
name: Finalise coverage reporting
needs: cases
runs-on: ubuntu-latest
container: python:3-slim
steps:
- name: Finalise coverage reporting
env:
GITHUB_TOKEN: ${{ secrets.github_token }}
run: |
python -m pip install coveralls
coveralls --service=github --finish