[MAIN] update gh-action runner from macOS12 to macOS14 #1025
Workflow file for this run
# This workflow will set up GitHub-hosted runners and install the required dependencies for elephant tests.
# On pull requests and on pushes to master it will run different tests for elephant.
name: tests
# define events that trigger workflow 'tests'
on:
  workflow_dispatch: # enables manual triggering of workflow
    inputs:
      logLevel:
        description: 'Log level'
        required: true
        default: 'warning'
        type: choice
        options:
          - info
          - warning
          - debug
  pull_request:
    branches:
      - master
    types:
      #- assigned
      #- unassigned
      #- labeled
      #- unlabeled
      - opened
      #- edited
      #- closed
      - reopened
      - synchronize
      #- converted_to_draft
      #- ready_for_review
      #- locked
      #- unlocked
      #- review_requested
      #- review_request_removed
      #- auto_merge_enabled
      #- auto_merge_disabled
  push:
    branches:
      - master
# Cancel previous workflows on the same pull request
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
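# Illustrative note: github.workflow resolves to the workflow name ("tests") and, for a
# pull-request event, github.ref resolves to the PR merge ref (e.g. refs/pull/1025/merge),
# so the group becomes something like "tests-refs/pull/1025/merge" and a new push to the
# same pull request cancels the still-running workflow of the previous push.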
# jobs define the steps that will be executed on the runner
jobs:
  # ----- pip -----
  # install dependencies and elephant with pip and run tests with pytest
  build-and-test-pip:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, "3.10", 3.11, 3.12]
        python-version: [3.9, "3.10", 3.11, 3.12]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
    steps:
      - uses: actions/[email protected]
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/[email protected]
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
          cache: 'pip'
          cache-dependency-path: |
            **/requirements.txt
            **/requirements-extras.txt
            **/requirements-tests.txt
      - name: Get current hash (SHA) of the elephant_data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/[email protected]
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: datasets-
          enableCrossOsArchive: true
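      # actions/cache/restore first looks for an exact match on `datasets-<sha>`; failing that,
      # `restore-keys: datasets-` falls back to the most recently created cache whose key starts
      # with `datasets-`, and `enableCrossOsArchive` allows the same archive to be restored on
      # the Windows runner as well.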
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          pip list
          python --version
      - name: Test with pytest
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_LOCATION=~/elephant-data
            echo $ELEPHANT_DATA_LOCATION
          fi
          coverage run --source=elephant -m pytest
          coveralls --service=github || echo "Coveralls submission failed"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
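      # ELEPHANT_DATA_LOCATION is only exported when the restored cache directory actually exists;
      # elephant's test helpers presumably read the validation datasets from this local copy instead
      # of fetching them over the network. GITHUB_TOKEN is needed by `coveralls --service=github`.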
  # ----- macOS -----
  test-macOS:
    name: conda (${{ matrix.python-version }}, ${{ matrix.os }})
    runs-on: ${{ matrix.os }}
    strategy:
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
      matrix:
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [macos-13, macos-14]
        python-version: [3.11]
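        # Note: GitHub's macos-14 runners are Apple Silicon (arm64) machines, while macos-13 is
        # still x86_64 (Intel), so this matrix exercises elephant on both macOS architectures.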
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/[email protected]
      - name: Cache conda
        uses: actions/cache@v3
        with:
          path: ~/conda_pkgs_dir
          key: ${{ runner.os }}-conda-${{ hashFiles('requirements/environment.yml') }}-${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
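      # The cache key mixes the hashes of the environment file and of this workflow file with the
      # year-month from the date step above, so the conda package cache is rebuilt whenever the
      # dependencies change and, at the latest, once a month.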
      - name: Get current hash (SHA) of the elephant_data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/[email protected]
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: datasets-
      - uses: conda-incubator/setup-miniconda@d2e6a045a86077fb6cad6f5adf368e9076ddaa8d # corresponds to v3.1.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          mamba-version: "*"
          channels: conda-forge
          channel-priority: true
          activate-environment: elephant
          environment-file: requirements/environment-tests.yml
          conda-remove-defaults: true
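      # The following steps use `shell: bash -l {0}` (a login shell) so that the conda setup written
      # by setup-miniconda is sourced and the 'elephant' environment is active inside each step.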
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          python --version
          mamba install pytest pytest-cov coveralls
          pip install -e .[extras]
      - name: List packages
        shell: bash -l {0}
        run: |
          pip list
          mamba list
          python --version
      - name: Test with pytest
        shell: bash -l {0}
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_LOCATION=~/elephant-data
            echo $ELEPHANT_DATA_LOCATION
          fi
          pytest --cov=elephant
  # ----- Windows -----
  # install dependencies with pip and run tests with pytest
  test-pip:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11]
        python-version: [3.11]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [windows-latest]
    steps:
      - uses: actions/[email protected]
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/[email protected]
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
          cache: 'pip'
          cache-dependency-path: |
            **/requirements.txt
            **/requirements-extras.txt
            **/requirements-tests.txt
      - name: Get current hash (SHA) of the elephant_data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/[email protected]
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: datasets-
          enableCrossOsArchive: true
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pytest-cov coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          pip list
          python --version
      - name: Test with pytest
        run: |
          if (Test-Path "$env:USERPROFILE\elephant-data") {
            $env:ELEPHANT_DATA_LOCATION = "$env:USERPROFILE\elephant-data"
            Write-Output $env:ELEPHANT_DATA_LOCATION
          }
          pytest --cov=elephant
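      # The step above uses PowerShell syntax (Test-Path, $env:) because PowerShell is the default
      # shell for `run:` steps on windows-latest runners, unlike the bash steps of the other jobs.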
  # ----- MPI -----
  # install dependencies and elephant with pip and run MPI
  test-pip-MPI:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11]
        python-version: [3.9]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
    steps:
      - uses: actions/[email protected]
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/[email protected]
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
          cache: 'pip'
          cache-dependency-path: |
            **/requirements.txt
            **/requirements-extras.txt
            **/requirements-tests.txt
      - name: Get current hash (SHA) of the elephant_data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/[email protected]
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: datasets-
          enableCrossOsArchive: true
      - name: Setup environment
        run: |
          sudo apt-get update
          sudo apt install -y libopenmpi-dev openmpi-bin
          python -m pip install --upgrade pip
          pip install mpi4py
          pip install pytest-cov coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          pip list
          python --version
      - name: Test with pytest
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_LOCATION=~/elephant-data
            echo $ELEPHANT_DATA_LOCATION
          fi
          mpiexec -n 1 python -m mpi4py -m coverage run --source=elephant -m pytest
          coveralls --service=github || echo "Coveralls submission failed"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
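      # The suite runs as a single MPI rank (`mpiexec -n 1`); launching it via `python -m mpi4py`
      # makes unhandled exceptions call MPI_Abort so a failing test cannot leave the MPI job
      # hanging, while `coverage run` wraps pytest just like in the plain pip job above.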
  # ----- Conda -----
  # install dependencies with conda and run tests with pytest
  test-conda:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11]
        python-version: [3.11]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/[email protected]
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache pip
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements/environment-tests.yml') }}-${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - name: Get current hash (SHA) of the elephant_data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/[email protected]
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: datasets-
          enableCrossOsArchive: true
      - uses: conda-incubator/setup-miniconda@d2e6a045a86077fb6cad6f5adf368e9076ddaa8d # corresponds to v3.1.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          mamba-version: "*"
          channels: conda-forge
          channel-priority: true
          activate-environment: elephant
          environment-file: requirements/environment-tests.yml
          conda-remove-defaults: true
      - name: Install dependencies
        shell: bash -el {0}
        run: |
          python --version
          mamba install -c conda-forge pytest pytest-cov coveralls mpi4py openmpi
          pip install -e .
      - name: List packages
        shell: bash -el {0}
        run: |
          pip list
          mamba list
          python --version
      - name: Test with pytest
        shell: bash -el {0}
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_LOCATION=~/elephant-data
            echo $ELEPHANT_DATA_LOCATION
          fi
          pytest --cov=elephant
  # ----- Docs -----
  # install dependencies for the documentation and build .html
  docs:
    name: docs (${{ matrix.os }})
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11, 3.12]
        python-version: [3.12]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/[email protected]
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache pip
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          # Look to see if there is a cache hit for the corresponding requirements files
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements-docs.txt') }}-${{ hashFiles('**/requirements-tutorials.txt') }}-${{ hashFiles('**/environment-docs.yml') }}
            -${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - uses: conda-incubator/setup-miniconda@d2e6a045a86077fb6cad6f5adf368e9076ddaa8d # corresponds to v3.1.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          mamba-version: "*"
          activate-environment: elephant
          environment-file: requirements/environment.yml
          conda-remove-defaults: true
      - name: Install dependencies
        shell: bash -el {0} # enables conda incubator to activate environment
        run: |
          sudo apt-get update
          sudo apt install -y libopenmpi-dev openmpi-bin
          mamba install -c conda-forge openmpi pandoc libstdcxx-ng # fix libstdc++.so.6: version for new scipy versions > 1.9.1
          mamba env update --file requirements/environment-docs.yml --name elephant
          python -m pip install --upgrade pip
          pip install -e .[extras,tutorials,docs]
          # run notebooks
          sed -i -E "s/nbsphinx_execute *=.*/nbsphinx_execute = 'always'/g" doc/conf.py
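          # Setting nbsphinx_execute to 'always' (line above) makes nbsphinx re-execute every
          # tutorial notebook during `make html` instead of reusing stored outputs, so broken
          # notebooks fail the docs build.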
      - name: List packages
        shell: bash -el {0}
        run: |
          pip list
          mamba list
          python --version
      - name: make html
        shell: bash -el {0}
        run: |
          cd doc
          make html
  # install dependencies and elephant with pip and run tests with pytest
  doctests:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.7, 3.8, 3.9, "3.10"]
        python-version: ["3.10"]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
    steps:
      # used to reset cache every month
      - name: Get current year-month
        id: date
run: echo "::set-output name=date::$(date +'%Y-%m')" | |
      - uses: actions/[email protected]
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/[email protected]
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache test_env
        uses: actions/cache@v3
        with:
          path: ~/test_env
          # Look to see if there is a cache hit for the corresponding requirements files
          # cache will be reset on changes to any requirements or every month
          key: ${{ runner.os }}-venv-${{ hashFiles('**/requirements.txt') }}-${{ hashFiles('**/requirements-tests.txt') }}
            -${{ hashFiles('**/requirements-extras.txt') }}-${{ hashFiles('**/CI.yml') }}-${{ hashFiles('setup.py') }}
            -${{ steps.date.outputs.date }}
      - name: Install dependencies
        run: |
          # create an environment and install everything
          python -m venv ~/test_env
          source ~/test_env/bin/activate
          sudo apt install -y libopenmpi-dev openmpi-bin
          python -m pip install --upgrade pip
          pip install mpi4py
          pip install pytest-cov coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          source ~/test_env/bin/activate
          pip list
          python --version
      - name: Run doctests
        run: |
          source ~/test_env/bin/activate
          pytest elephant --doctest-modules --ignore=elephant/test/
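      # `--doctest-modules` collects the usage examples embedded in docstrings across the elephant
      # package and runs them as tests; `--ignore=elephant/test/` skips the regular unit-test tree,
      # which is already covered by the jobs above.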