PR 636 triggered by: pull_request of refs/pull/636/merge branch (workflow run ID: 6686929064; number: 2572; attempt: 1) #2572

Workflow file for this run

---
name: CI/CD
on: # yamllint disable-line rule:truthy
push:
branches-ignore:
- dependabot/**
- maintenance/pip-tools-constraint-lockfiles
- maintenance/pip-tools-constraint-lockfiles-**
- patchback/backports/**
- pre-commit-ci-update-config
pull_request:
schedule:
- cron: 1 0 * * * # Run daily at 0:01 UTC
workflow_dispatch:
inputs:
release-version:
# github.event_name == 'workflow_dispatch'
# && github.event.inputs.release-version
description: >-
Target PEP440-compliant version to release.
Please don't prepend `v`.
required: true
type: string
release-committish:
# github.event_name == 'workflow_dispatch'
# && github.event.inputs.release-committish
default: ''
description: >-
The commit to be released to PyPI and tagged
in Git as `release-version`. Normally, you
should keep this empty.
type: string
YOLO:
default: false
description: >-
Set this flag to disregard the outcome of the
test stage. The test results will block the
release otherwise. Only use this under
extraordinary circumstances to ignore the test
failures and cut the release regardless.
type: boolean
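# NOTE: Runs sharing a group (same workflow, ref type and PR number, or
# NOTE: commit SHA outside of pull requests) supersede each other: a newer
# NOTE: run cancels any older one that is still in progress.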
concurrency:
group: >-
${{
github.workflow
}}-${{
github.ref_type
}}-${{
github.event.pull_request.number || github.sha
}}
cancel-in-progress: true
env:
dists-artifact-name: python-package-distributions
FORCE_COLOR: 1 # Request colored output from CLI tools supporting it
MYPY_FORCE_COLOR: 1 # MyPy's color enforcement
PIP_DISABLE_PIP_VERSION_CHECK: 1
PIP_NO_PYTHON_VERSION_WARNING: 1
PIP_NO_WARN_SCRIPT_LOCATION: 1
PRE_COMMIT_COLOR: always
PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest`
PYTHONIOENCODING: utf-8
PYTHONUTF8: 1
TOX_PARALLEL_NO_SPINNER: 1
TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests
FORCE_COLOR
MYPY_FORCE_COLOR
NO_COLOR
PIP_DISABLE_PIP_VERSION_CHECK
PIP_NO_PYTHON_VERSION_WARNING
PIP_NO_WARN_SCRIPT_LOCATION
PRE_COMMIT_COLOR
PY_COLORS
PYTEST_THEME
PYTEST_THEME_MODE
PYTHONIOENCODING
PYTHONLEGACYWINDOWSSTDIO
PYTHONUTF8
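# NOTE: A pip requirement specifier; the `Install tox` steps below feed it
# NOTE: to `pip install`, keeping every job on the tox 3.x series.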
TOX_VERSION: tox < 4
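# NOTE: Builds the human-readable run name shown in the workflow run list:
# NOTE: release runs get a `📦 Releasing vX.Y.Z...` prefix, other runs are
# NOTE: labelled `PR <number>` or `Commit <sha>` plus the trigger details.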
run-name: >-
${{
github.event_name == 'workflow_dispatch'
&& format('📦 Releasing v{0}...', github.event.inputs.release-version)
|| ''
}}
${{
github.event.pull_request.number && 'PR' || ''
}}${{
!github.event.pull_request.number && 'Commit' || ''
}}
${{ github.event.pull_request.number || github.sha }}
triggered by: ${{ github.event_name }} of ${{
github.ref
}} ${{
github.ref_type
}}
(workflow run ID: ${{
github.run_id
}}; number: ${{
github.run_number
}}; attempt: ${{
github.run_attempt
}})
jobs:
pre-setup:
name: ⚙️ Pre-set global build settings
runs-on: ubuntu-latest
defaults:
run:
shell: python
outputs:
dist-version: >-
${{
steps.request-check.outputs.release-requested == 'true'
&& github.event.inputs.release-version
|| steps.scm-version.outputs.dist-version
}}
is-untagged-devel: >-
${{ steps.untagged-check.outputs.is-untagged-devel || false }}
release-requested: >-
${{
steps.request-check.outputs.release-requested || false
}}
is-yolo-mode: >-
${{
(
steps.request-check.outputs.release-requested == 'true'
&& github.event.inputs.YOLO
)
&& true || false
}}
cache-key-files: >-
${{ steps.calc-cache-key-files.outputs.files-hash-key }}
git-tag: ${{ steps.git-tag.outputs.tag }}
sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }}
wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }}
steps:
- name: Switch to using Python 3.11 by default
uses: actions/setup-python@v4
with:
python-version: 3.11
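# NOTE: The Python steps below publish their step outputs by appending
# NOTE: `key=value` lines to the file that GitHub Actions points to via
# NOTE: the `GITHUB_OUTPUT` environment variable.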
- name: >-
Mark the build as untagged '${{
github.event.repository.default_branch
}}' branch build
id: untagged-check
if: >-
github.event_name == 'push' &&
github.ref == format(
'refs/heads/{0}', github.event.repository.default_branch
)
run: |
from os import environ
from pathlib import Path
FILE_APPEND_MODE = 'a'
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print('is-untagged-devel=true', file=outputs_file)
- name: Mark the build as "release request"
id: request-check
if: github.event_name == 'workflow_dispatch'
run: |
from os import environ
from pathlib import Path
FILE_APPEND_MODE = 'a'
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print('release-requested=true', file=outputs_file)
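# NOTE: A full clone (fetch-depth 0) is used for non-release builds so
# NOTE: that setuptools-scm can see the tag history it needs to derive
# NOTE: the version later in this job.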
- name: Check out src from Git
if: >-
steps.request-check.outputs.release-requested != 'true'
uses: actions/checkout@v3
with:
fetch-depth: >-
${{
steps.request-check.outputs.release-requested == 'true'
&& 1 || 0
}}
ref: ${{ github.event.inputs.release-committish }}
- name: >-
Calculate Python interpreter version hash value
for use in the cache key
if: >-
steps.request-check.outputs.release-requested != 'true'
id: calc-cache-key-py
run: |
from hashlib import sha512
from os import environ
from pathlib import Path
from sys import version
FILE_APPEND_MODE = 'a'
hash = sha512(version.encode()).hexdigest()
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(f'py-hash-key={hash}', file=outputs_file)
- name: >-
Calculate dependency files' combined hash value
for use in the cache key
if: >-
steps.request-check.outputs.release-requested != 'true'
id: calc-cache-key-files
run: |
from os import environ
from pathlib import Path
FILE_APPEND_MODE = 'a'
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(
"files-hash-key=${{
hashFiles(
'setup.cfg', 'setup.py', 'tox.ini', 'pyproject.toml',
'.pre-commit-config.yaml', 'pytest.ini'
)
}}",
file=outputs_file,
)
- name: Get pip cache dir
id: pip-cache-dir
if: >-
steps.request-check.outputs.release-requested != 'true'
run: >-
echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}"
shell: bash
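# NOTE: The pip cache key combines the runner OS, a hash of the Python
# NOTE: interpreter version string and the combined hash of the dependency
# NOTE: declaration files, so changing any of them invalidates the cache.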
- name: Set up pip cache
if: >-
steps.request-check.outputs.release-requested != 'true'
uses: actions/cache@v3
with:
path: ${{ steps.pip-cache-dir.outputs.dir }}
key: >-
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key }}-${{
steps.calc-cache-key-files.outputs.files-hash-key }}
restore-keys: |
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key
}}-
${{ runner.os }}-pip-
${{ runner.os }}-
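# NOTE: Deleting any tag that points at HEAD keeps a non-release build from
# NOTE: being versioned as if it were that tagged release; setuptools-scm
# NOTE: then falls back to a development version instead.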
- name: Drop Git tags from HEAD for non-release requests
if: >-
steps.request-check.outputs.release-requested != 'true'
run: >-
git tag --points-at HEAD
|
xargs git tag --delete
shell: bash
- name: Set up versioning prerequisites
if: >-
steps.request-check.outputs.release-requested != 'true'
run: >-
python -m
pip install
--user
setuptools-scm
shell: bash
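# NOTE: For untagged default-branch (nightly) builds the expression below
# NOTE: injects `local_scheme="no-local-version"`, since PyPI and TestPyPI
# NOTE: reject versions that carry a local segment such as `+g<sha>`.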
- name: Set the current dist version from Git
if: steps.request-check.outputs.release-requested != 'true'
id: scm-version
run: |
from os import environ
from pathlib import Path
import setuptools_scm
FILE_APPEND_MODE = 'a'
ver = setuptools_scm.get_version(
${{
steps.untagged-check.outputs.is-untagged-devel == 'true'
&& 'local_scheme="no-local-version"' || ''
}}
)
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(f'dist-version={ver}', file=outputs_file)
- name: Set the target Git tag
id: git-tag
run: |
from os import environ
from pathlib import Path
FILE_APPEND_MODE = 'a'
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(
"tag=v${{
steps.request-check.outputs.release-requested == 'true'
&& github.event.inputs.release-version
|| steps.scm-version.outputs.dist-version
}}",
file=outputs_file,
)
- name: Set the expected dist artifact names
id: artifact-name
run: |
from os import environ
from pathlib import Path
FILE_APPEND_MODE = 'a'
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(
"sdist=cheroot-${{
steps.request-check.outputs.release-requested == 'true'
&& github.event.inputs.release-version
|| steps.scm-version.outputs.dist-version
}}.tar.gz",
file=outputs_file,
)
print(
"wheel=cheroot-${{
steps.request-check.outputs.release-requested == 'true'
&& github.event.inputs.release-version
|| steps.scm-version.outputs.dist-version
}}-py3-none-any.whl",
file=outputs_file,
)
build:
name: >-
👷 dists ${{ needs.pre-setup.outputs.git-tag }}
[mode: ${{
fromJSON(needs.pre-setup.outputs.is-untagged-devel)
&& 'nightly' || ''
}}${{
fromJSON(needs.pre-setup.outputs.release-requested)
&& 'release' || ''
}}${{
(
!fromJSON(needs.pre-setup.outputs.is-untagged-devel)
&& !fromJSON(needs.pre-setup.outputs.release-requested)
) && 'test' || ''
}}]
needs:
- pre-setup # transitive, for accessing settings
runs-on: ubuntu-latest
env:
TOXENV: cleanup-dists,build-dists
steps:
- name: Switch to using Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.11
- name: Grab the source from Git
uses: actions/checkout@v3
with:
fetch-depth: >-
${{
fromJSON(needs.pre-setup.outputs.release-requested)
&& 1 || 0
}}
ref: ${{ github.event.inputs.release-committish }}
- name: >-
Calculate Python interpreter version hash value
for use in the cache key
id: calc-cache-key-py
run: |
from hashlib import sha512
from os import environ
from pathlib import Path
from sys import version
FILE_APPEND_MODE = 'a'
hash = sha512(version.encode()).hexdigest()
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(f'py-hash-key={hash}', file=outputs_file)
shell: python
- name: Get pip cache dir
id: pip-cache-dir
run: >-
echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}"
- name: Set up pip cache
uses: actions/cache@v3
with:
path: ${{ steps.pip-cache-dir.outputs.dir }}
key: >-
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key }}-${{
needs.pre-setup.outputs.cache-key-files }}
restore-keys: |
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key
}}-
${{ runner.os }}-pip-
- name: Install tox
run: >-
python -m
pip install
--user
'${{ env.TOX_VERSION }}'
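# NOTE: `--notest` provisions the tox environment (interpreter plus
# NOTE: dependencies) without running its commands; the later `Build dists`
# NOTE: step reuses it with `--skip-pkg-install`.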
- name: Pre-populate the tox env
run: >-
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
--notest
- name: Drop Git tags from HEAD for non-tag-create events
if: >-
!fromJSON(needs.pre-setup.outputs.release-requested)
run: >-
git tag --points-at HEAD
|
xargs git tag --delete
shell: bash
- name: Setup git user as [bot]
if: >-
fromJSON(needs.pre-setup.outputs.release-requested)
|| fromJSON(needs.pre-setup.outputs.is-untagged-devel)
uses: fregante/setup-git-user@v2
- name: >-
Tag the release in the local Git repo
as ${{ needs.pre-setup.outputs.git-tag }}
for setuptools-scm to set the desired version
if: >-
fromJSON(needs.pre-setup.outputs.release-requested)
run: >-
git tag
-m '${{ needs.pre-setup.outputs.git-tag }}'
'${{ needs.pre-setup.outputs.git-tag }}'
--
${{
fromJSON(needs.pre-setup.outputs.release-requested)
&& github.event.inputs.release-committish || ''
}}
- name: Install tomlkit Python distribution package
if: >-
fromJSON(needs.pre-setup.outputs.is-untagged-devel)
run: >-
python -m pip install --user tomlkit
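# NOTE: The next step persists the same `no-local-version` scheme into
# NOTE: `pyproject.toml` so that the dists built for the nightly carry a
# NOTE: version TestPyPI will accept.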
- name: Instruct setuptools-scm not to add a local version part
if: >-
fromJSON(needs.pre-setup.outputs.is-untagged-devel)
run: |
from pathlib import Path
import tomlkit
pyproject_toml_path = Path.cwd() / 'pyproject.toml'
pyproject_toml_txt = pyproject_toml_path.read_text()
pyproject_toml = tomlkit.loads(pyproject_toml_txt)
setuptools_scm_section = pyproject_toml['tool']['setuptools_scm']
setuptools_scm_section['local_scheme'] = 'no-local-version'
patched_pyproject_toml_txt = tomlkit.dumps(pyproject_toml)
pyproject_toml_path.write_text(patched_pyproject_toml_txt)
shell: python
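# NOTE: `--assume-unchanged` hides the patch from Git so that setuptools-scm
# NOTE: does not see a dirty working tree while the dists are being built.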
- name: Pretend that pyproject.toml is unchanged
if: >-
fromJSON(needs.pre-setup.outputs.is-untagged-devel)
run: |
git diff --color=always
git update-index --assume-unchanged pyproject.toml
- name: Build dists
run: >-
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
--skip-pkg-install
- name: Verify that the artifacts with expected names got created
run: >-
ls -1
'dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}'
'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}'
- name: Store the distribution packages
uses: actions/upload-artifact@v3
with:
name: ${{ env.dists-artifact-name }}
# NOTE: Exact expected file names are specified here
# NOTE: as a safety measure — if anything weird ends
# NOTE: up being in this dir or not all dists will be
# NOTE: produced, this will fail the workflow.
path: |
dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}
dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}
retention-days: >-
${{
(
fromJSON(needs.pre-setup.outputs.release-requested)
) && 90 || 30
}}
lint:
name: 🧹 ${{ matrix.toxenv }}
needs:
- build
- pre-setup # transitive, for accessing settings
runs-on: ${{ matrix.os }}-latest
strategy:
matrix:
os:
- ubuntu
python-version:
- >-
3.10
toxenv:
- pre-commit
- setup-check
- metadata-validation
- build-docs
- doctest-docs
- linkcheck-docs
- spellcheck-docs
fail-fast: false
env:
TOXENV: ${{ matrix.toxenv }}
steps:
- name: >-
Switch to using Python v${{ matrix.python-version }}
by default
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
# NOTE: `pre-commit --show-diff-on-failure` and `sphinxcontrib-spelling`
# NOTE: with Git authors allowlist enabled both depend on the presence of a
# NOTE: Git repository.
- name: Grab the source from Git
if: >-
contains(fromJSON('["pre-commit", "spellcheck-docs"]'), matrix.toxenv)
uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.release-committish }}
- name: Retrieve the project source from an sdist inside the GHA artifact
if: >-
!contains(fromJSON('["pre-commit", "spellcheck-docs"]'), matrix.toxenv)
uses: re-actors/checkout-python-sdist@release/v1
with:
source-tarball-name: ${{ needs.pre-setup.outputs.sdist-artifact-name }}
workflow-artifact-name: ${{ env.dists-artifact-name }}
- name: >-
Calculate Python interpreter version hash value
for use in the cache key
id: calc-cache-key-py
run: |
from hashlib import sha512
from os import environ
from pathlib import Path
from sys import version
FILE_APPEND_MODE = 'a'
hash = sha512(version.encode()).hexdigest()
with Path(environ['GITHUB_OUTPUT']).open(
mode=FILE_APPEND_MODE,
) as outputs_file:
print(f'py-hash-key={hash}', file=outputs_file)
shell: python
- name: Get pip cache dir
id: pip-cache
run: >-
echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}"
- name: Set up pip cache
uses: actions/cache@v3
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: >-
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key }}-${{
needs.pre-setup.outputs.cache-key-files }}
restore-keys: |
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key
}}-
${{ runner.os }}-pip-
- name: Install tox
run: >-
python -m
pip install
--user
'${{ env.TOX_VERSION }}'
- name: Make the env clean of non-test files
if: matrix.toxenv == 'metadata-validation'
run: |
shopt -s extglob
rm -rf !(tox.ini)
shell: bash
- name: Download all the dists
if: matrix.toxenv == 'metadata-validation'
uses: actions/download-artifact@v3
with:
name: ${{ env.dists-artifact-name }}
path: dist/
- name: >-
Pre-populate tox envs: `${{ env.TOXENV }}`
run: >-
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
--notest
- name: Initialize pre-commit envs if needed
run: >-
test -d .tox/pre-commit
&& .tox/pre-commit/bin/python -m pre_commit install-hooks
|| :
- name: >-
Run tox envs: `${{ env.TOXENV }}`
run: >-
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
--skip-pkg-install
tests:
name: >-
🧪 🐍${{
matrix.python-version
}} @ ${{
matrix.os
}}
needs:
- build
- pre-setup # transitive, for accessing settings
runs-on: ${{ matrix.os }}
strategy:
matrix:
python-version:
# NOTE: The latest and the lowest supported Pythons are prioritized
# NOTE: to improve the responsiveness. It's nice to see the most
# NOTE: important results first.
- 3.11
- >-
3.10
- 3.9
- 3.8
os:
- ubuntu-22.04
- ubuntu-20.04
- macos-11.0
- macos-latest
- windows-2019
- windows-2022
continue-on-error: >-
${{
(
fromJSON(needs.pre-setup.outputs.is-yolo-mode) ||
(
startsWith(matrix.python-version, '~')
) ||
contains(matrix.python-version, 'alpha')
) && true || false
}}
env:
TOXENV: python
steps:
- name: Set up Python ${{ matrix.python-version }}
id: python-install
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Retrieve the project source from an sdist inside the GHA artifact
uses: re-actors/checkout-python-sdist@release/v1
with:
source-tarball-name: ${{ needs.pre-setup.outputs.sdist-artifact-name }}
workflow-artifact-name: ${{ env.dists-artifact-name }}
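# NOTE: The probe below gates the pip caching steps: caching is only used
# NOTE: with final interpreter releases, presumably because cache keys based
# NOTE: on a pre-release version string would churn on every new build.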
- name: Figure out if the interpreter ABI is stable
id: py-abi
run: |
from os import environ
from sys import version_info
FILE_APPEND_MODE = 'a'
is_stable_abi = version_info.releaselevel == 'final'
with open(
environ['GITHUB_OUTPUT'],
mode=FILE_APPEND_MODE,
) as outputs_file:
print(
'is-stable-abi={is_stable_abi}'.
format(is_stable_abi=str(is_stable_abi).lower()),
file=outputs_file,
)
shell: python
- name: >-
Calculate Python interpreter version hash value
for use in the cache key
if: fromJSON(steps.py-abi.outputs.is-stable-abi)
id: calc-cache-key-py
run: |
from hashlib import sha512
from os import environ
from sys import version
FILE_APPEND_MODE = 'a'
hash = sha512(version.encode()).hexdigest()
with open(
environ['GITHUB_OUTPUT'], mode=FILE_APPEND_MODE,
) as outputs_file:
print(f'py-hash-key={hash}', file=outputs_file)
shell: python
- name: Get pip cache dir
if: fromJSON(steps.py-abi.outputs.is-stable-abi)
id: pip-cache-dir
run: >-
echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}"
shell: bash
- name: Set up pip cache
if: fromJSON(steps.py-abi.outputs.is-stable-abi)
uses: actions/cache@v3
with:
path: ${{ steps.pip-cache-dir.outputs.dir }}
key: >-
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key }}-${{
needs.pre-setup.outputs.cache-key-files }}
restore-keys: |
${{ runner.os }}-pip-${{
steps.calc-cache-key-py.outputs.py-hash-key
}}-
${{ runner.os }}-pip-
- name: Upgrade pip with `requires_python`
run: >-
python -m
pip install
--user
--upgrade
--force-reinstall
pip-with-requires-python
- name: Install tox
run: >-
python -m
pip install
--user
'${{ env.TOX_VERSION }}'
- name: Download all the dists
uses: actions/download-artifact@v3
with:
name: ${{ env.dists-artifact-name }}
path: dist/
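# NOTE: `--installpkg` points tox at the wheel produced by the `build` job,
# NOTE: so the tests exercise the exact artifact that would be published.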
- name: >-
Pre-populate tox env:
${{ env.TOXENV }}
run: >-
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
--installpkg 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}'
--notest
- name: Windows system info
run: systeminfo
if: >-
startsWith(matrix.os, 'windows-')
- name: >-
Log platform.platform()
run: >-
python -m platform
- name: >-
Log platform.version()
run: >-
python -c "import platform;
print(platform.version())"
- name: >-
Log platform.uname()
run: >-
python -c "import platform;
print(platform.uname())"
- name: >-
Log platform.release()
run: >-
python -c "import platform;
print(platform.release())"
- name: Log stdlib OpenSSL version
run: >-
python -c
"import ssl; print('\nOPENSSL_VERSION: '
+ ssl.OPENSSL_VERSION + '\nOPENSSL_VERSION_INFO: '
+ repr(ssl.OPENSSL_VERSION_INFO)
+ '\nOPENSSL_VERSION_NUMBER: '
+ repr(ssl.OPENSSL_VERSION_NUMBER))"
- name: Run the testing
run: >-
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
--installpkg 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}'
- name: Produce markdown test summary from JUnit
if: >-
!cancelled()
uses: test-summary/action@v2
with:
paths: .test-results/pytest/test.xml
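# NOTE: `compgen -G` exits non-zero when the glob matches nothing, so
# NOTE: `present=true` is only recorded when a Cobertura report exists;
# NOTE: the trailing `exit 0` keeps the probe itself from failing.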
- name: Check if Cobertura XML coverage files exist
if: >-
!cancelled()
&& runner.os == 'Linux'
id: coverage-files
run: >-
compgen -G '.test-results/pytest/cov.xml'
&& ( echo 'present=true' >> ${GITHUB_OUTPUT} )
;
exit 0
shell: bash
- name: Produce markdown test summary from Cobertura XML
if: steps.coverage-files.outputs.present == 'true'
uses: irongut/CodeCoverageSummary@v1.3.0
with:
badge: true
filename: .test-results/pytest/cov.xml
format: markdown
output: both
# Ref: https://github.com/irongut/CodeCoverageSummary/issues/66
- name: Append coverage results to Job Summary
if: steps.coverage-files.outputs.present == 'true'
run: >-
cat code-coverage-results.md >> "${GITHUB_STEP_SUMMARY}"
- name: Re-run the failing tests with maximum verbosity
if: >-
!cancelled()
&& failure()
run: >- # `exit 1` makes sure that the job remains red with flaky runs
python -m
tox
--parallel auto
--parallel-live
--skip-missing-interpreters false
-vvvvv
--installpkg 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}'
--
--no-cov -vvvvv --lf
&& exit 1
shell: bash
- name: Send coverage data to Codecov
if: >-
!cancelled()
uses: codecov/codecov-action@v3
with:
files: .test-results/pytest/cov.xml
flags: >-
CI-GHA,
OS-${{
runner.os
}},
VM-${{
matrix.os
}},
Py-${{
steps.python-install.outputs.python-version
}},
${{ needs.pre-setup.outputs.wheel-artifact-name }}
check: # This job does nothing and is only used for the branch protection
if: always()
needs:
- lint
- pre-setup # transitive, for accessing settings
- tests
runs-on: ubuntu-latest
steps:
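# NOTE: In YOLO mode the otherwise-required `lint` and `tests` jobs are
# NOTE: declared as allowed failures so a release can proceed despite red
# NOTE: test results.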
- name: Decide whether the needed jobs succeeded or failed
uses: re-actors/alls-green@release/v1
with:
allowed-failures: >-
${{
fromJSON(needs.pre-setup.outputs.is-yolo-mode)
&& 'lint, tests' || ''
}}
jobs: ${{ toJSON(needs) }}
publish-pypi:
name: Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to PyPI
needs:
- check
- pre-setup # transitive, for accessing settings
if: >-
fromJSON(needs.pre-setup.outputs.release-requested)
runs-on: ubuntu-latest
environment:
name: pypi
url: >-
https://pypi.org/project/cheroot/${{
needs.pre-setup.outputs.dist-version
}}
permissions:
contents: read # This job doesn't need to `git push` anything
id-token: write # PyPI Trusted Publishing (OIDC)
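# NOTE: No API token or password is configured anywhere in this job;
# NOTE: `pypa/gh-action-pypi-publish` exchanges the OIDC token permitted by
# NOTE: `id-token: write` for a short-lived PyPI upload token.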
steps:
- name: Download all the dists
uses: actions/download-artifact@v3
with:
name: ${{ env.dists-artifact-name }}
path: dist/
- name: >-
Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
publish-testpypi:
name: Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI
needs:
- check
- pre-setup # transitive, for accessing settings
if: >-
fromJSON(needs.pre-setup.outputs.is-untagged-devel)
|| fromJSON(needs.pre-setup.outputs.release-requested)
runs-on: ubuntu-latest
environment:
name: testpypi
url: >-
https://test.pypi.org/project/cheroot/${{
needs.pre-setup.outputs.dist-version
}}
permissions:
contents: read # This job doesn't need to `git push` anything
id-token: write # PyPI Trusted Publishing (OIDC)
steps:
- name: Download all the dists
uses: actions/download-artifact@v3
with:
name: ${{ env.dists-artifact-name }}
path: dist/
- name: >-
Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
repository-url: https://test.pypi.org/legacy/
post-release-repo-update:
name: >-
Publish post-release Git tag
for ${{ needs.pre-setup.outputs.git-tag }}
needs:
- publish-pypi
- pre-setup # transitive, for accessing settings
runs-on: ubuntu-latest
steps:
- name: >-
Check if the requested tag ${{ needs.pre-setup.outputs.git-tag }}
is present and is pointing at the required commit ${{
github.event.inputs.release-committish
}}
id: existing-remote-tag-check
run: |
REMOTE_TAGGED_COMMIT_SHA="$(
git ls-remote --tags --refs $(git remote get-url origin) '${{
needs.pre-setup.outputs.git-tag
}}' | awk '{print $1}'
)"
if [[ "${REMOTE_TAGGED_COMMIT_SHA}" == '${{
github.event.inputs.release-committish
}}' ]]
then
echo "already-exists=true" >> "${GITHUB_OUTPUT}"
fi
- name: Fetch the src snapshot
if: steps.existing-remote-tag-check.outputs.already-exists == 'true'
uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.inputs.release-committish }}
- name: Setup git user as [bot]
if: steps.existing-remote-tag-check.outputs.already-exists == 'true'
uses: fregante/setup-git-user@v2
- name: >-
Tag the release in the local Git repo
as ${{ needs.pre-setup.outputs.git-tag }}
if: steps.existing-remote-tag-check.outputs.already-exists == 'true'
run: >-
git tag
-m '${{ needs.pre-setup.outputs.git-tag }}'
-m 'Published at https://pypi.org/project/cheroot/${{
needs.pre-setup.outputs.dist-version
}}'
-m 'This release has been produced by the following workflow run: ${{
github.server_url
}}/${{
github.repository
}}/actions/runs/${{
github.run_id
}}'
'${{ needs.pre-setup.outputs.git-tag }}'
--
${{ github.event.inputs.release-committish }}
- name: >-
Push ${{ needs.pre-setup.outputs.git-tag }} tag corresponding
to the just published release back to GitHub
if: steps.existing-remote-tag-check.outputs.already-exists == 'true'
run: >-
git push --atomic origin '${{ needs.pre-setup.outputs.git-tag }}'
publish-github-release:
name: >-
Publish a GitHub Release for
${{ needs.pre-setup.outputs.git-tag }}
needs:
- post-release-repo-update
- pre-setup # transitive, for accessing settings
runs-on: ubuntu-latest
permissions:
contents: write
discussions: write
steps:
- name: Download all the dists
uses: actions/download-artifact@v3
with:
name: ${{ env.dists-artifact-name }}
path: dist/
- name: >-
Publish a GitHub Release for
${{ needs.pre-setup.outputs.git-tag }}
uses: ncipollo/release-action@v1
with:
allowUpdates: false
artifactErrorsFailBuild: false
artifacts: |
dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}
dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}
artifactContentType: raw # Because whl and tgz are of different types
body: >
# Release ${{ needs.pre-setup.outputs.git-tag }}
This release is published to
https://pypi.org/project/cheroot/${{
needs.pre-setup.outputs.dist-version
}}.
This release has been produced by the following workflow run: ${{
github.server_url
}}/${{
github.repository
}}/actions/runs/${{
github.run_id
}}.
# bodyFile: # FIXME: Use once Towncrier is integrated.
commit: ${{ github.event.inputs.release-committish }}
discussionCategory: Announcements
draft: false
name: ${{ needs.pre-setup.outputs.git-tag }}
# omitBody: false
omitBodyDuringUpdate: true
omitName: false
omitNameDuringUpdate: true
omitPrereleaseDuringUpdate: true
prerelease: false
removeArtifacts: false
replacesArtifacts: false
tag: ${{ needs.pre-setup.outputs.git-tag }}
token: ${{ secrets.GITHUB_TOKEN }}
...