[ATO-1493] Send telemetry only if there is License present 3.6 #30013

name: Continuous Integration
on:
push:
branches:
- main
tags:
- "*"
pull_request:
concurrency:
group: continous-integration-${{ github.ref }} # branch or tag name
cancel-in-progress: true
# SECRETS
# - GH_RELEASE_NOTES_TOKEN: personal access token of `rasabot` github account
# (login for account in 1pw)
# - SLACK_WEBHOOK_TOKEN: token to post to RasaHQ slack account (in 1password)
# - PYPI_TOKEN: publishing token for amn41 account, needs to be maintainer of
# RasaHQ/rasa on pypi (account credentials in 1password)
# - DOCKERHUB_PASSWORD: password for an account with write access to the rasa
# repo on hub.docker.com. used to pull and upload containers
# - RASA_OSS_TELEMETRY_WRITE_KEY: key to write to segment. Used to report telemetry.
# The key will be added to the distributions
# - RASA_OSS_EXCEPTION_WRITE_KEY: key to write to sentry. Used to report exceptions.
# The key will be added to the distributions.
# Key can be found at https://sentry.io/settings/rasahq/projects/rasa-open-source/install/python/
# - SENTRY_AUTH_TOKEN: authentication used to tell Sentry about any new releases
# created at https://sentry.io/settings/account/api/auth-tokens/
env:
# needed to fix issues with boto during testing:
# https://github.com/travis-ci/travis-ci/issues/7940
BOTO_CONFIG: /dev/null
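# Note: IS_TAG_BUILD is stored as the string 'true' or 'false'; jobs below compare it
# with string equality (env.IS_TAG_BUILD == 'true')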
IS_TAG_BUILD: ${{ startsWith(github.event.ref, 'refs/tags') }}
DOCKERHUB_USERNAME: tmbo
DEFAULT_PYTHON_VERSION: "3.10"
# for wait_for_xx jobs
WAIT_TIMEOUT_SECS: 3000
WAIT_INTERVAL_SECS: 60
jobs:
changes:
name: Check for file changes
runs-on: ubuntu-22.04
outputs:
# Each of the backend/docker/docs outputs is a string; only one of the two source steps
# (changed-files or run-all) runs at a time, so the '||' picks up whichever output exists
backend: ${{ steps.changed-files.outputs.backend || steps.run-all.outputs.backend }}
docker: ${{ steps.changed-files.outputs.docker || steps.run-all.outputs.docker }}
docs: ${{ steps.changed-files.outputs.docs || steps.run-all.outputs.docs }}
is_pre_release_version: ${{ steps.rasa_check_version_type.outputs.is_pre_release_version }}
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
# Run the normal filters if the all-tests-required label is not set
id: changed-files
if: contains(github.event.pull_request.labels.*.name, 'status:all-tests-required') == false && github.event_name == 'pull_request'
with:
token: ${{ secrets.GITHUB_TOKEN }}
filters: .github/change_filters.yml
- name: Set all filters to true if all tests are required
# Set all filters to true if the all-tests-required label is set or if we are not in a PR
# Bypasses all the change filters in change_filters.yml and forces all outputs to true
id: run-all
if: contains(github.event.pull_request.labels.*.name, 'status:all-tests-required') || github.event_name != 'pull_request'
run: |
echo "backend=true" >> $GITHUB_OUTPUT
echo "docker=true" >> $GITHUB_OUTPUT
echo "docs=true" >> $GITHUB_OUTPUT
- name: Check if tag version is a pre release version
id: rasa_check_version_type
if: env.IS_TAG_BUILD == 'true'
run: |
# Get current tagged Rasa version
CURRENT_TAG=${GITHUB_REF#refs/tags/}
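# A final release tag contains only digits and dots (e.g. 3.6.2);
# anything else (e.g. 3.6.2rc1) is treated as a pre-release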
if [[ "$CURRENT_TAG" =~ ^[0-9.]+$ ]]; then
echo "is_pre_release_version=false" >> $GITHUB_OUTPUT
else
echo "is_pre_release_version=true" >> $GITHUB_OUTPUT
fi
wait_for_docs_tests:
# Looks for the docs tests workflow run and waits for it to complete successfully
# Not run for tag builds
name: Wait for docs tests
if: github.ref_type != 'tag'
runs-on: ubuntu-22.04
needs: [changes]
steps:
- name: Wait for doc tests
uses: fountainhead/action-wait-for-check@297be350cf8393728ea4d4b39435c7d7ae167c93
id: wait-for-doc-tests
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: Test Documentation
ref: ${{ github.event.pull_request.head.sha || github.sha }}
timeoutSeconds: ${{ env.WAIT_TIMEOUT_SECS }}
intervalSeconds: ${{ env.WAIT_INTERVAL_SECS }}
- name: Fail the step if the doc tests run could not be found
if: ${{ steps.wait-for-doc-tests.outputs.conclusion == 'timed_out' }}
run: |
echo "Could not find the doc tests run."
exit 1
quality:
name: Code Quality
if: github.ref_type != 'tag'
runs-on: ubuntu-22.04
needs: [changes]
steps:
- name: Checkout git repository 🕝
if: needs.changes.outputs.backend == 'true'
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍
if: needs.changes.outputs.backend == 'true'
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Read Poetry Version 🔢
if: needs.changes.outputs.backend == 'true'
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Install poetry 🦄
if: needs.changes.outputs.backend == 'true'
uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
with:
poetry-version: ${{ env.POETRY_VERSION }}
- name: Load Poetry Cached Libraries ⬇
id: cache-poetry
if: needs.changes.outputs.backend == 'true'
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .venv
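# The key combines the OS, Poetry version, Python version, the poetry.lock hash, and
# POETRY_CACHE_VERSION (a secret that can be bumped to invalidate the cache)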
key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }}
restore-keys: ${{ runner.os }}-poetry-${{ env.DEFAULT_PYTHON_VERSION }}
- name: Clear Poetry cache
if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')
run: rm -r .venv
- name: Create virtual environment
if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true'
run: python -m venv create .venv
- name: Set up virtual environment
if: needs.changes.outputs.backend == 'true'
run: poetry config virtualenvs.in-project true
- name: Install Dependencies 📦
if: needs.changes.outputs.backend == 'true'
# Poetry intermittently fails to install a dependency if it is not PEP 517 compliant
# This is a workaround for that issue
run: |
sudo apt-get -y install libpq-dev
make install-full || make install-full || make install-full
- name: Checkout target branch to be able to diff
if: needs.changes.outputs.backend == 'true' && github.event_name == 'pull_request'
run: |
git fetch origin ${{ github.base_ref }}
echo "DOCSTRING_DIFF_BRANCH=origin/${{ github.base_ref }}" >> $GITHUB_ENV
# Fetch entire history for current branch so that `make lint-docstrings`
# can calculate the proper diff between the branches
git fetch --unshallow origin "${{ github.ref }}"
- name: Add github workflow problem matchers
if: needs.changes.outputs.backend == 'true'
run: |
echo "::add-matcher::.github/matchers/flake8-error-matcher.json"
- name: Lint Code 🎎
if: needs.changes.outputs.backend == 'true'
run: |
# If it's not a pull request, $DOCSTRING_DIFF_BRANCH is unset.
# This will result in an empty diff, which effectively means that
# make lint-docstrings will be skipped for events other than `pull_request`
make lint BRANCH=$DOCSTRING_DIFF_BRANCH
- name: Check Types 📚
if: needs.changes.outputs.backend == 'true'
run: make types
- name: Lint Changelog Filenames 📝
if: needs.changes.outputs.backend == 'true' && github.event_name == 'pull_request'
run: make lint-changelog
- name: Test CLI 🖥
if: needs.changes.outputs.backend == 'true'
# Makes sure we catch any dependency errors early. They would otherwise surface as
# strange errors during the docs build, so it is easier to catch them by running
# the `rasa` command once before the docs build.
run: poetry run rasa --help
changelog:
name: Check for changelog
runs-on: ubuntu-22.04
steps:
- name: Checkout git repository 🕝
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Assert release includes all changelog entries
# Check the changelog folder only for pull requests that prepare a release
if: github.event_name == 'pull_request' && startsWith(github.head_ref, 'prepare-release') && needs.changes.outputs.is_pre_release_version == 'false'
working-directory: changelog
run: |
# List all unexpected files in changelog/
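# (ls -A also lists hidden files; --ignore excludes the files that are expected to stay in changelog/)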
UNEXPECTED_FILES=$(ls -A --ignore={"README.md",".gitignore","_template.md.jinja2"})
# Exit with an error if any unexpected files are found
[[ "$UNEXPECTED_FILES" ]] && \
echo "Found the following unexpected files in changelog/" && \
echo "$UNEXPECTED_FILES" && \
exit 1 || \
echo "Release includes all changelog entries."
test:
name: Run Tests
if: github.ref_type != 'tag'
runs-on: ${{ matrix.os }}
timeout-minutes: 60
needs: [changes]
strategy:
fail-fast: false
matrix:
test:
- test-cli
- test-core-featurizers
- test-policies
- test-nlu-featurizers
- test-nlu-predictors
- test-full-model-training
- test-other-unit-tests
- test-performance
os: [ubuntu-22.04, windows-2019]
python-version: [3.8, 3.9, "3.10"]
steps:
- name: Run DataDog Agent
if: needs.changes.outputs.backend == 'true' && (matrix.os != 'windows-2019' || contains(github.event.pull_request.labels.*.name, 'tools:datadog-windows'))
run: |
docker run --name dd_agent -p 8126:8126 -d -e "DD_API_KEY=${{ secrets.DD_API_KEY }}" -e "DD_INSIDE_CI=true" -e "DD_HOSTNAME=none" -e "DD_SITE=datadoghq.eu" -e GITHUB_ACTIONS=true -e CI=true datadog/agent:latest
docker ps --all --filter name=dd_agent --filter status=running --no-trunc --format "{{.ID}} {{.Status}}"
docker port dd_agent
- name: Checkout git repository 🕝
if: needs.changes.outputs.backend == 'true'
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Set up Python ${{ matrix.python-version }} 🐍
if: needs.changes.outputs.backend == 'true'
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: ${{ matrix.python-version }}
- name: Read Poetry Version 🔢
if: needs.changes.outputs.backend == 'true'
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Install poetry 🦄
if: needs.changes.outputs.backend == 'true'
uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
with:
poetry-version: ${{ env.POETRY_VERSION }}
- name: Load Poetry Cached Libraries ⬇
id: cache-poetry
if: needs.changes.outputs.backend == 'true'
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .venv
key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }}
- name: Clear Poetry cache
if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')
run: rm -r .venv
# Poetry >= 1.1.0b uses virtualenv to create a virtual environment.
# The virtualenv simply doesn't work on Windows with our setup,
# that's why we use venv to create the virtual environment
- name: Create virtual environment
if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true'
run: python -m venv create .venv
- name: Set up virtual environment
if: needs.changes.outputs.backend == 'true'
# Poetry on Windows cannot pick up the virtual environments directory properly,
# and it creates a new one every time the pipeline runs.
# This step solves this problem — it tells poetry to always use `.venv` directory inside
# the project itself, which also makes it easier for us to determine the correct directory
# that needs to be cached.
run: poetry config virtualenvs.in-project true
- name: Install Dependencies (Linux) 📦
if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04'
# Poetry intermittently fails to install a dependency if it is not PEP 517 compliant
# This is a workaround for that issue
run: |
sudo apt-get -y install libpq-dev
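# Retry the install up to three times to work around intermittent Poetry failures;
# tee the output so an outdated lock file can be detected below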
make install-full | tee .output || make install-full | tee .output || make install-full | tee .output
if grep 'The lock file is not up to date' .output; then exit 1; fi
make prepare-tests-ubuntu
- name: Install Dependencies (Windows) 📦
if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019'
# Restoring cache doesn't work properly on Windows due to symlinks.
# We create symlinks for spacy models, which is why we need to clean them up
# before caching the dependencies directory.
# More information: https://github.com/actions/cache/issues/120
# Poetry intermittently fails to install a dependency if it is not PEP 517 compliant
# This is a workaround for that issue
run: |
$spacy_data_dir = ".venv\lib\site-packages\spacy\data"
if (Test-Path $spacy_data_dir) {
Get-ChildItem -Force -ErrorAction Stop $spacy_data_dir | Where-Object { if($_.Attributes -match "ReparsePoint"){$_.Delete()} }
Remove-Item -Force -Recurse $spacy_data_dir
New-Item -Path $spacy_data_dir -Type Directory
}
make install-full || make install-full || make install-full
make prepare-tests-windows-gha
- name: Add github workflow problem matchers
if: needs.changes.outputs.backend == 'true' && matrix.python-version == '3.10' && matrix.os == 'ubuntu-22.04'
# only annotate based on test runs on ubuntu: otherwise
# all errors will be duplicated for each python / os combination
# therefore, we only enable for the one where most tests are run
# (tests will still run in other envs, they will just not create annotations)
run: pip install pytest-github-actions-annotate-failures
- name: Disable "LongPathsEnabled" option on Windows
if: matrix.os == 'windows-2019'
# On Windows laptops, a default preset prevents path names from being longer than
# 260 characters. Some of our users can't enable this setting due to company policies.
# We implemented a fix for model storage. The Windows container in GitHub
# comes with the setting enabled, so we disable it here in order to ensure our tests
# are running in an environment where long path names are prevented.
run: |
(Get-ItemProperty "HKLM:System\CurrentControlSet\Control\FileSystem").LongPathsEnabled
Set-ItemProperty 'HKLM:\System\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -value 0
- name: Install ddtrace
if: needs.changes.outputs.backend == 'true'
run: poetry run pip install -U ddtrace
- name: Test Code 🔍 (multi-process)
if: needs.changes.outputs.backend == 'true'
env:
JOBS: 2
PYTHONIOENCODING: "utf-8"
DD_ENV: ${{ matrix.test }}
DD_SERVICE: rasa
DD_ARGS: --ddtrace --ddtrace-patch-all
run: |
make ${{ matrix.test }}
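# Coverage reports are only uploaded from the ubuntu runs (see 'Store coverage reports' below),
# so the rename is skipped on Windows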
if [[ "${{ matrix.os }}" != "windows-2019" ]]; then
mv .coverage ${{ github.workspace }}/${{ matrix.test }}-coverage
fi
shell: bash # use bash so the same commands run on both Linux and Windows
- name: Store coverage reports
if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04'
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: ${{ matrix.test }}-coverage
path: |
${{ github.workspace }}/${{ matrix.test }}-coverage
test-flaky:
name: Run Flaky Tests
if: github.ref_type != 'tag'
runs-on: ${{ matrix.os }}
timeout-minutes: 60
needs: [changes]
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04, windows-2019]
python-version: [3.8, 3.9, "3.10"]
steps:
- name: Run DataDog Agent
if: needs.changes.outputs.backend == 'true' && (matrix.os != 'windows-2019' || contains(github.event.pull_request.labels.*.name, 'tools:datadog-windows'))
run: |
docker run --name dd_agent -p 8126:8126 -d -e "DD_API_KEY=${{ secrets.DD_API_KEY }}" -e "DD_INSIDE_CI=true" -e "DD_HOSTNAME=none" -e "DD_SITE=datadoghq.eu" -e GITHUB_ACTIONS=true -e CI=true datadog/agent:latest
docker ps --all --filter name=dd_agent --filter status=running --no-trunc --format "{{.ID}} {{.Status}}"
docker port dd_agent
- name: Checkout git repository 🕝
if: needs.changes.outputs.backend == 'true'
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Set up Python ${{ matrix.python-version }} 🐍
if: needs.changes.outputs.backend == 'true'
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: ${{ matrix.python-version }}
- name: Read Poetry Version 🔢
if: needs.changes.outputs.backend == 'true'
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Install poetry 🦄
if: needs.changes.outputs.backend == 'true'
uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
with:
poetry-version: ${{ env.POETRY_VERSION }}
- name: Load Poetry Cached Libraries ⬇
id: cache-poetry
if: needs.changes.outputs.backend == 'true'
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .venv
key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }}
- name: Clear Poetry cache
if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')
run: rm -r .venv
# Poetry >= 1.1.0b uses virtualenv to create a virtual environment.
# The virtualenv simply doesn't work on Windows with our setup,
# that's why we use venv to create the virtual environment
- name: Create virtual environment
if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true'
run: python -m venv create .venv
- name: Set up virtual environment
if: needs.changes.outputs.backend == 'true'
# Poetry on Windows cannot pick up the virtual environments directory properly,
# and it creates a new one every time the pipeline runs.
# This step solves this problem — it tells poetry to always use `.venv` directory inside
# the project itself, which also makes it easier for us to determine the correct directory
# that needs to be cached.
run: poetry config virtualenvs.in-project true
- name: Install Dependencies (Linux) 📦
if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04'
run: |
sudo apt-get -y install libpq-dev
make install-full | tee .output
if grep 'The lock file is not up to date' .output; then exit 1; fi
make prepare-tests-ubuntu
- name: Install Dependencies (Windows) 📦
if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019'
# Restoring cache doesn't work properly on Windows due to symlinks.
# We create symlinks for spacy models, which is why we need to clean them up
# before caching the dependencies' directory.
# More information: https://github.com/actions/cache/issues/120
run: |
$spacy_data_dir = ".venv\lib\site-packages\spacy\data"
if (Test-Path $spacy_data_dir) {
Get-ChildItem -Force -ErrorAction Stop $spacy_data_dir | Where-Object { if($_.Attributes -match "ReparsePoint"){$_.Delete()} }
Remove-Item -Force -Recurse $spacy_data_dir
New-Item -Path $spacy_data_dir -Type Directory
}
make install-full
make prepare-tests-windows-gha
- name: Add github workflow problem matchers
if: needs.changes.outputs.backend == 'true' && matrix.python-version == '3.10' && matrix.os == 'ubuntu-22.04'
# only annotate based on test runs on ubuntu: otherwise
# all errors will be duplicated for each python / os combination
# therefore, we only enable for the one where most tests are run
# (tests will still run in other envs, they will just not create annotations)
run: pip install pytest-github-actions-annotate-failures
- name: Disable "LongPathsEnabled" option on Windows
if: matrix.os == 'windows-2019'
# On Windows laptops, a default preset prevents path names from being longer than
# 260 characters. Some of our users can't enable this setting due to company policies.
# We implemented a fix for model storage. The Windows container in GitHub
# comes with the setting enabled, so we disable it here in order to ensure our tests
# are running in an environment where long path names are prevented.
run: |
(Get-ItemProperty "HKLM:System\CurrentControlSet\Control\FileSystem").LongPathsEnabled
Set-ItemProperty 'HKLM:\System\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -value 0
- name: Install ddtrace
if: needs.changes.outputs.backend == 'true'
run: poetry run pip install -U ddtrace
- name: Test Code 🔍 (multi-process)
if: needs.changes.outputs.backend == 'true'
env:
JOBS: 2
PYTHONIOENCODING: "utf-8"
DD_ENV: test-flaky
DD_SERVICE: rasa
DD_ARGS: --ddtrace --ddtrace-patch-all
run: |
make test-flaky
if [[ "${{ matrix.os }}" != "windows-2019" ]]; then
mv .coverage ${{ github.workspace }}/test-flaky-coverage
fi
shell: bash # use bash so the same commands run on both Linux and Windows
- name: Store coverage reports
if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04'
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: test-flaky-coverage
path: |
${{ github.workspace }}/test-flaky-coverage
upload_coverage_reports:
name: Upload coverage reports to codeclimate
if: github.ref_type != 'tag'
runs-on: ubuntu-22.04
# Always upload results even if tests failed
needs:
- test
- changes
steps:
- name: Checkout git repository 🕝
if: needs.changes.outputs.backend == 'true'
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Set up Python 3.10 🐍
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: "3.10"
- name: Get backend coverage reports
if: needs.changes.outputs.backend == 'true'
uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
with:
path: ${{ github.workspace }}/tests_coverage
- name: Merge all reports
if: needs.changes.outputs.backend == 'true'
run: |
subs=`ls ${{ github.workspace }}/tests_coverage`
download_dir="${{ github.workspace }}/tests_coverage"
final_dir="${{ github.workspace }}/tests_coverage/final"
# Downloaded artifacts land in separate folders; move them all into one folder for upload
mkdir "${final_dir}/"
for i in $subs; do
mv "${download_dir}/$i"/* "${final_dir}/"
done
pip install coverage
coverage combine "${final_dir}/"*
coverage xml
- name: Upload reports to codeclimate
if: needs.changes.outputs.backend == 'true'
uses: paambaati/codeclimate-action@b649ad206d2e83dafb9ed130deba698aa1b41d78
env:
CC_TEST_REPORTER_ID: ${{ secrets.CODECLIMATE_REPORTER_ID }}
with:
coverageLocations: |
${{ github.workspace }}/coverage.xml:coverage.py
debug: true
integration_test:
name: Run Non-Sequential Integration Tests
if: github.ref_type != 'tag'
runs-on: ubuntu-22.04
timeout-minutes: 60
needs: [changes]
env:
REDIS_HOST: localhost
REDIS_PORT: 6379
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
RABBITMQ_HOST: localhost
RABBITMQ_PORT: 5672
RABBITMQ_USER: guest
RABBITMQ_PASSWORD: guest
services:
redis:
image: redis:6
# Set health checks to wait until redis has started
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# FIXME: cannot use ${{ env.REDIS_PORT }} here
# mapping container ports to the host
- 6379:6379
postgres:
image: postgres:13
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
env:
# postgres image requires password to be set
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
ports:
# FIXME: cannot use ${{ env.POSTGRES_PORT }} here
# mapping container ports to the host
- 5432:5432
rabbitmq:
# see https://github.com/docker-library/healthcheck/blob/master/rabbitmq/docker-healthcheck
image: healthcheck/rabbitmq
ports:
- 5672:5672
mongodb:
image: mongodb/mongodb-community-server:6.0.4-ubuntu2204
options: >-
--health-cmd "echo 'db.runCommand("ping").ok' | mongosh --quiet"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 27017:27017
steps:
- name: Checkout git repository 🕝
if: needs.changes.outputs.backend == 'true'
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍
if: needs.changes.outputs.backend == 'true'
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Read Poetry Version 🔢
if: needs.changes.outputs.backend == 'true'
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Install poetry 🦄
if: needs.changes.outputs.backend == 'true'
uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
with:
poetry-version: ${{ env.POETRY_VERSION }}
- name: Load Poetry Cached Libraries ⬇
id: cache-poetry
if: needs.changes.outputs.backend == 'true'
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .venv
key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }}
- name: Clear Poetry cache
if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')
run: rm -r .venv
# Poetry >= 1.1.0b uses virtualenv to create a virtual environment.
# The virtualenv simply doesn't work on Windows with our setup,
# that's why we use venv to create the virtual environment
- name: Create virtual environment
if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true'
run: python -m venv create .venv
- name: Set up virtual environment
if: needs.changes.outputs.backend == 'true'
# Poetry on Windows cannot pick up the virtual environments directory properly,
# and it creates a new one every time the pipeline runs.
# This step solves this problem — it tells poetry to always use `.venv` directory inside
# the project itself, which also makes it easier for us to determine the correct directory
# that needs to be cached.
run: poetry config virtualenvs.in-project true
- name: Install Dependencies (Linux) 📦
if: needs.changes.outputs.backend == 'true'
run: |
sudo apt-get -y install libpq-dev
make install-full | tee .output
if grep 'The lock file is not up to date' .output; then exit 1; fi
make prepare-tests-ubuntu
- name: Run kafka and zookeeper containers for integration testing
if: needs.changes.outputs.backend == 'true'
run: |
docker-compose -f tests_deployment/docker-compose.kafka.yml up -d
- name: Test Code with Services 🩺
if: needs.changes.outputs.backend == 'true'
env:
JOBS: 2
INTEGRATION_TEST_PYTEST_MARKERS: '"not sequential"'
PYTHONIOENCODING: "utf-8"
run: |
make test-integration
sequential_integration_test:
name: Run Sequential Integration Tests
if: github.ref_type != 'tag'
runs-on: ubuntu-20.04
timeout-minutes: 60
needs: [changes]
env:
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
services:
postgres:
image: postgres:13
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
env:
# postgres image requires password to be set
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
ports:
# FIXME: cannot use ${{ env.POSTGRES_PORT }} here
# mapping container ports to the host
- 5432:5432
steps:
- name: Checkout git repository 🕝
if: needs.changes.outputs.backend == 'true'
uses: actions/checkout@v3
- name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍
if: needs.changes.outputs.backend == 'true'
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Read Poetry Version 🔢
if: needs.changes.outputs.backend == 'true'
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Install poetry 🦄
if: needs.changes.outputs.backend == 'true'
uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
with:
poetry-version: ${{ env.POETRY_VERSION }}
- name: Load Poetry Cached Libraries ⬇
id: cache-poetry
if: needs.changes.outputs.backend == 'true'
uses: actions/cache@v3
with:
path: .venv
key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }}
- name: Clear Poetry cache
if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')
run: rm -r .venv
# Poetry >= 1.1.0b uses virtualenv to create a virtual environment.
# The virtualenv simply doesn't work on Windows with our setup,
# that's why we use venv to create the virtual environment
- name: Create virtual environment
if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true'
run: python -m venv create .venv
- name: Set up virtual environment
if: needs.changes.outputs.backend == 'true'
# Poetry on Windows cannot pick up the virtual environments directory properly,
# and it creates a new one every time the pipeline runs.
# This step solves this problem — it tells poetry to always use `.venv` directory inside
# the project itself, which also makes it easier for us to determine the correct directory
# that needs to be cached.
run: poetry config virtualenvs.in-project true
- name: Install Dependencies (Linux) 📦
if: needs.changes.outputs.backend == 'true'
run: |
sudo apt-get -y install libpq-dev
make install-full | tee .output
if grep 'The lock file is not up to date' .output; then exit 1; fi
make prepare-tests-ubuntu
# these integration tests need to be run in a sequential fashion,
# due to environment constraints, so we're running them in a single process.
- name: Test Code with Services 🩺 (sequential)
if: needs.changes.outputs.backend == 'true'
env:
JOBS: 1
INTEGRATION_TEST_PYTEST_MARKERS: "sequential"
PYTHONIOENCODING: "utf-8"
run: |
make test-integration
- name: Stop kafka and zookeeper containers for integration testing
if: needs.changes.outputs.backend == 'true'
run: |
docker-compose -f tests_deployment/docker-compose.kafka.yml down
build_docker_base_images_and_set_env:
name: Build Docker base images and setup environment
runs-on: ubuntu-22.04
outputs:
base_image_hash: ${{ steps.check_image.outputs.base_image_hash }}
base_mitie_image_hash: ${{ steps.check_image.outputs.base_mitie_image_hash }}
base_builder_image_hash: ${{ steps.check_image.outputs.base_builder_image_hash }}
# Tag name used for images created during Docker image builds, e.g. 3886 - a PR number
image_tag: ${{ steps.set_output.outputs.image_tag }}
# Return 'true' if the tag version is equal to or higher than the latest tagged Rasa version
is_newest_version: ${{ steps.rasa_get_version.outputs.is_newest_version }}
steps:
# Due to an issue with checking out the wrong commit, we make sure
# to check out the HEAD commit for a pull request.
# More details: https://github.com/actions/checkout/issues/299
- name: Checkout pull request HEAD commit instead of merge commit 🕝
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
if: github.event_name == 'pull_request'
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Checkout git repository 🕝
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
if: github.event_name != 'pull_request'
- name: Set up QEMU
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # v2.7.0
- name: Read Poetry Version 🔢
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Login to DockerHub Registry 🔢
run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ env.DOCKERHUB_USERNAME }} --password-stdin || true
- name: Check if tag version is equal to or higher than the latest tagged Rasa version
id: rasa_get_version
if: env.IS_TAG_BUILD == 'true'
run: |
# Get latest tagged Rasa version
git fetch --depth=1 origin "+refs/tags/*:refs/tags/*"
# Fetch branch history
git fetch --prune --unshallow
LATEST_TAGGED_NON_ALPHA_RASA_VERSION=$(git tag | sort -r -V | grep -E "^[0-9.]+$" | head -n1)
CURRENT_TAG=${GITHUB_REF#refs/tags/}
# Return 'true' if the tag version is equal to or higher than the latest tagged Rasa version
IS_NEWEST_VERSION=$((printf '%s\n%s\n' "${LATEST_TAGGED_NON_ALPHA_RASA_VERSION}" "$CURRENT_TAG" \
| sort -V -C && echo true || echo false) || true)
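# Example: LATEST_TAGGED_NON_ALPHA_RASA_VERSION=3.6.1 and CURRENT_TAG=3.6.2 are already in
# version order, so 'sort -V -C' succeeds and IS_NEWEST_VERSION becomes 'true'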
# Make sure alphas or release candidates are never marked as the newest version
if [[ "${IS_NEWEST_VERSION}" == "true" && "$CURRENT_TAG" =~ ^[0-9.]+$ ]]; then
echo "is_newest_version=true" >> $GITHUB_OUTPUT
else
echo "is_newest_version=false" >> $GITHUB_OUTPUT
fi
- name: Check if a base image exists
id: check_image
env:
DOCKER_CLI_EXPERIMENTAL: enabled
run: |
# Base image
BASE_IMAGE_HASH=${{ hashFiles('docker/Dockerfile.base') }}
echo "base_image_hash=${BASE_IMAGE_HASH}" >> $GITHUB_OUTPUT
BASE_IMAGE_EXISTS=$((docker manifest inspect rasa/rasa:base-${BASE_IMAGE_HASH} &> /dev/null && echo true || echo false) || true)
echo "base_exists=${BASE_IMAGE_EXISTS}" >> $GITHUB_OUTPUT
# Base MITIE image
BASE_MITIE_IMAGE_HASH=${{ hashFiles('docker/Dockerfile.base-mitie') }}
MAKEFILE_MITIE_HASH=${{ hashFiles('Makefile') }}
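# hashFiles produces a 64-character SHA-256 hash; truncating both hashes to 50 characters
# presumably keeps the combined tag within Docker's 128-character tag length limit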
echo "base_mitie_image_hash=${BASE_MITIE_IMAGE_HASH:0:50}-${MAKEFILE_MITIE_HASH:0:50}" >> $GITHUB_OUTPUT
BASE_IMAGE_MITIE_EXISTS=$((docker manifest inspect rasa/rasa:base-mitie-${BASE_MITIE_IMAGE_HASH:0:50}-${MAKEFILE_MITIE_HASH:0:50} &> /dev/null && echo true || echo false) || true)
echo "base_mitie_exists=${BASE_IMAGE_MITIE_EXISTS}" >> $GITHUB_OUTPUT
# Base poetry image
BASE_IMAGE_POETRY_EXISTS=$((docker manifest inspect rasa/rasa:base-poetry-${{ env.POETRY_VERSION }} &> /dev/null && echo true || echo false) || true)
echo "base_poetry_exists=${BASE_IMAGE_POETRY_EXISTS}" >> $GITHUB_OUTPUT
# Base builder image
BASE_IMAGE_BUILDER_HASH=${{ hashFiles('docker/Dockerfile.base-builder') }}-poetry-${{ env.POETRY_VERSION }}
echo "base_builder_image_hash=${BASE_IMAGE_BUILDER_HASH}" >> $GITHUB_OUTPUT
BASE_IMAGE_BUILDER_EXISTS=$((docker manifest inspect rasa/rasa:base-builder-${BASE_IMAGE_BUILDER_HASH} &> /dev/null && echo true || echo false) || true)
echo "base_builder_exists=${BASE_IMAGE_BUILDER_EXISTS}" >> $GITHUB_OUTPUT
- name: Build Docker base image 🛠
if: steps.check_image.outputs.base_exists == 'false' || env.IS_TAG_BUILD == 'true'
run: |
export IMAGE_TAG=${{ steps.check_image.outputs.base_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base
- name: Push Docker base image if it's not building from a fork ⬆
if: (steps.check_image.outputs.base_exists == 'false' || env.IS_TAG_BUILD == 'true') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
run: |
export IMAGE_TAG=${{ steps.check_image.outputs.base_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base --push
- name: Build Docker mitie base image 🛠
if: steps.check_image.outputs.base_mitie_exists == 'false' || steps.check_image.outputs.base_exists == 'false'
run: |
export IMAGE_TAG=${{ steps.check_image.outputs.base_mitie_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-mitie
- name: Push Docker mitie base image if it's not building from a fork ⬆
if: (steps.check_image.outputs.base_mitie_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
run: |
export IMAGE_TAG=${{ steps.check_image.outputs.base_mitie_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-mitie --push
- name: Build Docker poetry base image 🛠
if: steps.check_image.outputs.base_poetry_exists == 'false' || steps.check_image.outputs.base_exists == 'false'
run: |
export IMAGE_TAG=${{ env.POETRY_VERSION }}
export BASE_IMAGE_HASH=${{ steps.check_image.outputs.base_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-poetry
- name: Push Docker poetry base image if it's not building from a fork ⬆
if: (steps.check_image.outputs.base_poetry_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
run: |
export IMAGE_TAG=${{ env.POETRY_VERSION }}
export BASE_IMAGE_HASH=${{ steps.check_image.outputs.base_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-poetry --push
- name: Build Docker builder base image 🛠
if: steps.check_image.outputs.base_builder_exists == 'false' || steps.check_image.outputs.base_exists == 'false'
run: |
export IMAGE_TAG=${{ steps.check_image.outputs.base_builder_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-builder
- name: Push Docker builder base image if it's not building from a fork ⬆
if: (steps.check_image.outputs.base_builder_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
run: |
export IMAGE_TAG=${{ steps.check_image.outputs.base_builder_image_hash }}
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-builder --push
# Set environment variables for a pull request
#
# In this scenario, we've created a PR #1234
#
# Example output:
# IMAGE_TAG=1234
- name: Set environment variables - pull_request
if: github.event_name == 'pull_request' && env.IS_TAG_BUILD == 'false'
run: |
echo "IMAGE_TAG=${{ github.event.number }}" >> $GITHUB_ENV
# Set environment variables for a tag
#
# In this scenario, we've pushed the '2.0.6' tag
#
# Example output:
# TAG_NAME=2.0.6
# IMAGE_TAG=2.0.6
- name: Set environment variables - push - tag
if: github.event_name == 'push' && env.IS_TAG_BUILD == 'true'
run: |
TAG_NAME=${GITHUB_REF#refs/tags/}
echo "IMAGE_TAG=${TAG_NAME}" >> $GITHUB_ENV
# Set environment variables for a branch
#
# In this scenario, we've pushed changes into the main branch
#
# Example output:
# IMAGE_TAG=main
- name: Set environment variables - push - branch
if: github.event_name == 'push' && env.IS_TAG_BUILD == 'false'
run: |
BRANCH_NAME=${GITHUB_REF#refs/heads/}
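# Sanitize the branch name for use as a Docker image tag by replacing special characters
# with '-', e.g. feature/some.change -> feature-some-change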
SAFE_BRANCH_NAME="$(echo ${GITHUB_REF#refs/heads/} | sed 's/[\\*+.$\#\-\/]/-/g')"
echo "IMAGE_TAG=${SAFE_BRANCH_NAME}" >> $GITHUB_ENV
- name: Set output
id: set_output
run: |
echo "image_tag=${{ env.IMAGE_TAG }}" >> $GITHUB_OUTPUT
docker:
name: Build Docker
runs-on: ubuntu-22.04
needs: [changes, build_docker_base_images_and_set_env]
env:
IMAGE_TAG: ${{ needs.build_docker_base_images_and_set_env.outputs.image_tag }}
BASE_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_image_hash }}
BASE_MITIE_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_mitie_image_hash }}
BASE_BUILDER_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_builder_image_hash }}
strategy:
matrix:
image: [default, full, mitie-en, spacy-de, spacy-it, spacy-en]
steps:
# Due to an issue with checking out the wrong commit, we make sure
# to check out the HEAD commit for a pull request.
# More details: https://github.com/actions/checkout/issues/299
- name: Checkout pull request HEAD commit instead of merge commit 🕝
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
if: github.event_name == 'pull_request'
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Checkout git repository 🕝
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
if: github.event_name != 'pull_request'
- name: Set up QEMU
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # v2.7.0
- name: Free disk space
if: needs.changes.outputs.docker == 'true'
# tries to make sure we do not run out of disk space, see
# https://github.community/t5/GitHub-Actions/BUG-Strange-quot-No-space-left-on-device-quot-IOExceptions-on/td-p/46101
run: |
sudo swapoff -a
sudo rm -f /swapfile
sudo apt clean
docker image prune -a -f
df -h
- name: Read Poetry Version 🔢
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Echo Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Login to DockerHub Registry 🔢
if: needs.changes.outputs.docker == 'true'
run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ env.DOCKERHUB_USERNAME }} --password-stdin || true
- name: Copy Segment write key to the package
if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'RasaHQ/rasa'
env:
RASA_TELEMETRY_WRITE_KEY: ${{ secrets.RASA_OSS_TELEMETRY_WRITE_KEY }}
RASA_EXCEPTION_WRITE_KEY: ${{ secrets.RASA_OSS_EXCEPTION_WRITE_KEY }}
run: |
./scripts/write_keys_file.sh
- name: Build Docker image
if: needs.changes.outputs.docker == 'true'
run: |
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }}
- name: Push image with main tag 📦
if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository == 'RasaHQ/rasa'
run: |
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} --push
- name: Push image with ${{github.ref}} tag 📦
if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && env.IS_TAG_BUILD == 'true' && github.repository == 'RasaHQ/rasa'
run: |
IS_NEWEST_VERSION=${{ needs.build_docker_base_images_and_set_env.outputs.is_newest_version }}
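# NOTE: IS_NEWEST_VERSION is currently unused because the 'latest' tagging below is commented out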
docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} --push
# # Tag the image as latest
# if [[ "${IS_NEWEST_VERSION}" == "true" ]]; then
# if [[ "${{ matrix.image }}" == "default" ]]; then
# RELEASE_TAG="${IMAGE_TAG}"
# else
# RELEASE_TAG="${IMAGE_TAG}-${{ matrix.image }}"
# fi
#
# LATEST_TAG=$(echo $RELEASE_TAG | sed 's/'$IMAGE_TAG'/latest/g')
#
# docker tag rasa/rasa:${RELEASE_TAG} rasa/rasa:${LATEST_TAG}
# docker push rasa/rasa:${LATEST_TAG}
# fi
deploy:
name: Deploy to PyPI
runs-on: ubuntu-22.04
# deploy will only be run when there is a tag available
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'RasaHQ/rasa'
needs: [docker] # only run after the docker build stage succeeds
steps:
- name: Checkout git repository 🕝
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Set up Python 3.9 🐍
uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: 3.9
- name: Read Poetry Version 🔢
run: |
echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
shell: bash
- name: Install poetry 🦄
uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
with:
poetry-version: ${{ env.POETRY_VERSION }}
- name: Copy Segment write key to the package
env:
RASA_TELEMETRY_WRITE_KEY: ${{ secrets.RASA_OSS_TELEMETRY_WRITE_KEY }}
RASA_EXCEPTION_WRITE_KEY: ${{ secrets.RASA_OSS_EXCEPTION_WRITE_KEY }}
run: |
./scripts/write_keys_file.sh
- name: Build ⚒️ Distributions
run: poetry build
- name: Publish to PyPI 📦
uses: pypa/gh-action-pypi-publish@c7f29f7adef1a245bd91520e94867e5c6eedddcc
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}
- name: Notify Sentry about the release
env:
GITHUB_TAG: ${{ github.ref }}
SENTRY_ORG: rasahq
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
run: |
curl -sL https://sentry.io/get-cli/ | bash
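# Strip the 'refs/tags/' prefix from the ref, e.g. refs/tags/3.6.2 -> 3.6.2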
GITHUB_TAG=${GITHUB_TAG/refs\/tags\//}
sentry-cli releases new -p rasa-open-source "rasa-$GITHUB_TAG"
sentry-cli releases set-commits --auto "rasa-$GITHUB_TAG"
sentry-cli releases finalize "rasa-$GITHUB_TAG"
- name: Notify Slack & Publish Release Notes 🗞
env:
GH_RELEASE_NOTES_TOKEN: ${{ secrets.GH_RELEASE_NOTES_TOKEN }}
SLACK_WEBHOOK_TOKEN: ${{ secrets.SLACK_WEBHOOK_TOKEN }}
GITHUB_TAG: ${{ github.ref }}
GITHUB_REPO_SLUG: ${{ github.repository }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
GITHUB_TAG=${GITHUB_TAG/refs\/tags\//}
pip install -U github3.py pep440-version-utils
python3 scripts/publish_gh_release_notes.py
./scripts/ping_slack_about_package_release.sh
send_slack_notification_for_release_on_failure:
name: Notify Slack & Publish Release Notes
runs-on: ubuntu-22.04
# run this job when the workflow is triggered by a tag push
if: always() && github.repository == 'RasaHQ/rasa' && github.ref_type == 'tag'
needs:
- deploy
steps:
- name: Notify Slack of failure ⛔️
# send notification if 'deploy' is skipped (previous needed job failed) or failed
if: needs.deploy.result != 'success'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_TOKEN }}
uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a
with:
args: "⛔️ *Rasa Open Source* version `${{ github.ref_name }}` could not be released 😱! Please check out GitHub Actions: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"