move rest_api, sql_database and filesystem sources to dlt core #6405
Workflow file for this run
name: common | common

on:
  pull_request:
    branches:
      - master
      - devel
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
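  # the group keys on the PR number (falling back to the ref for direct pushes), so a new
  # push to the same PR cancels the previous in-flight run instead of queueing behind it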
env:
  RUNTIME__LOG_LEVEL: ERROR
  RUNTIME__DLTHUB_TELEMETRY_ENDPOINT: ${{ secrets.RUNTIME__DLTHUB_TELEMETRY_ENDPOINT }}
  # we need the secrets only for the rest_api_pipeline tests which are in tests/sources
  # so we inject them only at the end
  DLT_SECRETS_TOML: ${{ secrets.DLT_SECRETS_TOML }}
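  # DLT_SECRETS_TOML carries the raw contents of a secrets.toml file; a later step writes it
  # to tests/.dlt/secrets.toml so the sources tests can pick it up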
jobs:
  get_docs_changes:
    name: docs changes
    uses: ./.github/workflows/get_docs_changes.yml

  run_common:
    name: test
    needs: get_docs_changes
    if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest", "macos-latest", "windows-latest"]
        python-version: ["3.11.x"]
        # Test all python versions on ubuntu only
        include:
          - python-version: "3.8.x"
            os: "ubuntu-latest"
          - python-version: "3.9.x"
            os: "ubuntu-latest"
          - python-version: "3.10.x"
            os: "ubuntu-latest"
          - python-version: "3.12.x"
            os: "ubuntu-latest"
    defaults:
      run:
        shell: bash
    runs-on: ${{ matrix.os }}

    steps:
      - name: Check out
        uses: actions/checkout@master

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install tzdata on windows
        run: |
          cd %USERPROFILE%
          curl https://data.iana.org/time-zones/releases/tzdata2021e.tar.gz --output tzdata.tar.gz
          mkdir tzdata
          tar --extract --file tzdata.tar.gz --directory tzdata
          mkdir %USERPROFILE%\Downloads\tzdata
          copy tzdata %USERPROFILE%\Downloads\tzdata
          curl https://raw.githubusercontent.com/unicode-org/cldr/master/common/supplemental/windowsZones.xml --output %USERPROFILE%\Downloads\tzdata\windowsZones.xml
        if: runner.os == 'Windows'
        shell: cmd
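      # note: Windows runners do not ship the IANA tz database; the timezone handling used by
      # the tests (presumably pendulum) expects it under %USERPROFILE%\Downloads\tzdata together
      # with the CLDR windowsZones.xml mapping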
      - name: Install Poetry
        # https://github.com/snok/install-poetry#running-on-windows
        uses: snok/install-poetry@v1
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true
      # NOTE: do not cache. we want to have a clean state each run and we upgrade dependencies later
      # - name: Load cached venv
      #   id: cached-poetry-dependencies
      #   uses: actions/cache@v3
      #   with:
      #     # path: ${{ steps.pip-cache.outputs.dir }}
      #     path: .venv
      #     key: venv-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
      - name: Install dependencies
        run: poetry install --no-interaction --with sentry-sdk

      - run: |
          poetry run pytest tests/common tests/normalize tests/reflection tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py
        if: runner.os != 'Windows'
        name: Run common tests with minimum dependencies Linux/MAC

      - run: |
          poetry run pytest tests/common tests/normalize tests/reflection tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py -m "not forked"
        if: runner.os == 'Windows'
        name: Run common tests with minimum dependencies Windows
        shell: cmd
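      # Windows runs deselect the "forked" marker; that marker presumably comes from
      # pytest-forked, which depends on os.fork and is not available on Windows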
      - name: Install duckdb dependencies
        run: poetry install --no-interaction -E duckdb --with sentry-sdk

      - run: |
          poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py
        if: runner.os != 'Windows'
        name: Run pipeline smoke tests with minimum deps Linux/MAC

      - run: |
          poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py -m "not forked"
        if: runner.os == 'Windows'
        name: Run pipeline smoke tests with minimum deps Windows
        shell: cmd

      - name: Install pyarrow
        run: poetry install --no-interaction -E duckdb -E cli -E parquet --with sentry-sdk
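      # pandas is deliberately absent at this point (it presumably only arrives with the
      # pipeline dependency group installed below), so the next step exercises the pyarrow-only path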
      - run: |
          poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow
        if: runner.os != 'Windows'
        name: Run pipeline tests with pyarrow but no pandas installed

      - run: |
          poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow -m "not forked"
        if: runner.os == 'Windows'
        name: Run pipeline tests with pyarrow but no pandas installed Windows
        shell: cmd
      - name: create secrets.toml for examples
        run: pwd && echo "$DLT_SECRETS_TOML" > tests/.dlt/secrets.toml

      - name: Install pipeline and sources dependencies
        run: poetry install --no-interaction -E duckdb -E cli -E parquet --with sentry-sdk --with pipeline -E deltalake -E sql_database

      # TODO: this is needed for the filesystem tests, not sure if this should be in an extra?
      - name: Install openpyxl for excel tests
        run: pip install openpyxl
      - run: |
          poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources
        if: runner.os != 'Windows'
        name: Run extract and pipeline tests Linux/MAC

      - run: |
          poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources -m "not forked"
        if: runner.os == 'Windows'
        name: Run extract and pipeline tests Windows
        shell: cmd
      # here we upgrade sqlalchemy to 2 and run the sql_database tests again
      - name: Upgrade sqlalchemy
        run: poetry run pip install sqlalchemy==2.0.32
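      # the locked environment presumably resolves to a sqlalchemy 1.4.x release; pinning
      # 2.0.32 here lets the sql_database tests below run against the 2.x line as well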
      - run: |
          poetry run pytest tests/sources/sql_database
        if: runner.os != 'Windows'
        name: Run sql_database tests with sqlalchemy 2 Linux/MAC

      - run: |
          poetry run pytest tests/sources/sql_database
        if: runner.os == 'Windows'
        name: Run sql_database tests with sqlalchemy 2 Windows
        shell: cmd
      # - name: Install Pydantic 1.0
      #   run: pip install "pydantic<2"

      # - run: |
      #     poetry run pytest tests/libs
      #   if: runner.os != 'Windows'
      #   name: Run extract and pipeline tests Linux/MAC

      # - run: |
      #     poetry run pytest tests/libs
      #   if: runner.os == 'Windows'
      #   name: Run extract tests Windows
      #   shell: cmd
  matrix_job_required_check:
    name: common | common tests
    needs: run_common
    runs-on: ubuntu-latest
    if: always()
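    # runs even when matrix legs fail or are cancelled (if: always()), presumably so that a
    # single stable check name can be marked as required regardless of the matrix expansion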
    steps:
      - name: Check matrix job results
        if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
        run: |
          echo "One or more matrix job tests failed or were cancelled. You may need to re-run them." && exit 1