
Bump dbt-databricks from v1.8.5 to 1.9.1 #194
Workflow file for this run

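# CI workflow: validates the dbt project against Databricks, BigQuery, and
# Snowflake connections on every pull request targeting main.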
name: CI
on:
  pull_request:
    types:
      - opened
      - synchronize
    branches:
      - main
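# Connection settings for the three warehouse targets (Databricks, BigQuery,
# Snowflake). DBT_PROFILES_DIR points at integration_tests/, so these values are
# presumably read by the profiles.yml kept there.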
env:
  DBT_PROFILES_DIR: ${{ github.workspace }}/integration_tests
  DBT_PROJECT_DIR: ${{ github.workspace }}/integration_tests
  DBT_DEFAULT_TARGET: databricks
  DEV_CATALOG_NAME: cdi_dev
  DEV_SCHEMA_NAME: ci_dbt_dag_monitoring
  DEV_HOST: ${{ secrets.DATABRICKS_HOST }}
  DEV_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
  DEV_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
  BIGQUERY_DATASET: ci_dbt_dag_monitoring
  BIGQUERY_PROJECT: indicium-sandbox
  DBT_JOB_TIMEOUT: 300
  DBT_THREADS: 16
  DBT_JOB_RETRIES: 1
  SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
  SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
  SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
  SNOWFLAKE_ROLE: INTERNAL_PRODUCTS_CICD
  SNOWFLAKE_DATABASE: SANDBOX
  SNOWFLAKE_WAREHOUSE: SANDBOX_WAREHOUSE
  SNOWFLAKE_SCHEMA: ci_dbt_dag_monitoring
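# Two jobs: dbt-checks validates connectivity and compilation against all three
# targets; integration-test builds and tests the package for each
# source/warehouse combination.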
jobs:
  dbt-checks:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Authenticate to GCP
        uses: "google-github-actions/auth@v2"
        with:
          credentials_json: "${{ secrets.BIGQUERY_AUTH }}"
      - name: Run dbt debug for Databricks
        run: dbt debug
      - name: Run dbt debug for BigQuery
        run: dbt debug --target bigquery
      - name: Run dbt debug for Snowflake
        run: dbt debug --target snowflake
      - name: dbt deps
        run: dbt deps
      - name: dbt compile
        run: dbt compile
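  # Runs the package end to end for each enabled source (Databricks, ADF, Airflow)
  # on each connection, switching sources and databases via the helper scripts in
  # integration_tests/for_CI/.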
  integration-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      # NOTE: each run step starts its own shell, so this cd does not carry over to
      # later steps; they appear to rely on DBT_PROJECT_DIR/DBT_PROFILES_DIR instead.
      - name: Enter integration tests
        run: |
          cd integration_tests/
      - name: Authenticate to GCP
        uses: "google-github-actions/auth@v2"
        with:
          credentials_json: "${{ secrets.BIGQUERY_AUTH }}"
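      # Phase 1 (Databricks connection): each source is exercised in turn: install
      # packages, load sample data (dbt seed plus run-operation macros that appear
      # to create the source tables), test the sources, and build the models. The
      # CI schema is created up front and dropped at the end of the phase.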
      - name: Run dbt integration tests for Databricks source in Databricks connection
        run: |
          dbt deps --target databricks
          dbt run-operation create_schema --args '{schema_name: ci_dbt_dag_monitoring}' --target databricks
          dbt seed --target databricks
          dbt run-operation jobs --target databricks
          dbt run-operation job_runs --target databricks
          dbt test -s source:* --target databricks
          dbt build --target databricks
      - name: Switch enabled sources for ADF source
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_adf_source.sh
      - name: Run dbt tasks for ADF source in Databricks connection
        run: |
          dbt deps
          dbt seed --target databricks
          dbt run-operation adf_pipeline_runs --target databricks
          dbt run-operation adf_triggers --target databricks
          dbt test -s source:* --target databricks
          dbt build --target databricks
      - name: Switch enabled sources for Airflow source
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_airflow_source.sh
      - name: Run dbt tasks for Airflow source in Databricks connection
        run: |
          dbt deps
          dbt seed --target databricks
          dbt test -s source:* --target databricks
          dbt build --target databricks
          dbt run-operation drop_schema --args '{schema_name: ci_dbt_dag_monitoring}'
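      # Phase 2 (BigQuery connection): change_of_database.sh appears to repoint the
      # enabled source's database from the Databricks catalog (cdi_dev) to the
      # BigQuery project (indicium-sandbox). Builds exclude the seed resource type
      # here; most sample data is loaded via run-operation macros instead.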
      - name: Change Databricks database to BigQuery database
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_databricks_source.sh
          . ${{ github.workspace }}/integration_tests/for_CI/change_of_database.sh databricks cdi_dev indicium-sandbox
      - name: Run dbt integration tests for Databricks source in BigQuery connection
        run: |
          dbt deps --target bigquery
          dbt run-operation create_schema --args '{schema_name: ci_dbt_dag_monitoring}' --target bigquery
          dbt run-operation jobs --target bigquery
          dbt run-operation job_runs --target bigquery
          dbt test -s source:* --target bigquery
          dbt build --exclude-resource-type seed --target bigquery
      - name: Switch enabled sources for ADF source
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_of_database.sh adf cdi_dev indicium-sandbox
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_adf_source.sh
      - name: Run dbt integration tests for ADF source in BigQuery connection
        run: |
          dbt deps
          dbt seed -s adf_pipelines --target bigquery
          dbt run-operation adf_activity_runs --target bigquery
          dbt run-operation adf_pipeline_runs --target bigquery
          dbt run-operation adf_triggers --target bigquery
          dbt test -s source:* --target bigquery
          dbt build --exclude-resource-type seed --target bigquery
      - name: Switch enabled sources for Airflow source
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_airflow_source.sh
          . ${{ github.workspace }}/integration_tests/for_CI/change_of_database.sh airflow cdi_dev indicium-sandbox
      - name: Run dbt tasks for Airflow source in BigQuery connection
        run: |
          dbt deps
          dbt run-operation seed__dag_run --target bigquery
          dbt run-operation seed__dag --target bigquery
          dbt run-operation seed__task_fail --target bigquery
          dbt run-operation seed__task_instance --target bigquery
          dbt test -s source:* --target bigquery
          dbt build --exclude-resource-type seed --target bigquery
          dbt run-operation drop_schema --args '{schema_name: ci_dbt_dag_monitoring}' --target bigquery
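      # Phase 3 (Snowflake connection): the Databricks and Airflow sources are
      # re-run against Snowflake, again excluding seeds from the build, and the CI
      # schema is dropped at the end.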
      - name: Change BigQuery database to Snowflake database
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_of_database.sh databricks indicium-sandbox sandbox
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_databricks_source.sh
      - name: Run dbt integration tests for Databricks source in Snowflake connection
        run: |
          dbt deps
          dbt run-operation create_schema --args '{schema_name: ci_dbt_dag_monitoring}' --target snowflake
          dbt run-operation jobs --target snowflake
          dbt run-operation job_runs --target snowflake
          dbt test -s source:* --target snowflake
          dbt build --exclude-resource-type seed --target snowflake
      - name: Switch enabled sources for Airflow source
        run: |
          . ${{ github.workspace }}/integration_tests/for_CI/change_of_database.sh airflow indicium-sandbox sandbox
          . ${{ github.workspace }}/integration_tests/for_CI/change_dbt_project_from_databricks_to_airflow.sh
      - name: Run dbt integration tests for Airflow source in Snowflake connection
        env:
          DBT_PROFILES_DIR: ${{ github.workspace }}/integration_tests
          DBT_PROJECT_DIR: ${{ github.workspace }}/integration_tests
        run: |
          dbt deps
          dbt seed -s seeds/airflow/* --target snowflake
          dbt test -s source:* --target snowflake
          dbt build --exclude-resource-type seed --target snowflake
          dbt run-operation drop_schema --args '{schema_name: ci_dbt_dag_monitoring}' --target snowflake