feat(build): expand matrix to include building for Spark 3.4.1 (#75)

Workflow file for this run:

---
# CI workflow: builds and pushes the airflow-pipeline Docker image for every
# supported combination of Airflow / Spark / Hadoop / Scala / Java / Python /
# SQLAlchemy versions.
#
# NOTE: this file is generated from templates/ci.yml.tmpl (see the
# "Check differences between ci.yml and ci.yml.tmpl" step below, which fails
# the build if they diverge) — edit the template, not this file directly.
name: CI
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  build:
    strategy:
      matrix:
        # Explicit per-entry list (not a cross-product) because the supported
        # Python/SQLAlchemy ranges differ per Airflow version, and Scala 2.13
        # only exists for Spark >= 3.2.
        version:
          # --- Airflow 2.1 / Spark 3.1.3 (Scala 2.12) ---
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.1 / Spark 3.2.2 (Scala 2.12) ---
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.1 / Spark 3.2.2 (Scala 2.13) ---
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.1 / Spark 3.3.0 (Scala 2.12) ---
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.1 / Spark 3.3.0 (Scala 2.13) ---
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.1"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.2 / Spark 3.1.3 (Scala 2.12) ---
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.2 / Spark 3.2.2 (Scala 2.12) ---
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.2 / Spark 3.2.2 (Scala 2.13) ---
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.2 / Spark 3.3.0 (Scala 2.12) ---
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.2 / Spark 3.3.0 (Scala 2.13) ---
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.3"
          - airflow: "2.2"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 (SQLAlchemy 1.4 only) / Spark 3.1.3 (Scala 2.12) ---
          - airflow: "2.3"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.1.3"
            hadoop: "3.2.0"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 / Spark 3.2.2 (Scala 2.12) ---
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 / Spark 3.2.2 (Scala 2.13) ---
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.2.2"
            hadoop: "3.3.1"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 / Spark 3.3.0 (Scala 2.12) ---
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 / Spark 3.3.0 (Scala 2.13) ---
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.7"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.3.0"
            hadoop: "3.3.2"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 / Spark 3.4.1 (Scala 2.12) — added by this PR ---
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.12"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.12"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.12"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.12"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
          # --- Airflow 2.3 / Spark 3.4.1 (Scala 2.13) — added by this PR ---
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.13"
            java: "8"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.13"
            java: "8"
            python: "3.9"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.13"
            java: "11"
            python: "3.8"
            sqlalchemy: "1.4"
          - airflow: "2.3"
            spark: "3.4.1"
            hadoop: "3.3.4"
            scala: "2.13"
            java: "11"
            python: "3.9"
            sqlalchemy: "1.4"
    runs-on: ubuntu-latest
    env:
      IMAGE_NAME: airflow-pipeline
      # SELF_VERSION / BASE_VERSION are image-lineage tags, not tool versions.
      SELF_VERSION: "v8"
      BASE_VERSION: "v5"
      AIRFLOW_VERSION: "${{ matrix.version.airflow }}"
      SPARK_VERSION: "${{ matrix.version.spark }}"
      HADOOP_VERSION: "${{ matrix.version.hadoop }}"
      SCALA_VERSION: "${{ matrix.version.scala }}"
      JAVA_VERSION: "${{ matrix.version.java }}"
      PYTHON_VERSION: "${{ matrix.version.python }}"
      SQLALCHEMY_VERSION: "${{ matrix.version.sqlalchemy }}"
    steps:
      # Derive the full Docker tag for this matrix entry once, up front.
      - name: Set global environment variables
        run: |-
          echo "TAG_NAME=${SELF_VERSION}_${AIRFLOW_VERSION}_spark-${SPARK_VERSION}_hadoop-${HADOOP_VERSION}_scala-${SCALA_VERSION}_java-${JAVA_VERSION}_python-${PYTHON_VERSION}_sqlalchemy-${SQLALCHEMY_VERSION}" >> $GITHUB_ENV
      # NOTE(review): actions/checkout@v2 runs on a deprecated Node runtime;
      # consider bumping to a newer major (update ci.yml.tmpl as well).
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Install tera-cli
        run: |-
          wget https://github.com/guangie88/tera-cli/releases/download/v0.4.0/tera_linux_amd64 -O /tmp/tera
          chmod +x /tmp/tera
      # Re-render ci.yml from its template and fail if the committed file drifted.
      - name: Check differences between ci.yml and ci.yml.tmpl
        run: |-
          cp .github/workflows/ci.yml .github/workflows/ci.yml.backup
          TERA=/tmp/tera ./templates/apply-vars.sh
          if ! diff .github/workflows/ci.yml .github/workflows/ci.yml.backup; then echo "ci.yml.tmpl and ci.yml differs!" && exit 1; fi
      - name: Build Docker image
        run: |-
          docker build . -t "${IMAGE_NAME}:${TAG_NAME}" \
            --build-arg "BASE_VERSION=${BASE_VERSION}" \
            --build-arg "AIRFLOW_VERSION=${AIRFLOW_VERSION}" \
            --build-arg "SPARK_VERSION=${SPARK_VERSION}" \
            --build-arg "SCALA_VERSION=${SCALA_VERSION}" \
            --build-arg "JAVA_VERSION=${JAVA_VERSION}" \
            --build-arg "HADOOP_VERSION=${HADOOP_VERSION}" \
            --build-arg "PYTHON_VERSION=${PYTHON_VERSION}" \
            --build-arg "SQLALCHEMY_VERSION=${SQLALCHEMY_VERSION}"
      # Push only on direct pushes to master, never for pull requests
      # (fork PRs have no access to the Docker credentials).
      - name: Push Docker image
        run: bash push-images.sh
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
          IMAGE_ORG: ${{ secrets.IMAGE_ORG }}
        if: github.event_name == 'push'