Merge pull request #1796 from edx/hassan/python-upgrades
chore: update all jobs to use python 3.9 or greater.
HassanJaveed84 authored Dec 4, 2024
2 parents 8878e62 + f651500 commit aeb675a
Showing 11 changed files with 32 additions and 32 deletions.
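
Not part of this PR, purely a point of reference: a minimal sketch of the venv pattern these job scripts share, with a hypothetical guard that asserts the interpreter meets the new 3.9+ floor. It assumes virtualenv and a python3.9+ binary are already installed on the Jenkins worker; names like PYTHON_BIN are illustrative, not taken from the changed scripts.

#!/usr/bin/env bash
set -ex

# Hypothetical guard, not in the changed scripts: fail fast if the chosen
# interpreter is older than Python 3.9.
PYTHON_BIN="${PYTHON_BIN:-python3.9}"
"${PYTHON_BIN}" -c 'import sys; assert sys.version_info >= (3, 9), sys.version'

# The pattern the changed scripts share: a throwaway virtualenv per job run.
VENV="python_venv"
virtualenv --python="${PYTHON_BIN}" --clear "${VENV}"
source "${VENV}/bin/activate"
python --version    # should now report 3.9 or greater
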
dataeng/resources/dbt-docs.sh (8 changes: 4 additions & 4 deletions)
@@ -1,10 +1,10 @@
#!/usr/bin/env bash
set -ex

-# Creating python 3.8 virtual environment to run dbt warehouse-transform job
-PYTHON38_VENV="py38_venv"
-virtualenv --python=python3.8 --clear "${PYTHON38_VENV}"
-source "${PYTHON38_VENV}/bin/activate"
+# Creating python 3.11 virtual environment to run dbt warehouse-transform job
+PYTHON311_VENV="py311_venv"
+virtualenv --python=python3.11 --clear "${PYTHON311_VENV}"
+source "${PYTHON311_VENV}/bin/activate"

# Setup
cd $WORKSPACE/warehouse-transforms
dataeng/resources/dbt-run.sh (8 changes: 4 additions & 4 deletions)
@@ -1,10 +1,10 @@
#!/usr/bin/env bash
set -ex

-# Creating python 3.8 virtual environment to run dbt warehouse-transform job
-PYTHON38_VENV="py38_venv"
-virtualenv --python=python3.8 --clear "${PYTHON38_VENV}"
-source "${PYTHON38_VENV}/bin/activate"
+# Creating python 3.11 virtual environment to run dbt warehouse-transform job
+PYTHON311_VENV="py311_venv"
+virtualenv --python=python3.11 --clear "${PYTHON311_VENV}"
+source "${PYTHON311_VENV}/bin/activate"

# Setup to run dbt commands
cd $WORKSPACE/warehouse-transforms
dataeng/resources/dbtsource-freshness.sh (8 changes: 4 additions & 4 deletions)
@@ -1,10 +1,10 @@
#!/usr/bin/env bash
set -ex

-# Creating python 3.8 virtual environment to run dbt warehouse-transform job
-PYTHON38_VENV="py38_venv"
-virtualenv --python=python3.8 --clear "${PYTHON38_VENV}"
-source "${PYTHON38_VENV}/bin/activate"
+# Creating python 3.11 virtual environment to run dbt warehouse-transform job
+PYTHON311_VENV="py311_venv"
+virtualenv --python=python3.11 --clear "${PYTHON311_VENV}"
+source "${PYTHON311_VENV}/bin/activate"

# Setup
cd $WORKSPACE/warehouse-transforms
dataeng/resources/prefect-flows-deployment.sh (4 changes: 2 additions & 2 deletions)
@@ -1,9 +1,9 @@
#!/usr/bin/env bash
set -ex

-# Creating python3.8 virtual env
+# Creating python3.9 virtual env
PYTHON_VENV="python_venv"
-virtualenv --python=python3.8 --clear "${PYTHON_VENV}"
+virtualenv --python=python3.9 --clear "${PYTHON_VENV}"
source "${PYTHON_VENV}/bin/activate"

# Removing prefix 'prefect-flows-deployment-'
dataeng/resources/retirement-partner-report-cleanup.sh (2 changes: 1 addition & 1 deletion)
@@ -6,7 +6,7 @@ set -ex
# setting on the jenkins worker, it would be safest to keep the builds from
# clobbering each other's virtualenvs.
VENV="venv-${BUILD_NUMBER}"
-virtualenv --python=python3.8 --clear "${VENV}"
+virtualenv --python=python3.9 --clear "${VENV}"
source "${VENV}/bin/activate"

#Fetch secrets from AWS
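
Aside, not part of this PR: since these retirement jobs already isolate each build in its own venv-${BUILD_NUMBER}, one hypothetical way to keep concurrent builds from leaving stale virtualenvs on the worker is a trap-based cleanup, sketched below under the assumption that the venv lives inside the job workspace.

VENV="venv-${BUILD_NUMBER}"
virtualenv --python=python3.9 --clear "${VENV}"
# Hypothetical addition: remove this build's venv when the job exits, so
# per-build environments never pile up or collide with later runs.
trap 'rm -rf "${VENV}"' EXIT
source "${VENV}/bin/activate"
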
dataeng/resources/retirement-partner-reporter.sh (2 changes: 1 addition & 1 deletion)
@@ -12,7 +12,7 @@ env
# setting on the jenkins worker, it would be safest to keep the builds from
# clobbering each other's virtualenvs.
VENV="venv-${BUILD_NUMBER}"
-virtualenv --python=python3.8 --clear "${VENV}"
+virtualenv --python=python3.9 --clear "${VENV}"
source "${VENV}/bin/activate"

# Make sure that when we try to write unicode to the console, it
dataeng/resources/setup-exporter-email-optin.sh (2 changes: 1 addition & 1 deletion)
@@ -6,7 +6,7 @@ mkdir -p /var/lib/jenkins/tmp/analytics-exporter/course-data

# Create and activate a virtualenv in shell script
EXPORTER_VENV="exporter_venv"
-virtualenv --python=python3.8 --clear "${EXPORTER_VENV}"
+virtualenv --python=python3.9 --clear "${EXPORTER_VENV}"
source "${EXPORTER_VENV}/bin/activate"

# Install requirements into this (exporter) virtual environment
dataeng/resources/setup-exporter.sh (2 changes: 1 addition & 1 deletion)
@@ -6,7 +6,7 @@ mkdir -p /var/lib/jenkins/tmp/analytics-exporter/course-data

# Create and activate a virtualenv in shell script
EXPORTER_VENV="exporter_venv"
-virtualenv --python=python3.8 --clear "${EXPORTER_VENV}"
+virtualenv --python=python3.9 --clear "${EXPORTER_VENV}"
source "${EXPORTER_VENV}/bin/activate"

# Install requirements into this (exporter) virtual environment
dataeng/resources/user-retirement-bulk-status.sh (2 changes: 1 addition & 1 deletion)
@@ -12,7 +12,7 @@ env
# setting on the jenkins worker, it would be safest to keep the builds from
# clobbering each other's virtualenvs.
VENV="venv-${BUILD_NUMBER}"
-virtualenv --python=python3.8 --clear "${VENV}"
+virtualenv --python=python3.9 --clear "${VENV}"
source "${VENV}/bin/activate"

# Make sure that when we try to write unicode to the console, it
dataeng/resources/warehouse-transforms-ci-manual.sh (8 changes: 4 additions & 4 deletions)
@@ -1,10 +1,10 @@
#!/usr/bin/env bash
set -ex

-# Creating python 3.8 virtual environment to run dbt warehouse-transform job
-PYTHON38_VENV="py38_venv"
-virtualenv --python=python3.8 --clear "${PYTHON38_VENV}"
-source "${PYTHON38_VENV}/bin/activate"
+# Creating python 3.11 virtual environment to run dbt warehouse-transform job
+PYTHON311_VENV="py311_venv"
+virtualenv --python=python3.11 --clear "${PYTHON311_VENV}"
+source "${PYTHON311_VENV}/bin/activate"

# Specifying GITHUB_PR_ID and WAREHOUSE_TRANSFORMS_BRANCH is a must
if [[ "$GITHUB_PR_ID" == "" || "$WAREHOUSE_TRANSFORMS_BRANCH" == "" ]]
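
The hunk above is truncated, so the body of that guard is not shown in this diff. For illustration only, a hypothetical completion of the check might look like the sketch below; the script's actual error handling may differ.

if [[ "$GITHUB_PR_ID" == "" || "$WAREHOUSE_TRANSFORMS_BRANCH" == "" ]]
then
    # Hypothetical body: refuse to run without the required parameters.
    echo "GITHUB_PR_ID and WAREHOUSE_TRANSFORMS_BRANCH must both be set" >&2
    exit 1
fi
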
dataeng/resources/warehouse-transforms-ci-master-merges.sh (18 changes: 9 additions & 9 deletions)
@@ -1,10 +1,10 @@
#!/usr/bin/env bash
set -ex

-# Creating python 3.8 virtual environment to run dbt warehouse-transform job
-PYTHON38_VENV="py38_venv"
-virtualenv --python=python3.8 --clear "${PYTHON38_VENV}"
-source "${PYTHON38_VENV}/bin/activate"
+# Creating python 3.11 virtual environment to run dbt warehouse-transform job
+PYTHON311_VENV="py311_venv"
+virtualenv --python=python3.11 --clear "${PYTHON311_VENV}"
+source "${PYTHON311_VENV}/bin/activate"

# Setup to run python script to create snowflake schema
cd $WORKSPACE/analytics-tools/snowflake
@@ -44,24 +44,24 @@ then
# Schema_Name will be the Github commit ID, e.g. 1724, prefixed with 'merged' and suffixed with the project name, e.g. merged_1724_reporting
export CI_SCHEMA_NAME=merged_${COMMIT_ID}_reporting
# Schema is dynamically created against each run.
-# profiles.yml contains the name of Schema which is used to create output models when dbt runs.
-python create_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME
+# profiles.yml contains the name of Schema which is used to create output models when dbt runs.
+python create_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME
# create_ci_schema.py does not just create the schema; it also drops it if it already exists. If a dbt model changes tables that were created by the seed job, the run
# would fail, so dropping those tables (or the whole schema) first avoids that failure. We noticed that while create_ci_schema was still running, the dbt commands below
# were already starting, since they use different sessions (warehouse and users); the sleep below gives the drop-and-create operation time to complete before dbt runs.
sleep 10s
DBT_PROJECT_PATH='reporting'
# Full dbt run on merges to master CI (Might decide to run Slim CI in future)
-DBT_RUN_OPTIONS=''
+DBT_RUN_OPTIONS=''
DBT_RUN_EXCLUDE='' ## Add excluded models here if any
# Full dbt test on merges to master CI (Might decide to run Slim CI in future)
DBT_TEST_OPTIONS=''
-DBT_TEST_EXCLUDE=''
+DBT_TEST_EXCLUDE=''

source $WORKSPACE/jenkins-job-dsl/dataeng/resources/warehouse-transforms-ci-dbt.sh

cd $WORKSPACE/analytics-tools/snowflake
-python remove_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME
+python remove_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME

fi

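
Read end to end, the second hunk of this file boils down to the flow below. It is a condensed restatement of the lines shown above, not new behavior, and it assumes the job's environment variables, the analytics-tools checkout, and the warehouse-transforms-ci-dbt.sh helper are all in place as they are in the real job.

# One schema per merge to master.
export CI_SCHEMA_NAME="merged_${COMMIT_ID}_reporting"

# Drop the CI schema if it already exists, then recreate it.
python create_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME

# Give the drop/create time to finish before dbt starts in a separate session.
sleep 10s

# dbt run + dbt test against the freshly created schema.
source $WORKSPACE/jenkins-job-dsl/dataeng/resources/warehouse-transforms-ci-dbt.sh

# Tear the CI schema down once the run is finished.
python remove_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME
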
