upload-and-deploy-to-prod-main.yaml
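# Runs on semantic version tag pushes (e.g. v1.2.3) and deploys to the prod/main namespace.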
name: upload-and-deploy-to-prod-main
on:
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+"
env:
  NAMESPACE: main
  PYTHON_VERSION: 3.9
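# Job chain: pre-commit -> upload-files -> sceptre-deploy-main -> sts-access-test.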
jobs:
  pre-commit:
    name: Run pre-commit hooks against all files
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - uses: pre-commit/action@v3.0.0
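  # Uploads pipeline artifacts to the CloudFormation templates bucket, authenticating to AWS via GitHub's OIDC token.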
  upload-files:
    name: Upload files to S3 bucket in prod/main
    runs-on: ubuntu-latest
    needs: pre-commit
    # These permissions are needed to interact with GitHub's OIDC Token endpoint.
    permissions:
      id-token: write
      contents: read
    environment: prod
    steps:
      - name: Setup code, pipenv, aws
        uses: Sage-Bionetworks/action-pipenv-aws-setup@v3
        with:
          role_to_assume: ${{ vars.AWS_CREDENTIALS_IAM_ROLE }}
          role_session_name: GitHubActions-${{ github.repository_owner }}-${{ github.event.repository.name }}-${{ github.run_id }}
          python_version: ${{ env.PYTHON_VERSION }}
      - name: Copy files to templates bucket
        run: python src/scripts/manage_artifacts/artifacts.py --upload --namespace $NAMESPACE --cfn_bucket ${{ vars.CFN_BUCKET }}
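  # Launches the prod sceptre stacks, then invokes the <namespace>-S3EventConfig Lambda to configure the S3-to-Glue trigger.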
  sceptre-deploy-main:
    name: Deploys to prod/main using sceptre
    runs-on: ubuntu-latest
    needs: [pre-commit, upload-files]
    environment: prod
    # These permissions are needed to interact with GitHub's OIDC Token endpoint.
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Setup code, pipenv, aws
        uses: Sage-Bionetworks/action-pipenv-aws-setup@v3
        with:
          role_to_assume: ${{ vars.AWS_CREDENTIALS_IAM_ROLE }}
          role_session_name: GitHubActions-${{ github.repository_owner }}-${{ github.event.repository.name }}-${{ github.run_id }}
          python_version: ${{ env.PYTHON_VERSION }}
      - name: Create directory for remote sceptre templates
        run: mkdir -p templates/remote/
      - name: Deploy sceptre stacks to prod/main
        run: pipenv run sceptre --var "namespace=${{ env.NAMESPACE }}" launch prod --yes
      - name: Configure S3 to Glue lambda with S3 trigger
        uses: gagoar/invoke-aws-lambda@v3
        with:
          AWS_ACCESS_KEY_ID: ${{ env.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
          AWS_SESSION_TOKEN: ${{ env.AWS_SESSION_TOKEN }}
          REGION: ${{ env.AWS_REGION }}
          FunctionName: ${{ env.NAMESPACE }}-S3EventConfig
          Payload: '{"RequestType": "Create"}'
          LogType: Tail
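  # Smoke-tests STS access (read_only and read_write) to the prod Synapse-linked S3 locations after deployment.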
  sts-access-test:
    name: Runs STS access tests on prod synapse folders
    runs-on: ubuntu-latest
    needs: sceptre-deploy-main
    environment: prod
    # These permissions are needed to interact with GitHub's OIDC Token endpoint.
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Setup code, pipenv, aws
        uses: Sage-Bionetworks/action-pipenv-aws-setup@v3
        with:
          role_to_assume: ${{ vars.AWS_CREDENTIALS_IAM_ROLE }}
          role_session_name: ${{ github.event.repository.name }}-${{ github.run_id }}-nonglue-unit-tests
          python_version: ${{ env.PYTHON_VERSION }}
      - name: Test prod synapse folders for STS access with pytest
        # The blank line in the folded block below keeps the two pytest invocations as separate shell commands.
        run: >
          pipenv run python -m pytest tests/test_setup_external_storage.py
          --test-bucket recover-input-data
          --test-synapse-folder-id syn51714264
          --namespace $NAMESPACE
          --test-sts-permission read_only
          -v

          pipenv run python -m pytest tests/test_setup_external_storage.py
          --test-bucket recover-processed-data
          --test-synapse-folder-id syn51406699
          --namespace $NAMESPACE/parquet
          --test-sts-permission read_write
          -v