diff --git a/.github/dependabot.yml b/.github/dependabot.yml index a107609a1..9c253b318 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,4 +10,4 @@ updates: interval: "weekly" labels: - "dependencies" - - "skip changelog" + - "skip changelog" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 552d856ee..fc9f00a06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ### Added - Rust Quickstarter with Axum web framework simple boilerplate ([#980](https://github.com/opendevstack/ods-quickstarters/issues/980)) +- Added ETL pipeline testing QS (e2e-etl-python) ([#985](https://github.com/opendevstack/ods-quickstarters/pull/985)) - Update gateway-Nginx quickstarter ([#983](https://github.com/opendevstack/ods-quickstarters/pull/983)) - Added secret scanning in docker plain ([#963](https://github.com/opendevstack/ods-quickstarters/pull/963)) - Added Nodejs20 agent ([#962](https://github.com/opendevstack/ods-quickstarters/issues/962)) diff --git a/docs/modules/quickstarters/nav.adoc b/docs/modules/quickstarters/nav.adoc index e8a0583a7..600b9ba4e 100644 --- a/docs/modules/quickstarters/nav.adoc +++ b/docs/modules/quickstarters/nav.adoc @@ -13,6 +13,7 @@ ** xref:quickstarters:ds-rshiny.adoc[Data Science RShiny app] ** xref:quickstarters:ds-streamlit.adoc[Data Science Streamlit app] ** xref:quickstarters:e2e-cypress.adoc[Cypress E2E testing] +** xref:quickstarters:e2e-etl-python.adoc[ETL Python E2E testing] ** xref:quickstarters:e2e-spock-geb.adoc[Spock, Geb and Unirest E2E testing] ** xref:quickstarters:inf-terraform-aws.adoc[INF Terraform AWS] ** xref:quickstarters:inf-terraform-azure.adoc[INF Terraform AZURE] diff --git a/docs/modules/quickstarters/pages/e2e-etl-python.adoc b/docs/modules/quickstarters/pages/e2e-etl-python.adoc new file mode 100644 index 000000000..3902da385 --- /dev/null +++ b/docs/modules/quickstarters/pages/e2e-etl-python.adoc @@ -0,0 +1,46 @@ += End-to-end tests with Great Expectations and Pytest 
(e2e-etl-python) + +End-to-end tests for ETLs quickstarter project + +== Purpose of this quickstarter + +This is a python based quickstarter intended to develop end-to-end tests for data pipelines. +In order to do that it uses two testing technologies: + 1. Great Expectations, meant for testing data transformations within relational tables. + e.g.: You could test the schema of a database, the number of rows, that a specific column has no null values, etc + 2. Pytest together with Boto, which allows for testing ETL triggers, notification systems, content of S3 buckets, etc + +== What files / architecture is generated? + +---- +├── Jenkinsfile - This file contains Jenkins stages. +├── README.md +├── environments +│ ├── dev.json - This file describes parameters for the development AWS environment. +│ ├── test.json - This file describes parameters for the test AWS environment. +│ └── prod.json - This file describes parameters for the production AWS environment. +├── tests - This folder contains the root for test-kitchen +│ ├── acceptance/great_expectations - This folder contains the Great Expectations tests +│ └── acceptance/pytest - This folder contains the pytest tests + + +---- + +== Frameworks used + +* https://greatexpectations.io[Great-expectations] +* https://pytest.org[Pytest] + + +== Usage - how do you start after you provisioned this quickstarter + +Check the README.md file at root level for further instructions after the quickstarter has been provisioned. + + +== Builder agent used + +This quickstarter uses https://github.com/opendevstack/ods-quickstarters/tree/master/common/jenkins-agents/terraform[terraform] Jenkins agent. + +== Known limitations + +Let us know if you find any, thanks! 
diff --git a/docs/modules/quickstarters/pages/index.adoc b/docs/modules/quickstarters/pages/index.adoc index 0a447eaa9..284808a55 100644 --- a/docs/modules/quickstarters/pages/index.adoc +++ b/docs/modules/quickstarters/pages/index.adoc @@ -42,6 +42,7 @@ Quickstarters are used from the https://github.com/opendevstack/ods-provisioning === E2E Test Quickstarter * xref::e2e-cypress.adoc[E2E test - Cypress] * xref::e2e-spock-geb.adoc[E2E test - Spock / Geb] +* xref::e2e-etl-python.adoc[E2E test - ETL Python] === Infrastructure Terraform Quickstarter * xref::inf-terraform-aws.adoc[AWS deployments utilizing terraform tooling] diff --git a/e2e-etl-python/Jenkinsfile b/e2e-etl-python/Jenkinsfile new file mode 100644 index 000000000..39b9f3744 --- /dev/null +++ b/e2e-etl-python/Jenkinsfile @@ -0,0 +1,48 @@ +def odsNamespace = '' +def odsGitRef = '' +def odsImageTag = '' +def sharedLibraryRef = '' +def agentImageTag = '' + +node { + odsNamespace = env.ODS_NAMESPACE ?: 'ods' + odsGitRef = env.ODS_GIT_REF ?: 'master' + odsImageTag = env.ODS_IMAGE_TAG ?: 'latest' + sharedLibraryRef = env.SHARED_LIBRARY_REF ?: odsImageTag + agentImageTag = env.AGENT_IMAGE_TAG ?: odsImageTag +} + +library("ods-jenkins-shared-library@${sharedLibraryRef}") + +odsQuickstarterPipeline( + imageStreamTag: "${odsNamespace}/jenkins-agent-base:${agentImageTag}", +) { context -> + + odsQuickstarterStageCopyFiles(context) + + odsQuickstarterStageRenderJenkinsfile(context) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'dev.yml.template', + target: 'environments/dev.yml'] + ) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'test.yml.template', + target: 'environments/test.yml'] + ) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'prod.yml.template', + target: 'environments/prod.yml'] + ) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'testing.yml.template', + target: 'environments/testing.yml'] + ) +} \ No newline at end of file diff --git 
a/e2e-etl-python/Jenkinsfile.template b/e2e-etl-python/Jenkinsfile.template new file mode 100644 index 000000000..88cef29b9 --- /dev/null +++ b/e2e-etl-python/Jenkinsfile.template @@ -0,0 +1,183 @@ +/* generated jenkins file used for building and deploying AWS-infrastructure in projects */ + +@Library('ods-jenkins-shared-library@@shared_library_ref@') _ + +node { + aws_region = env.AWS_REGION ?: 'eu-west-1' + dockerRegistry = env.DOCKER_REGISTRY +} + +odsComponentPipeline( + podContainers: [ + containerTemplate( + name: 'jnlp', + image: "${dockerRegistry}/ods/jenkins-agent-terraform-2306:@shared_library_ref@", + envVars: [ + envVar(key: 'AWS_REGION', value: aws_region) + ], + alwaysPullImage: true, + args: '${computer.jnlpmac} ${computer.name}' + ) + ], + branchToEnvironmentMapping: [ + '*': 'dev', + // 'release/': 'test' + ] +) { context -> + getEnvironment(context) + addVars2envJsonFile(context) + odsComponentStageInfrastructure(context, [cloudProvider: 'AWS']) + + withEnv(["AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}", + "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}" + ]) + { + stage ("AWS Testing Preparation"){ + generateTerraformOutputsFile() + } + + def outputNames = stageGetNamesFromOutputs() + def aws_pipelineName = outputNames.aws_codepipeline_name + def bitbuckets3_name = outputNames.bitbuckets3_name + def results3_name = outputNames.results3_name + + stage ("Publish Bitbucket Code To AWS"){ + publishBitbucketCodeToAWS(context, bitbuckets3_name) + } + + stage ("Run Tests"){ + awsCodePipelineTrigger(context, aws_pipelineName) + awsCodePipelineWaitForExecution(context, aws_pipelineName) + } + + stage ("Test Results"){ + retrieveReportsFromAWS(context, results3_name) + archiveArtifacts artifacts: "build/test-results/test/**", allowEmptyArchive: true + junit(testResults:'build/test-results/test/*.xml', allowEmptyResults: true) + stash(name: "acceptance-test-reports-junit-xml-${context.componentId}-${context.buildNumber}", includes: 
"build/test-results/test/acceptance*junit.xml", allowEmpty: true) + stash(name: "installation-test-reports-junit-xml-${context.componentId}-${context.buildNumber}", includes: "build/test-results/test/installation*junit.xml", allowEmpty: true) + stash(name: "integration-test-reports-junit-xml-${context.componentId}-${context.buildNumber}", includes: "build/test-results/test/integration*junit.xml", allowEmpty: true) + } + } + +} + +def getEnvironment(def context){ + sh "echo Get Environment Variables" + AWS_ACCESS_KEY_ID = sh(returnStdout: true, script:"oc get secret aws-access-key-id-${context.environment} --namespace ${context.cdProject} --output jsonpath='{.data.secrettext}' | base64 -d") + AWS_SECRET_ACCESS_KEY = sh(returnStdout: true, script:"oc get secret aws-secret-access-key-${context.environment} --namespace ${context.cdProject} --output jsonpath='{.data.secrettext}' | base64 -d") + +} + + +def generateTerraformOutputsFile() { + sh 'terraform output -json > terraform_outputs.json' + sh 'cat terraform_outputs.json' +} + +def stageGetNamesFromOutputs() { + def outputNames = [:] + def terraformOutputJson = readJSON file: 'terraform_outputs.json' + + outputNames.aws_codepipeline_name = terraformOutputJson.codepipeline_name.value + outputNames.bitbuckets3_name = terraformOutputJson.bitbucket_s3bucket_name.value + outputNames.results3_name = terraformOutputJson.e2e_results_bucket_name.value + + return outputNames +} + +def awsCodePipelineTrigger(def context, pipelineName) { + sh "aws codepipeline start-pipeline-execution --name ${pipelineName}" +} + + +def awsCodePipelineWaitForExecution(def context, pipelineName) { + def pipelineExecutionStatus = '' + + while (true) { + pipelineExecutionStatus = '' + sleep(time: 40, unit: 'SECONDS') + def pipelineState = sh( + script: "aws codepipeline get-pipeline-state --name ${pipelineName} --query 'stageStates[*]' --output json", + returnStdout: true + ).trim() + + def pipelineStages = readJSON(text: pipelineState) + + 
pipelineStages.each { stage -> + def stageName = stage.stageName + def stageStatus = stage.latestExecution.status + echo "Stage: ${stageName}, Status: ${stageStatus}" + + if (stageStatus == 'InProgress') { + pipelineExecutionStatus = 'InProgress' + return + } else if (stageStatus == 'Failed') { + pipelineExecutionStatus = 'Failed' + echo "Pipeline execution failed at stage ${stageName}" + error("Pipeline execution failed at stage ${stageName}") + return + } + } + + if (pipelineExecutionStatus == 'InProgress') { + continue + } else if (pipelineExecutionStatus == 'Failed') { + echo "Pipeline execution failed at stage ${stageName}" + break + } else { + echo 'Pipeline execution completed successfully.' + break + } + } +} + + + +def publishBitbucketCodeToAWS(def context, bitbuckets3_name) { + def branch = context.gitBranch + def repository = context.componentId + zip zipFile: "${repository}-${branch}.zip", archive: false, dir: '.' + sh " aws s3 cp ${repository}-${branch}.zip s3://${bitbuckets3_name}/${repository}-${branch}.zip" +} + +def retrieveReportsFromAWS(def context, results3_name) { + sh "aws s3 cp s3://${results3_name}/junit/acceptance_GX_junit.xml ./build/test-results/test/acceptance_GX_junit.xml" + sh "aws s3 cp s3://${results3_name}/junit/acceptance_pytest_junit.xml ./build/test-results/test/acceptance_pytest_junit.xml" + sh "aws s3 cp s3://${results3_name}/junit/installation_pytest_junit.xml ./build/test-results/test/installation_pytest_junit.xml" + sh "aws s3 cp s3://${results3_name}/junit/integration_pytest_junit.xml ./build/test-results/test/integration_pytest_junit.xml" + + sh "aws s3 cp s3://${results3_name}/GX_test_results ./build/test-results/test/artifacts/acceptance/acceptance_GX_report --recursive" + sh "aws s3 cp s3://${results3_name}/GX_jsons ./build/test-results/test/artifacts/acceptance/GX_jsons --recursive" + sh "aws s3 cp s3://${results3_name}/pytest_results/acceptance/acceptance_allure_report_complete.html 
./build/test-results/test/artifacts/acceptance/acceptance_pytest_report.html" + sh "aws s3 cp s3://${results3_name}/pytest_results/installation/installation_allure_report_complete.html ./build/test-results/test/artifacts/installation/installation_pytest_report.html" + sh "aws s3 cp s3://${results3_name}/pytest_results/integration/integration_allure_report_complete.html ./build/test-results/test/artifacts/integration/integration_pytest_report.html" + + sh "ls build/test-results/test" +} + +def addVars2envJsonFile(def context) { + echo "Starting addVars2envJsonFile" + def environment = context.environment + def projectId = context.projectId + def branch_name = context.gitBranch + def repository = context.componentId + def filePath = "./environments/${environment}.json" + + def existingJson = readFile file: filePath + def existingData = readJSON text: existingJson + + existingData.environment = environment + existingData.projectId = projectId + existingData.aws_region = aws_region + existingData.repository = repository + existingData.branch_name = branch_name + + echo "Environment: ${existingData}" + + def updatedJson = groovy.json.JsonOutput.toJson(existingData) + writeFile file: filePath, text: updatedJson + + echo "Finishing addVars2envJsonFile" +} + diff --git a/e2e-etl-python/README.md b/e2e-etl-python/README.md new file mode 100644 index 000000000..0e8c848bb --- /dev/null +++ b/e2e-etl-python/README.md @@ -0,0 +1,5 @@ +# e2e-etl-python Quickstarter (e2e-etl-python) + +Documentation is located in our [official documentation](https://www.opendevstack.org/ods-documentation/opendevstack/latest/getting-started/index.html) + +Please update documentation in the [antora page directory](https://github.com/opendevstack/ods-quickstarters/tree/master/docs/modules/quickstarters/pages) diff --git a/e2e-etl-python/dev.yml.template b/e2e-etl-python/dev.yml.template new file mode 100644 index 000000000..9307c56ec --- /dev/null +++ b/e2e-etl-python/dev.yml.template @@ -0,0 +1,7 
@@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-dev + secret: @project_id@-cd-aws-secret-access-key-dev + +account: "" diff --git a/e2e-etl-python/files/.editorconfig b/e2e-etl-python/files/.editorconfig new file mode 100644 index 000000000..147abfb08 --- /dev/null +++ b/e2e-etl-python/files/.editorconfig @@ -0,0 +1,19 @@ +# EditorConfig is awesome: http://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +trim_trailing_whitespace = false ; trimming trailing whitespace may break Markdown + +[Makefile] +tab_width = 2 +indent_style = tab diff --git a/e2e-etl-python/files/.gitignore b/e2e-etl-python/files/.gitignore new file mode 100644 index 000000000..df45f2c18 --- /dev/null +++ b/e2e-etl-python/files/.gitignore @@ -0,0 +1,20 @@ +.bundle +.kitchen +.terraform +.terraform.lock.hcl +.terraform-data.json +.vscode +.devcontainer/devcontainer.json +*.auto.tfvars* +inspec.lock +outputs.json +terraform.tfvars* +terraform.tfstate* +tfplan +vendor +test/integration/*/files/*.json +test/integration/*/files/*.yml +reports/install/* +!reports/install/.gitkeep +Pipfile.lock +.venv diff --git a/e2e-etl-python/files/.pre-commit-config.yaml b/e2e-etl-python/files/.pre-commit-config.yaml new file mode 100644 index 000000000..aee89823b --- /dev/null +++ b/e2e-etl-python/files/.pre-commit-config.yaml @@ -0,0 +1,83 @@ +exclude: '.terraform' +fail_fast: true + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks.git + rev: v4.4.0 + hooks: + - id: check-added-large-files + args: ['--maxkb=5000'] + - id: check-byte-order-marker + - id: check-case-conflict + - id: check-json + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + args: [--allow-multiple-documents, --unsafe] + - id: detect-aws-credentials + args: [--allow-missing-credentials] + - id: detect-private-key + 
- id: mixed-line-ending + args: [--fix=lf] + - id: pretty-format-json + exclude: ^.devcontainer/.*$ + args: [--autofix, --indent=2, --no-ensure-ascii] + - id: trailing-whitespace + +- repo: https://github.com/psf/black.git + rev: 23.3.0 + hooks: + - id: black + args: [--line-length=90] + files: (\.py)$ + +- repo: https://github.com/PyCQA/flake8.git + rev: 6.0.0 + hooks: + - id: flake8 + args: [--max-line-length=90] + files: (\.py)$ + +- repo: https://github.com/awslabs/cfn-python-lint + rev: v0.77.5 + hooks: + - id: cfn-python-lint + files: cfn-templates/.*\.(json|yml|yaml)$ + +- repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.79.1 + hooks: + - id: terraform_fmt + +- repo: https://github.com/nichtraunzer/ods-pre-commit-hooks + rev: v0.4.2 + hooks: + - id: terraformcreatei2o + files: (\.tf)$ + - id: terraformstackmoduleoutputs + files: (\.tf)$ + +- repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.79.1 + hooks: + - id: terraform_fmt + - id: terraform_docs + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: rubocop + args: + - --fix-layout + +- repo: local + hooks: + - id: test + name: make test + description: Run integration tests. 
+ entry: make test + language: system + files: (\.tf|\.rb)$ + pass_filenames: false + verbose: true + diff --git a/e2e-etl-python/files/.rubocop.yml b/e2e-etl-python/files/.rubocop.yml new file mode 100644 index 000000000..c09f7c5c1 --- /dev/null +++ b/e2e-etl-python/files/.rubocop.yml @@ -0,0 +1,4 @@ +--- +Layout/LineLength: + Max: 150 + diff --git a/e2e-etl-python/files/.ruby-version b/e2e-etl-python/files/.ruby-version new file mode 100644 index 000000000..be94e6f53 --- /dev/null +++ b/e2e-etl-python/files/.ruby-version @@ -0,0 +1 @@ +3.2.2 diff --git a/e2e-etl-python/files/.terraform-version b/e2e-etl-python/files/.terraform-version new file mode 100644 index 000000000..c514bd85c --- /dev/null +++ b/e2e-etl-python/files/.terraform-version @@ -0,0 +1 @@ +1.4.6 diff --git a/e2e-etl-python/files/Gemfile b/e2e-etl-python/files/Gemfile new file mode 100644 index 000000000..a809dff57 --- /dev/null +++ b/e2e-etl-python/files/Gemfile @@ -0,0 +1,19 @@ +source 'https://rubygems.org' do + gem 'activesupport' + gem 'irb' + gem 'kitchen-terraform', '~> 7.0' + gem 'test-kitchen' + gem 'rspec-retry' + gem 'aws-sdk', '~> 3' +end + +source 'https://packagecloud.io/cinc-project/stable' do + gem 'inspec-core' + gem 'inspec', '~> 5.21' + gem 'cinc-auditor-bin' + gem 'unf_ext' + gem 'chef-config' + gem 'chef-utils' + gem 'mixlib-install' + gem 'mixlib-versioning' +end diff --git a/e2e-etl-python/files/Gemfile.lock b/e2e-etl-python/files/Gemfile.lock new file mode 100644 index 000000000..2c90c4b6a --- /dev/null +++ b/e2e-etl-python/files/Gemfile.lock @@ -0,0 +1,1883 @@ +GEM + remote: https://packagecloud.io/cinc-project/stable/ + specs: + chef-config (18.2.7) + addressable + chef-utils (= 18.2.7) + fuzzyurl + mixlib-config (>= 2.2.12, < 4.0) + mixlib-shellout (>= 2.0, < 4.0) + tomlrb (~> 1.2) + chef-utils (18.2.7) + concurrent-ruby + cinc-auditor-bin (5.21.29) + inspec (= 5.21.29) + inspec (5.21.29) + cookstyle + faraday_middleware (>= 0.12.2, < 1.1) + inspec-core (= 5.21.29) + 
mongo (= 2.13.2) + progress_bar (~> 1.3.3) + rake + train (~> 3.10) + train-aws (~> 0.2) + train-habitat (~> 0.1) + train-winrm (~> 0.2) + inspec-core (5.21.29) + addressable (~> 2.4) + chef-telemetry (~> 1.0, >= 1.0.8) + faraday (>= 1, < 3) + faraday-follow_redirects (~> 0.3) + hashie (>= 3.4, < 5.0) + license-acceptance (>= 0.2.13, < 3.0) + method_source (>= 0.8, < 2.0) + mixlib-log (~> 3.0) + multipart-post (~> 2.0) + parallel (~> 1.9) + parslet (>= 1.5, < 2.0) + pry (~> 0.13) + rspec (>= 3.9, <= 3.11) + rspec-its (~> 1.2) + rubyzip (>= 1.2.2, < 3.0) + semverse (~> 3.0) + sslshake (~> 1.2) + thor (>= 0.20, < 2.0) + tomlrb (>= 1.2, < 2.1) + train-core (~> 3.10) + tty-prompt (~> 0.17) + tty-table (~> 0.10) + mixlib-install (3.12.27) + mixlib-shellout + mixlib-versioning + thor + mixlib-versioning (1.2.12) + unf_ext (0.0.7.2) + +GEM + remote: https://rubygems.org/ + specs: + activesupport (7.0.4.3) + concurrent-ruby (~> 1.0, >= 1.0.2) + i18n (>= 1.6, < 2) + minitest (>= 5.1) + tzinfo (~> 2.0) + addressable (2.8.4) + public_suffix (>= 2.0.2, < 6.0) + ast (2.4.2) + aws-eventstream (1.2.0) + aws-partitions (1.763.0) + aws-sdk (3.1.0) + aws-sdk-resources (~> 3) + aws-sdk-accessanalyzer (1.34.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-account (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-acm (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-acmpca (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-alexaforbusiness (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-amplify (1.32.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-amplifybackend (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-amplifyuibuilder (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apigateway (1.81.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apigatewaymanagementapi (1.32.0) + 
aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apigatewayv2 (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appconfig (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appconfigdata (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appflow (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appintegrationsservice (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationautoscaling (1.51.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationcostprofiler (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationdiscoveryservice (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationinsights (1.33.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appmesh (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appregistry (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apprunner (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appstream (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appsync (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-arczonalshift (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-athena (1.65.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-auditmanager (1.31.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-augmentedairuntime (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-autoscaling (1.63.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-autoscalingplans (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-backup (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-backupgateway (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-backupstorage (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-batch (1.47.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-billingconductor (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-braket (1.21.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-budgets (1.52.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chime (1.71.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkidentity (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkmediapipelines (1.5.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkmeetings (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkmessaging (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkvoice (1.5.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cleanrooms (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloud9 (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudcontrolapi (1.10.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-clouddirectory (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudformation (1.77.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudfront (1.76.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudhsm (1.41.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudhsmv2 (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudsearch (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudsearchdomain (1.34.1) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudtrail (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudtraildata (1.0.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-cloudwatch (1.73.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchevents (1.46.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchevidently (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchlogs (1.62.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchrum (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codeartifact (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codebuild (1.90.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codecatalyst (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sdk-codecommit (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codedeploy (1.52.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codeguruprofiler (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codegurureviewer (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codepipeline (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codestar (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codestarconnections (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codestarnotifications (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cognitoidentity (1.31.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-cognitoidentityprovider (1.53.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-cognitosync (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-comprehend (1.68.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-comprehendmedical (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-computeoptimizer (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-configservice (1.90.0) + aws-sdk-core (~> 3, >= 
3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connect (1.106.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectcampaignservice (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectcases (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectcontactlens (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectparticipant (1.29.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectwisdomservice (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-controltower (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-core (3.172.0) + aws-eventstream (~> 1, >= 1.0.2) + aws-partitions (~> 1, >= 1.651.0) + aws-sigv4 (~> 1.5) + jmespath (~> 1, >= 1.6.1) + aws-sdk-costandusagereportservice (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-costexplorer (1.83.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-customerprofiles (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-databasemigrationservice (1.53.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-dataexchange (1.33.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-datapipeline (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-datasync (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dax (1.41.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-detective (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-devicefarm (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-devopsguru (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-directconnect (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-directoryservice (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dlm (1.56.0) + aws-sdk-core (~> 3, >= 
3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-docdb (1.47.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-docdbelastic (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-drs (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dynamodb (1.84.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dynamodbstreams (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ebs (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ec2 (1.379.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ec2instanceconnect (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ecr (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ecrpublic (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ecs (1.118.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-efs (1.60.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-eks (1.83.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticache (1.85.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticbeanstalk (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticinference (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticloadbalancing (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticloadbalancingv2 (1.84.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticsearchservice (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elastictranscoder (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-emr (1.53.0) + aws-sdk-core (~> 3, >= 3.121.2) + aws-sigv4 (~> 1.1) + aws-sdk-emrcontainers (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-emrserverless (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-eventbridge (1.24.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-finspace (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-finspacedata (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-firehose (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-fis (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-fms (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-forecastqueryservice (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-forecastservice (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-frauddetector (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-fsx (1.65.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-gamelift (1.62.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-gamesparks (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-glacier (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-globalaccelerator (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-glue (1.88.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-gluedatabrew (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-greengrass (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-greengrassv2 (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-groundstation (1.33.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-guardduty (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-health (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-healthlake (1.15.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-honeycode (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iam (1.77.0) + aws-sdk-core (~> 3, >= 
3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-identitystore (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-imagebuilder (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-importexport (1.36.1) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv2 (~> 1.0) + aws-sdk-inspector (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-inspector2 (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-internetmonitor (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iot (1.104.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iot1clickdevicesservice (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iot1clickprojects (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotanalytics (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotdataplane (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotdeviceadvisor (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotevents (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ioteventsdata (1.29.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotfleethub (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotfleetwise (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotjobsdataplane (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotroborunner (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotsecuretunneling (1.23.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotsitewise (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotthingsgraph (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iottwinmaker (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotwireless (1.32.0) + 
aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ivs (1.29.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ivschat (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ivsrealtime (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kafka (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kafkaconnect (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kendra (1.65.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kendraranking (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-keyspaces (1.6.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesis (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisanalytics (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisanalyticsv2 (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideo (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideoarchivedmedia (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideomedia (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideosignalingchannels (1.21.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideowebrtcstorage (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kms (1.64.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lakeformation (1.34.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lambda (1.96.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lambdapreview (1.36.1) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lex (1.47.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lexmodelbuildingservice (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lexmodelsv2 
(1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lexruntimev2 (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-licensemanager (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-licensemanagerlinuxsubscriptions (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-licensemanagerusersubscriptions (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lightsail (1.75.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-locationservice (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lookoutequipment (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lookoutforvision (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lookoutmetrics (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-machinelearning (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-macie (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-macie2 (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mainframemodernization (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-managedblockchain (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-managedgrafana (1.14.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplacecatalog (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplacecommerceanalytics (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplaceentitlementservice (1.37.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplacemetering (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediaconnect (1.48.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediaconvert (1.105.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-medialive (1.98.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediapackage (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediapackagevod (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediastore (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediastoredata (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediatailor (1.61.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-memorydb (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mgn (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhub (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhubconfig (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhuborchestrator (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhubrefactorspaces (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhubstrategyrecommendations (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mobile (1.37.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mq (1.40.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-mturk (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mwaa (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-neptune (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-networkfirewall (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-networkmanager (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-nimblestudio (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-oam (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-omics (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-opensearchserverless (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opensearchservice (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opsworks (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opsworkscm (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-organizations (1.59.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-osis (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-outposts (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-panorama (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-personalize (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-personalizeevents (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-personalizeruntime (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pi (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpoint (1.71.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpointemail (1.37.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpointsmsvoice (1.34.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpointsmsvoicev2 (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pipes (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-polly (1.66.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pricing (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-privatenetworks (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-prometheusservice (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-proton (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-qldb (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-qldbsession (1.24.0) 
+ aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-quicksight (1.79.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ram (1.26.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-rds (1.178.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-rdsdataservice (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-recyclebin (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-redshift (1.91.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-redshiftdataapiservice (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-redshiftserverless (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-rekognition (1.78.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resiliencehub (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resourceexplorer2 (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resourcegroups (1.48.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resourcegroupstaggingapi (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resources (3.163.0) + aws-sdk-accessanalyzer (~> 1) + aws-sdk-account (~> 1) + aws-sdk-acm (~> 1) + aws-sdk-acmpca (~> 1) + aws-sdk-alexaforbusiness (~> 1) + aws-sdk-amplify (~> 1) + aws-sdk-amplifybackend (~> 1) + aws-sdk-amplifyuibuilder (~> 1) + aws-sdk-apigateway (~> 1) + aws-sdk-apigatewaymanagementapi (~> 1) + aws-sdk-apigatewayv2 (~> 1) + aws-sdk-appconfig (~> 1) + aws-sdk-appconfigdata (~> 1) + aws-sdk-appflow (~> 1) + aws-sdk-appintegrationsservice (~> 1) + aws-sdk-applicationautoscaling (~> 1) + aws-sdk-applicationcostprofiler (~> 1) + aws-sdk-applicationdiscoveryservice (~> 1) + aws-sdk-applicationinsights (~> 1) + aws-sdk-appmesh (~> 1) + aws-sdk-appregistry (~> 1) + aws-sdk-apprunner (~> 1) + aws-sdk-appstream (~> 1) + aws-sdk-appsync (~> 1) + aws-sdk-arczonalshift (~> 
1) + aws-sdk-athena (~> 1) + aws-sdk-auditmanager (~> 1) + aws-sdk-augmentedairuntime (~> 1) + aws-sdk-autoscaling (~> 1) + aws-sdk-autoscalingplans (~> 1) + aws-sdk-backup (~> 1) + aws-sdk-backupgateway (~> 1) + aws-sdk-backupstorage (~> 1) + aws-sdk-batch (~> 1) + aws-sdk-billingconductor (~> 1) + aws-sdk-braket (~> 1) + aws-sdk-budgets (~> 1) + aws-sdk-chime (~> 1) + aws-sdk-chimesdkidentity (~> 1) + aws-sdk-chimesdkmediapipelines (~> 1) + aws-sdk-chimesdkmeetings (~> 1) + aws-sdk-chimesdkmessaging (~> 1) + aws-sdk-chimesdkvoice (~> 1) + aws-sdk-cleanrooms (~> 1) + aws-sdk-cloud9 (~> 1) + aws-sdk-cloudcontrolapi (~> 1) + aws-sdk-clouddirectory (~> 1) + aws-sdk-cloudformation (~> 1) + aws-sdk-cloudfront (~> 1) + aws-sdk-cloudhsm (~> 1) + aws-sdk-cloudhsmv2 (~> 1) + aws-sdk-cloudsearch (~> 1) + aws-sdk-cloudsearchdomain (~> 1) + aws-sdk-cloudtrail (~> 1) + aws-sdk-cloudtraildata (~> 1) + aws-sdk-cloudwatch (~> 1) + aws-sdk-cloudwatchevents (~> 1) + aws-sdk-cloudwatchevidently (~> 1) + aws-sdk-cloudwatchlogs (~> 1) + aws-sdk-cloudwatchrum (~> 1) + aws-sdk-codeartifact (~> 1) + aws-sdk-codebuild (~> 1) + aws-sdk-codecatalyst (~> 1) + aws-sdk-codecommit (~> 1) + aws-sdk-codedeploy (~> 1) + aws-sdk-codeguruprofiler (~> 1) + aws-sdk-codegurureviewer (~> 1) + aws-sdk-codepipeline (~> 1) + aws-sdk-codestar (~> 1) + aws-sdk-codestarconnections (~> 1) + aws-sdk-codestarnotifications (~> 1) + aws-sdk-cognitoidentity (~> 1) + aws-sdk-cognitoidentityprovider (~> 1) + aws-sdk-cognitosync (~> 1) + aws-sdk-comprehend (~> 1) + aws-sdk-comprehendmedical (~> 1) + aws-sdk-computeoptimizer (~> 1) + aws-sdk-configservice (~> 1) + aws-sdk-connect (~> 1) + aws-sdk-connectcampaignservice (~> 1) + aws-sdk-connectcases (~> 1) + aws-sdk-connectcontactlens (~> 1) + aws-sdk-connectparticipant (~> 1) + aws-sdk-connectwisdomservice (~> 1) + aws-sdk-controltower (~> 1) + aws-sdk-costandusagereportservice (~> 1) + aws-sdk-costexplorer (~> 1) + aws-sdk-customerprofiles (~> 1) + 
aws-sdk-databasemigrationservice (~> 1) + aws-sdk-dataexchange (~> 1) + aws-sdk-datapipeline (~> 1) + aws-sdk-datasync (~> 1) + aws-sdk-dax (~> 1) + aws-sdk-detective (~> 1) + aws-sdk-devicefarm (~> 1) + aws-sdk-devopsguru (~> 1) + aws-sdk-directconnect (~> 1) + aws-sdk-directoryservice (~> 1) + aws-sdk-dlm (~> 1) + aws-sdk-docdb (~> 1) + aws-sdk-docdbelastic (~> 1) + aws-sdk-drs (~> 1) + aws-sdk-dynamodb (~> 1) + aws-sdk-dynamodbstreams (~> 1) + aws-sdk-ebs (~> 1) + aws-sdk-ec2 (~> 1) + aws-sdk-ec2instanceconnect (~> 1) + aws-sdk-ecr (~> 1) + aws-sdk-ecrpublic (~> 1) + aws-sdk-ecs (~> 1) + aws-sdk-efs (~> 1) + aws-sdk-eks (~> 1) + aws-sdk-elasticache (~> 1) + aws-sdk-elasticbeanstalk (~> 1) + aws-sdk-elasticinference (~> 1) + aws-sdk-elasticloadbalancing (~> 1) + aws-sdk-elasticloadbalancingv2 (~> 1) + aws-sdk-elasticsearchservice (~> 1) + aws-sdk-elastictranscoder (~> 1) + aws-sdk-emr (~> 1) + aws-sdk-emrcontainers (~> 1) + aws-sdk-emrserverless (~> 1) + aws-sdk-eventbridge (~> 1) + aws-sdk-finspace (~> 1) + aws-sdk-finspacedata (~> 1) + aws-sdk-firehose (~> 1) + aws-sdk-fis (~> 1) + aws-sdk-fms (~> 1) + aws-sdk-forecastqueryservice (~> 1) + aws-sdk-forecastservice (~> 1) + aws-sdk-frauddetector (~> 1) + aws-sdk-fsx (~> 1) + aws-sdk-gamelift (~> 1) + aws-sdk-gamesparks (~> 1) + aws-sdk-glacier (~> 1) + aws-sdk-globalaccelerator (~> 1) + aws-sdk-glue (~> 1) + aws-sdk-gluedatabrew (~> 1) + aws-sdk-greengrass (~> 1) + aws-sdk-greengrassv2 (~> 1) + aws-sdk-groundstation (~> 1) + aws-sdk-guardduty (~> 1) + aws-sdk-health (~> 1) + aws-sdk-healthlake (~> 1) + aws-sdk-honeycode (~> 1) + aws-sdk-iam (~> 1) + aws-sdk-identitystore (~> 1) + aws-sdk-imagebuilder (~> 1) + aws-sdk-importexport (~> 1) + aws-sdk-inspector (~> 1) + aws-sdk-inspector2 (~> 1) + aws-sdk-internetmonitor (~> 1) + aws-sdk-iot (~> 1) + aws-sdk-iot1clickdevicesservice (~> 1) + aws-sdk-iot1clickprojects (~> 1) + aws-sdk-iotanalytics (~> 1) + aws-sdk-iotdataplane (~> 1) + aws-sdk-iotdeviceadvisor (~> 1) + 
aws-sdk-iotevents (~> 1) + aws-sdk-ioteventsdata (~> 1) + aws-sdk-iotfleethub (~> 1) + aws-sdk-iotfleetwise (~> 1) + aws-sdk-iotjobsdataplane (~> 1) + aws-sdk-iotroborunner (~> 1) + aws-sdk-iotsecuretunneling (~> 1) + aws-sdk-iotsitewise (~> 1) + aws-sdk-iotthingsgraph (~> 1) + aws-sdk-iottwinmaker (~> 1) + aws-sdk-iotwireless (~> 1) + aws-sdk-ivs (~> 1) + aws-sdk-ivschat (~> 1) + aws-sdk-ivsrealtime (~> 1) + aws-sdk-kafka (~> 1) + aws-sdk-kafkaconnect (~> 1) + aws-sdk-kendra (~> 1) + aws-sdk-kendraranking (~> 1) + aws-sdk-keyspaces (~> 1) + aws-sdk-kinesis (~> 1) + aws-sdk-kinesisanalytics (~> 1) + aws-sdk-kinesisanalyticsv2 (~> 1) + aws-sdk-kinesisvideo (~> 1) + aws-sdk-kinesisvideoarchivedmedia (~> 1) + aws-sdk-kinesisvideomedia (~> 1) + aws-sdk-kinesisvideosignalingchannels (~> 1) + aws-sdk-kinesisvideowebrtcstorage (~> 1) + aws-sdk-kms (~> 1) + aws-sdk-lakeformation (~> 1) + aws-sdk-lambda (~> 1) + aws-sdk-lambdapreview (~> 1) + aws-sdk-lex (~> 1) + aws-sdk-lexmodelbuildingservice (~> 1) + aws-sdk-lexmodelsv2 (~> 1) + aws-sdk-lexruntimev2 (~> 1) + aws-sdk-licensemanager (~> 1) + aws-sdk-licensemanagerlinuxsubscriptions (~> 1) + aws-sdk-licensemanagerusersubscriptions (~> 1) + aws-sdk-lightsail (~> 1) + aws-sdk-locationservice (~> 1) + aws-sdk-lookoutequipment (~> 1) + aws-sdk-lookoutforvision (~> 1) + aws-sdk-lookoutmetrics (~> 1) + aws-sdk-machinelearning (~> 1) + aws-sdk-macie (~> 1) + aws-sdk-macie2 (~> 1) + aws-sdk-mainframemodernization (~> 1) + aws-sdk-managedblockchain (~> 1) + aws-sdk-managedgrafana (~> 1) + aws-sdk-marketplacecatalog (~> 1) + aws-sdk-marketplacecommerceanalytics (~> 1) + aws-sdk-marketplaceentitlementservice (~> 1) + aws-sdk-marketplacemetering (~> 1) + aws-sdk-mediaconnect (~> 1) + aws-sdk-mediaconvert (~> 1) + aws-sdk-medialive (~> 1) + aws-sdk-mediapackage (~> 1) + aws-sdk-mediapackagevod (~> 1) + aws-sdk-mediastore (~> 1) + aws-sdk-mediastoredata (~> 1) + aws-sdk-mediatailor (~> 1) + aws-sdk-memorydb (~> 1) + aws-sdk-mgn (~> 1) + 
aws-sdk-migrationhub (~> 1) + aws-sdk-migrationhubconfig (~> 1) + aws-sdk-migrationhuborchestrator (~> 1) + aws-sdk-migrationhubrefactorspaces (~> 1) + aws-sdk-migrationhubstrategyrecommendations (~> 1) + aws-sdk-mobile (~> 1) + aws-sdk-mq (~> 1) + aws-sdk-mturk (~> 1) + aws-sdk-mwaa (~> 1) + aws-sdk-neptune (~> 1) + aws-sdk-networkfirewall (~> 1) + aws-sdk-networkmanager (~> 1) + aws-sdk-nimblestudio (~> 1) + aws-sdk-oam (~> 1) + aws-sdk-omics (~> 1) + aws-sdk-opensearchserverless (~> 1) + aws-sdk-opensearchservice (~> 1) + aws-sdk-opsworks (~> 1) + aws-sdk-opsworkscm (~> 1) + aws-sdk-organizations (~> 1) + aws-sdk-osis (~> 1) + aws-sdk-outposts (~> 1) + aws-sdk-panorama (~> 1) + aws-sdk-personalize (~> 1) + aws-sdk-personalizeevents (~> 1) + aws-sdk-personalizeruntime (~> 1) + aws-sdk-pi (~> 1) + aws-sdk-pinpoint (~> 1) + aws-sdk-pinpointemail (~> 1) + aws-sdk-pinpointsmsvoice (~> 1) + aws-sdk-pinpointsmsvoicev2 (~> 1) + aws-sdk-pipes (~> 1) + aws-sdk-polly (~> 1) + aws-sdk-pricing (~> 1) + aws-sdk-privatenetworks (~> 1) + aws-sdk-prometheusservice (~> 1) + aws-sdk-proton (~> 1) + aws-sdk-qldb (~> 1) + aws-sdk-qldbsession (~> 1) + aws-sdk-quicksight (~> 1) + aws-sdk-ram (~> 1) + aws-sdk-rds (~> 1) + aws-sdk-rdsdataservice (~> 1) + aws-sdk-recyclebin (~> 1) + aws-sdk-redshift (~> 1) + aws-sdk-redshiftdataapiservice (~> 1) + aws-sdk-redshiftserverless (~> 1) + aws-sdk-rekognition (~> 1) + aws-sdk-resiliencehub (~> 1) + aws-sdk-resourceexplorer2 (~> 1) + aws-sdk-resourcegroups (~> 1) + aws-sdk-resourcegroupstaggingapi (~> 1) + aws-sdk-robomaker (~> 1) + aws-sdk-rolesanywhere (~> 1) + aws-sdk-route53 (~> 1) + aws-sdk-route53domains (~> 1) + aws-sdk-route53recoverycluster (~> 1) + aws-sdk-route53recoverycontrolconfig (~> 1) + aws-sdk-route53recoveryreadiness (~> 1) + aws-sdk-route53resolver (~> 1) + aws-sdk-s3 (~> 1) + aws-sdk-s3control (~> 1) + aws-sdk-s3outposts (~> 1) + aws-sdk-sagemaker (~> 1) + aws-sdk-sagemakeredgemanager (~> 1) + 
aws-sdk-sagemakerfeaturestoreruntime (~> 1) + aws-sdk-sagemakergeospatial (~> 1) + aws-sdk-sagemakermetrics (~> 1) + aws-sdk-sagemakerruntime (~> 1) + aws-sdk-savingsplans (~> 1) + aws-sdk-scheduler (~> 1) + aws-sdk-schemas (~> 1) + aws-sdk-secretsmanager (~> 1) + aws-sdk-securityhub (~> 1) + aws-sdk-securitylake (~> 1) + aws-sdk-serverlessapplicationrepository (~> 1) + aws-sdk-servicecatalog (~> 1) + aws-sdk-servicediscovery (~> 1) + aws-sdk-servicequotas (~> 1) + aws-sdk-ses (~> 1) + aws-sdk-sesv2 (~> 1) + aws-sdk-shield (~> 1) + aws-sdk-signer (~> 1) + aws-sdk-simpledb (~> 1) + aws-sdk-simspaceweaver (~> 1) + aws-sdk-sms (~> 1) + aws-sdk-snowball (~> 1) + aws-sdk-snowdevicemanagement (~> 1) + aws-sdk-sns (~> 1) + aws-sdk-sqs (~> 1) + aws-sdk-ssm (~> 1) + aws-sdk-ssmcontacts (~> 1) + aws-sdk-ssmincidents (~> 1) + aws-sdk-ssmsap (~> 1) + aws-sdk-ssoadmin (~> 1) + aws-sdk-states (~> 1) + aws-sdk-storagegateway (~> 1) + aws-sdk-support (~> 1) + aws-sdk-supportapp (~> 1) + aws-sdk-swf (~> 1) + aws-sdk-synthetics (~> 1) + aws-sdk-textract (~> 1) + aws-sdk-timestreamquery (~> 1) + aws-sdk-timestreamwrite (~> 1) + aws-sdk-tnb (~> 1) + aws-sdk-transcribeservice (~> 1) + aws-sdk-transcribestreamingservice (~> 1) + aws-sdk-transfer (~> 1) + aws-sdk-translate (~> 1) + aws-sdk-voiceid (~> 1) + aws-sdk-vpclattice (~> 1) + aws-sdk-waf (~> 1) + aws-sdk-wafregional (~> 1) + aws-sdk-wafv2 (~> 1) + aws-sdk-wellarchitected (~> 1) + aws-sdk-workdocs (~> 1) + aws-sdk-worklink (~> 1) + aws-sdk-workmail (~> 1) + aws-sdk-workmailmessageflow (~> 1) + aws-sdk-workspaces (~> 1) + aws-sdk-workspacesweb (~> 1) + aws-sdk-xray (~> 1) + aws-sdk-robomaker (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-rolesanywhere (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53 (1.71.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53domains (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-route53recoverycluster (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53recoverycontrolconfig (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53recoveryreadiness (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53resolver (1.41.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-s3 (1.122.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sdk-kms (~> 1) + aws-sigv4 (~> 1.4) + aws-sdk-s3control (1.43.0) + aws-sdk-core (~> 3, >= 3.122.0) + aws-sigv4 (~> 1.1) + aws-sdk-s3outposts (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemaker (1.178.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakeredgemanager (1.14.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakerfeaturestoreruntime (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakergeospatial (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakermetrics (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakerruntime (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-savingsplans (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-scheduler (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-schemas (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-secretsmanager (1.46.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-securityhub (1.81.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-securitylake (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-serverlessapplicationrepository (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-servicecatalog (1.60.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-servicediscovery (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + 
aws-sigv4 (~> 1.1) + aws-sdk-servicequotas (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ses (1.41.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-sesv2 (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-shield (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-signer (1.32.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-simpledb (1.29.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv2 (~> 1.0) + aws-sdk-simspaceweaver (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sms (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-snowball (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-snowdevicemanagement (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sns (1.60.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sqs (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssm (1.150.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssmcontacts (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssmincidents (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssmsap (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssoadmin (1.23.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-states (1.39.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-storagegateway (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-support (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-supportapp (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-swf (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-synthetics (1.19.0) + aws-sdk-core (~> 3, >= 3.121.2) + aws-sigv4 (~> 1.1) + aws-sdk-textract (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 
(~> 1.1) + aws-sdk-timestreamquery (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-timestreamwrite (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-tnb (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-transcribeservice (1.82.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-transcribestreamingservice (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-transfer (1.34.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-translate (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-voiceid (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-vpclattice (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-waf (1.43.0) + aws-sdk-core (~> 3, >= 3.122.0) + aws-sigv4 (~> 1.1) + aws-sdk-wafregional (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-wafv2 (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-wellarchitected (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workdocs (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-worklink (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workmail (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workmailmessageflow (1.23.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workspaces (1.80.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workspacesweb (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-xray (1.52.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sigv2 (1.1.0) + aws-sigv4 (1.5.2) + aws-eventstream (~> 1, >= 1.0.2) + azure_graph_rbac (0.17.2) + ms_rest_azure (~> 0.12.0) + azure_mgmt_key_vault (0.17.7) + ms_rest_azure (~> 0.12.0) + azure_mgmt_resources (0.18.2) + ms_rest_azure (~> 0.12.0) + azure_mgmt_security 
(0.19.0) + ms_rest_azure (~> 0.12.0) + azure_mgmt_storage (0.23.0) + ms_rest_azure (~> 0.12.0) + bcrypt_pbkdf (1.1.0) + bson (4.15.0) + builder (3.2.4) + chef-telemetry (1.1.1) + chef-config + concurrent-ruby (~> 1.0) + coderay (1.1.3) + concurrent-ruby (1.2.2) + cookstyle (7.32.2) + rubocop (= 1.25.1) + declarative (0.0.20) + delegate (0.3.0) + diff-lcs (1.5.0) + docker-api (2.2.0) + excon (>= 0.47.0) + multi_json + domain_name (0.5.20190701) + unf (>= 0.0.5, < 1.0.0) + dry-configurable (1.0.1) + dry-core (~> 1.0, < 2) + zeitwerk (~> 2.6) + dry-core (1.0.0) + concurrent-ruby (~> 1.0) + zeitwerk (~> 2.6) + dry-inflector (1.0.0) + dry-initializer (3.1.1) + dry-logic (1.5.0) + concurrent-ruby (~> 1.0) + dry-core (~> 1.0, < 2) + zeitwerk (~> 2.6) + dry-schema (1.13.1) + concurrent-ruby (~> 1.0) + dry-configurable (~> 1.0, >= 1.0.1) + dry-core (~> 1.0, < 2) + dry-initializer (~> 3.0) + dry-logic (>= 1.4, < 2) + dry-types (>= 1.7, < 2) + zeitwerk (~> 2.6) + dry-types (1.7.1) + concurrent-ruby (~> 1.0) + dry-core (~> 1.0) + dry-inflector (~> 1.0) + dry-logic (~> 1.4) + zeitwerk (~> 2.6) + dry-validation (1.10.0) + concurrent-ruby (~> 1.0) + dry-core (~> 1.0, < 2) + dry-initializer (~> 3.0) + dry-schema (>= 1.12, < 2) + zeitwerk (~> 2.6) + ed25519 (1.3.0) + erubi (1.12.0) + excon (0.99.0) + faraday (1.10.3) + faraday-em_http (~> 1.0) + faraday-em_synchrony (~> 1.0) + faraday-excon (~> 1.1) + faraday-httpclient (~> 1.0) + faraday-multipart (~> 1.0) + faraday-net_http (~> 1.0) + faraday-net_http_persistent (~> 1.0) + faraday-patron (~> 1.0) + faraday-rack (~> 1.0) + faraday-retry (~> 1.0) + ruby2_keywords (>= 0.0.4) + faraday-cookie_jar (0.0.7) + faraday (>= 0.8.0) + http-cookie (~> 1.0.0) + faraday-em_http (1.0.0) + faraday-em_synchrony (1.0.0) + faraday-excon (1.1.0) + faraday-follow_redirects (0.3.0) + faraday (>= 1, < 3) + faraday-httpclient (1.0.1) + faraday-multipart (1.0.4) + multipart-post (~> 2) + faraday-net_http (1.0.1) + faraday-net_http_persistent (1.2.0) + 
faraday-patron (1.0.0) + faraday-rack (1.0.0) + faraday-retry (1.0.3) + faraday_middleware (1.0.0) + faraday (~> 1.0) + ffi (1.15.5) + fuzzyurl (0.9.0) + google-api-client (0.52.0) + addressable (~> 2.5, >= 2.5.1) + googleauth (~> 0.9) + httpclient (>= 2.8.1, < 3.0) + mini_mime (~> 1.0) + representable (~> 3.0) + retriable (>= 2.0, < 4.0) + rexml + signet (~> 0.12) + googleauth (0.14.0) + faraday (>= 0.17.3, < 2.0) + jwt (>= 1.4, < 3.0) + memoist (~> 0.16) + multi_json (~> 1.11) + os (>= 0.9, < 2.0) + signet (~> 0.14) + gssapi (1.3.1) + ffi (>= 1.0.1) + gyoku (1.4.0) + builder (>= 2.1.2) + rexml (~> 3.0) + hashie (4.1.0) + highline (2.1.0) + http-cookie (1.0.5) + domain_name (~> 0.5) + httpclient (2.8.3) + i18n (1.13.0) + concurrent-ruby (~> 1.0) + inifile (3.0.0) + io-console (0.6.0) + irb (1.6.4) + reline (>= 0.3.0) + jmespath (1.6.2) + json (2.6.3) + jwt (2.7.0) + kitchen-terraform (7.0.2) + delegate (~> 0.3.0) + dry-validation (~> 1.6) + inspec (~> 5.21, >= 5.21.29) + json (~> 2.3) + test-kitchen (>= 2.1, < 4.0) + tty-which (~> 0.5.0) + license-acceptance (2.1.13) + pastel (~> 0.7) + tomlrb (>= 1.2, < 3.0) + tty-box (~> 0.6) + tty-prompt (~> 0.20) + little-plugger (1.1.4) + logging (2.3.1) + little-plugger (~> 1.1) + multi_json (~> 1.14) + memoist (0.16.2) + method_source (1.0.0) + mini_mime (1.1.2) + minitest (5.18.0) + mixlib-config (3.0.27) + tomlrb + mixlib-log (3.0.9) + mixlib-shellout (3.2.7) + chef-utils + mongo (2.13.2) + bson (>= 4.8.2, < 5.0.0) + ms_rest (0.7.6) + concurrent-ruby (~> 1.0) + faraday (>= 0.9, < 2.0.0) + timeliness (~> 0.3.10) + ms_rest_azure (0.12.0) + concurrent-ruby (~> 1.0) + faraday (>= 0.9, < 2.0.0) + faraday-cookie_jar (~> 0.0.6) + ms_rest (~> 0.7.6) + multi_json (1.15.0) + multipart-post (2.3.0) + net-scp (4.0.0) + net-ssh (>= 2.6.5, < 8.0.0) + net-ssh (7.1.0) + net-ssh-gateway (2.0.0) + net-ssh (>= 4.0.0) + nori (2.6.0) + options (2.3.2) + os (1.1.4) + parallel (1.23.0) + parser (3.2.2.1) + ast (~> 2.4.1) + parslet (1.8.2) + 
pastel (0.8.0) + tty-color (~> 0.5) + progress_bar (1.3.3) + highline (>= 1.6, < 3) + options (~> 2.3.0) + pry (0.14.2) + coderay (~> 1.1) + method_source (~> 1.0) + public_suffix (5.0.1) + rainbow (3.1.1) + rake (13.0.6) + regexp_parser (2.8.0) + reline (0.3.3) + io-console (~> 0.5) + representable (3.2.0) + declarative (< 0.1.0) + trailblazer-option (>= 0.1.1, < 0.2.0) + uber (< 0.2.0) + retriable (3.1.2) + rexml (3.2.5) + rspec (3.11.0) + rspec-core (~> 3.11.0) + rspec-expectations (~> 3.11.0) + rspec-mocks (~> 3.11.0) + rspec-core (3.11.0) + rspec-support (~> 3.11.0) + rspec-expectations (3.11.1) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.11.0) + rspec-its (1.3.0) + rspec-core (>= 3.0.0) + rspec-expectations (>= 3.0.0) + rspec-mocks (3.11.2) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.11.0) + rspec-retry (0.6.2) + rspec-core (> 3.3) + rspec-support (3.11.1) + rubocop (1.25.1) + parallel (~> 1.10) + parser (>= 3.1.0.0) + rainbow (>= 2.2.2, < 4.0) + regexp_parser (>= 1.8, < 3.0) + rexml + rubocop-ast (>= 1.15.1, < 2.0) + ruby-progressbar (~> 1.7) + unicode-display_width (>= 1.4.0, < 3.0) + rubocop-ast (1.28.1) + parser (>= 3.2.1.0) + ruby-progressbar (1.13.0) + ruby2_keywords (0.0.5) + rubyntlm (0.6.3) + rubyzip (2.3.2) + semverse (3.0.2) + signet (0.17.0) + addressable (~> 2.8) + faraday (>= 0.17.5, < 3.a) + jwt (>= 1.5, < 3.0) + multi_json (~> 1.10) + sslshake (1.3.1) + strings (0.2.1) + strings-ansi (~> 0.2) + unicode-display_width (>= 1.5, < 3.0) + unicode_utils (~> 1.4) + strings-ansi (0.2.0) + test-kitchen (3.5.0) + bcrypt_pbkdf (~> 1.0) + chef-utils (>= 16.4.35) + ed25519 (~> 1.2) + license-acceptance (>= 1.0.11, < 3.0) + mixlib-install (~> 3.6) + mixlib-shellout (>= 1.2, < 4.0) + net-scp (>= 1.1, < 5.0) + net-ssh (>= 2.9, < 8.0) + net-ssh-gateway (>= 1.2, < 3.0) + thor (>= 0.19, < 2.0) + winrm (~> 2.0) + winrm-elevated (~> 1.0) + winrm-fs (~> 1.1) + thor (1.2.2) + timeliness (0.3.10) + tomlrb (1.3.0) + trailblazer-option (0.1.2) + train 
(3.10.7) + activesupport (>= 6.0.3.1) + azure_graph_rbac (~> 0.16) + azure_mgmt_key_vault (~> 0.17) + azure_mgmt_resources (~> 0.15) + azure_mgmt_security (~> 0.18) + azure_mgmt_storage (~> 0.18) + docker-api (>= 1.26, < 3.0) + google-api-client (>= 0.23.9, <= 0.52.0) + googleauth (>= 0.6.6, <= 0.14.0) + inifile (~> 3.0) + train-core (= 3.10.7) + train-winrm (~> 0.2) + train-aws (0.2.24) + aws-sdk-alexaforbusiness (~> 1.0) + aws-sdk-amplify (~> 1.32.0) + aws-sdk-apigateway (~> 1.0) + aws-sdk-apigatewayv2 (~> 1.0) + aws-sdk-applicationautoscaling (>= 1.46, < 1.52) + aws-sdk-athena (~> 1.0) + aws-sdk-autoscaling (>= 1.22, < 1.64) + aws-sdk-batch (>= 1.36, < 1.48) + aws-sdk-budgets (~> 1.0) + aws-sdk-cloudformation (~> 1.0) + aws-sdk-cloudfront (~> 1.0) + aws-sdk-cloudhsm (~> 1.0) + aws-sdk-cloudhsmv2 (~> 1.0) + aws-sdk-cloudtrail (~> 1.8) + aws-sdk-cloudwatch (~> 1.13) + aws-sdk-cloudwatchevents (>= 1.36, < 1.47) + aws-sdk-cloudwatchlogs (~> 1.13) + aws-sdk-codecommit (~> 1.0) + aws-sdk-codedeploy (~> 1.0) + aws-sdk-codepipeline (~> 1.0) + aws-sdk-cognitoidentity (>= 1.26, < 1.32) + aws-sdk-cognitoidentityprovider (>= 1.46, < 1.54) + aws-sdk-configservice (~> 1.21) + aws-sdk-core (~> 3.0) + aws-sdk-costandusagereportservice (~> 1.6) + aws-sdk-databasemigrationservice (>= 1.42, < 1.54) + aws-sdk-dynamodb (~> 1.31) + aws-sdk-ec2 (~> 1.70) + aws-sdk-ecr (~> 1.18) + aws-sdk-ecrpublic (~> 1.3) + aws-sdk-ecs (~> 1.30) + aws-sdk-efs (~> 1.0) + aws-sdk-eks (~> 1.9) + aws-sdk-elasticache (~> 1.0) + aws-sdk-elasticbeanstalk (~> 1.0) + aws-sdk-elasticloadbalancing (~> 1.8) + aws-sdk-elasticloadbalancingv2 (~> 1.0) + aws-sdk-elasticsearchservice (~> 1.0) + aws-sdk-emr (~> 1.53.0) + aws-sdk-eventbridge (~> 1.24.0) + aws-sdk-firehose (~> 1.0) + aws-sdk-glue (>= 1.71, < 1.89) + aws-sdk-guardduty (~> 1.31) + aws-sdk-iam (~> 1.13) + aws-sdk-kafka (~> 1.0) + aws-sdk-kinesis (~> 1.0) + aws-sdk-kms (~> 1.13) + aws-sdk-lambda (~> 1.0) + aws-sdk-mq (~> 1.40.0) + aws-sdk-networkfirewall 
(>= 1.6.0) + aws-sdk-networkmanager (>= 1.13.0) + aws-sdk-organizations (>= 1.17, < 1.60) + aws-sdk-ram (>= 1.21, < 1.27) + aws-sdk-rds (~> 1.43) + aws-sdk-redshift (~> 1.0) + aws-sdk-route53 (~> 1.0) + aws-sdk-route53domains (~> 1.0) + aws-sdk-route53resolver (~> 1.0) + aws-sdk-s3 (~> 1.30) + aws-sdk-s3control (~> 1.43.0) + aws-sdk-secretsmanager (>= 1.42, < 1.47) + aws-sdk-securityhub (~> 1.0) + aws-sdk-servicecatalog (>= 1.48, < 1.61) + aws-sdk-ses (~> 1.41.0) + aws-sdk-shield (~> 1.30) + aws-sdk-signer (~> 1.32.0) + aws-sdk-simpledb (~> 1.29.0) + aws-sdk-sms (~> 1.0) + aws-sdk-sns (~> 1.9) + aws-sdk-sqs (~> 1.10) + aws-sdk-ssm (~> 1.0) + aws-sdk-states (>= 1.35, < 1.40) + aws-sdk-synthetics (~> 1.19.0) + aws-sdk-transfer (>= 1.26, < 1.35) + aws-sdk-waf (~> 1.43.0) + train-core (3.10.7) + addressable (~> 2.5) + ffi (!= 1.13.0) + json (>= 1.8, < 3.0) + mixlib-shellout (>= 2.0, < 4.0) + net-scp (>= 1.2, < 5.0) + net-ssh (>= 2.9, < 8.0) + train-habitat (0.2.22) + train-winrm (0.2.13) + winrm (>= 2.3.6, < 3.0) + winrm-elevated (~> 1.2.2) + winrm-fs (~> 1.0) + tty-box (0.7.0) + pastel (~> 0.8) + strings (~> 0.2.0) + tty-cursor (~> 0.7) + tty-color (0.6.0) + tty-cursor (0.7.1) + tty-prompt (0.23.1) + pastel (~> 0.8) + tty-reader (~> 0.8) + tty-reader (0.9.0) + tty-cursor (~> 0.7) + tty-screen (~> 0.8) + wisper (~> 2.0) + tty-screen (0.8.1) + tty-table (0.12.0) + pastel (~> 0.8) + strings (~> 0.2.0) + tty-screen (~> 0.8) + tty-which (0.5.0) + tzinfo (2.0.6) + concurrent-ruby (~> 1.0) + uber (0.1.0) + unf (0.1.4) + unf_ext + unicode-display_width (2.4.2) + unicode_utils (1.4.0) + winrm (2.3.6) + builder (>= 2.1.2) + erubi (~> 1.8) + gssapi (~> 1.2) + gyoku (~> 1.0) + httpclient (~> 2.2, >= 2.2.0.2) + logging (>= 1.6.1, < 3.0) + nori (~> 2.0) + rubyntlm (~> 0.6.0, >= 0.6.3) + winrm-elevated (1.2.3) + erubi (~> 1.8) + winrm (~> 2.0) + winrm-fs (~> 1.0) + winrm-fs (1.3.5) + erubi (~> 1.8) + logging (>= 1.6.1, < 3.0) + rubyzip (~> 2.0) + winrm (~> 2.0) + wisper (2.0.1) + 
zeitwerk (2.6.8) + +PLATFORMS + x86_64-linux + +DEPENDENCIES + activesupport! + aws-sdk (~> 3)! + chef-config! + chef-utils! + cinc-auditor-bin! + inspec (~> 5.21)! + inspec-core! + irb! + kitchen-terraform (~> 7.0)! + mixlib-install! + mixlib-versioning! + rspec-retry! + test-kitchen! + unf_ext! + +BUNDLED WITH + 2.4.13 diff --git a/e2e-etl-python/files/Makefile b/e2e-etl-python/files/Makefile new file mode 100644 index 000000000..313c6db9f --- /dev/null +++ b/e2e-etl-python/files/Makefile @@ -0,0 +1,158 @@ +NAME := AWS Quickstarter +DESCRIPTION := The '$(NAME)' is a is a prototype for an ODS quickstarter + +PWD := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) +GEMS_HOME ?= $(PWD)/vendor/bundle +INSTALL_REPORT_HOME := ./reports/install +SHELL := /usr/bin/env bash +.SHELLFLAGS := -eu -o pipefail -c +.DELETE_ON_ERROR: +MAKEFLAGS += --warn-undefined-variables +MAKEFLAGS += --no-builtin-rules + +TF_WORKSPACE = default + +# tfenv hack +DEBUG := 0 + +# Statefile Parameters +ACCOUNT_ID := $(shell aws sts get-caller-identity --query 'Account' --output text) +TF_BACKEND_S3KEY_MOD := $(shell echo "$(TF_BACKEND_S3KEY)" | sed "s/\//-/g") +TF_BACKEND_S3KEY_MOD := $(shell echo "$(TF_BACKEND_S3KEY_MOD)" | sed "s/-/\//") + +TFSTATE_BUCKET := $(ACCOUNT_ID)-terraform-state-bucket +TFSTATE_KEY := $(TF_BACKEND_S3KEY_MOD)-terraform-state +TFSTATE_TABLE := $(ACCOUNT_ID)-terraform-state-lock-table + + +.PHONY: default +default: test + +.PHONY: all +all: test plan deploy deployment-test describe + +.PHONY: init +# Initialize project. +init: install-dev-deps install-test-deps + +.PHONY: create-tfvars +# create terraform.tfvars.json +create-tfvars: + terraform-docs json . | jq '.inputs | map({ (.name): .default }) | add' > terraform.tfvars.json + +.PHONY: prep-test +prep-test: + pre-commit run terraformcreatei2o -a + pre-commit run terraformstackmoduleoutputs -a + +.PHONY: test +# Run (pre-deployment) tests. 
+test: install-test-deps + #Needed for "Infrastructure as Code (IaC)" stage + +.PHONY: plan +# Plan infrastructure deployment. +plan: init-terraform + @$(call check_aws_credentials) + + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform plan -input=false -out=tfplan + +.PHONY: deploy +# Deploy infrastructure. +deploy: init-terraform plan + @$(call check_aws_credentials) + + # output aws account and user id for testing + aws sts get-caller-identity --output text | tee $(INSTALL_REPORT_HOME)/aws_deploy_account.log + + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform apply -auto-approve -input=false -no-color tfplan | tee "$(INSTALL_REPORT_HOME)/tf_apply.log" + @TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform show -no-color -json | tee "$(INSTALL_REPORT_HOME)/tf_show.log" 1>/dev/null + +.PHONY: deployment-test +# Run (post-deployment) tests. +deployment-test: install-test-deps + #Needed for "Infrastructure as Code (IaC)" stage + +.PHONY: install-report +install-report: + awk '/Creation complete/ && !/terraform-data/ {print}' "$(INSTALL_REPORT_HOME)/tf_apply.log" > $(INSTALL_REPORT_HOME)/tf_created.log + +.PHONY: describe +# Describe infrastructure. +describe: init-terraform + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform output -json | tee outputs.json + +.PHONY: destroy +# Destroy infrastructure. +destroy: init-terraform + @$(call check_aws_credentials) + + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform destroy -auto-approve + +.PHONY: install-dev-deps +# Install development dependencies. +install-dev-deps: install-git-pre-commit-hooks + +.PHONY: install-git-pre-commit-hooks +# Install Git pre-commit hooks. +install-git-pre-commit-hooks: + pre-commit install --overwrite + +.PHONY: install-ruby-gems +# Install Ruby gems specified in Gemfile. 
+install-ruby-gems: + BUNDLE_SILENCE_ROOT_WARNING=true bundle config --local path $(GEMS_HOME) + # see https://github.com/rubygems/rubygems/issues/4466 to get rid of error messages in Jenkins + BUNDLE_SILENCE_ROOT_WARNING=true TMPDIR=./vendor/tmp bundle install --jobs=8 + +.PHONY: install-python-env +# Install python virtual environment based on Pipfile +install-python-env: + CI=true PIPENV_VENV_IN_PROJECT=true pipenv install + +.PHONY: init-terraform +# Install Terraform workspace. +init-terraform: + + @echo "Bucket: ${TFSTATE_BUCKET}" + @echo "Key : ${TFSTATE_KEY}" + @echo "Table : ${TFSTATE_TABLE}" + + echo 1 | terraform init -backend-config="bucket=$(TFSTATE_BUCKET)" -backend-config="key=$(TFSTATE_KEY)" -backend-config="dynamodb_table=$(TFSTATE_TABLE)" -force-copy -input=false + +.PHONY: install-test-deps +# Install testing dependencies. +install-test-deps: install-ruby-gems install-python-env + +.PHONY: cinc-auditor-test +# run cinc-auditor without use of kitchen-terraform and create yaml for mapping terraform outputs to inspec inputs. 
+cinc-auditor-test: + sh ./lib/scripts/createstackfixtureoutputs2yml.sh + bundle exec cinc-auditor exec test/integration/default --no-create-lockfile --no-distinct-exit --input-file ./test/integration/default/files/inputs-from-tfo-stack.yml --target aws:// + +.PHONY: clean +# Reset Working directory (take care if something has deployed upfront) +clean: + @rm -rf .kitchen/ + @rm -rf test/fixtures/default/terraform.tfstate.d/ + @rm -rf test/fixtures/default/.terraform/ + @rm -f test/fixtures/default/.terraform.lock.hcl + +.PHONY: check-config +# Do some basic verification of configuration files and accounts +check-config: + @sh ./lib/scripts/aws/check_conf.sh + +# Checks AWS account +check_aws_credentials = \ + exitStatus=0; \ + if [ -v AWS_ACCESS_KEY_ID ] && [ -v AWS_SECRET_ACCESS_KEY ]; then \ + echo "Info: using AWS environment variables AWS_ACCESS_KEY_ID & AWS_SECRET_ACCESS_KEY ..."; \ + else \ + aws sts get-caller-identity &> /dev/null || exitStatus=$$?; \ + if [ $$exitStatus = 0 ]; then \ + echo "Info: using alternate credentials (e.g. AWS SSO) ..."; \ + else \ + echo "Error: No AWS credentials specified ..."; exit 1; \ + fi \ + fi diff --git a/e2e-etl-python/files/Pipfile b/e2e-etl-python/files/Pipfile new file mode 100644 index 000000000..b679ceaa2 --- /dev/null +++ b/e2e-etl-python/files/Pipfile @@ -0,0 +1,14 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] + +[packages] +python-hcl2 = "~=2.0" +boto3 = "~=1.26" +yq = ">2" + +[requires] +python_version = "3" diff --git a/e2e-etl-python/files/README.md b/e2e-etl-python/files/README.md new file mode 100644 index 000000000..7ab1904b2 --- /dev/null +++ b/e2e-etl-python/files/README.md @@ -0,0 +1,140 @@ +# Python end-to-end tests + +This is a python based quicktarter intended to develop end-to-end tests for data pipelines. +In order to do that it uses two testing technologies: + 1. 
Great Expectations, meant for data transformation testing data within relational tables. + e.g.: You could test the schema of a database, the number of rows, that a specific column has no null values, etc + 2. Pytest, together with Boto, allows for testing ETL triggers, notification system, content of S3 buckets, etc + +This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. + +How it works: + 1. The ODS Jenkins pipeline starts. + 2. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. + 3. In AWS it creates and triggers a code pipeline that will execute the tests. + 4. When the AWS code pipeline finishes, it creates the necessary reports and sends them back to Jenkins. + 5. The Jenkins pipeline finishes when receiving the reports. + + + +## Stages: installation / integration / acceptance + +With the introduction of the release manager concept in OpenDevStack 3, e2e test quickstarters are expected to run tests in three different stages (installation, integration & acceptance) and generate a JUnit XML result file for each of these stages. + +Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. + +## How to prepare data +In case you need to prepare data before the execution of your Great Expectations tests you could use the test_preparation folder, which contains the pre_requisites.py and post_requisites.py; these scripts +will be executed before and after the execution of your Great Expectations tests. + +In the pre_requisites.py you can do things such as prepare your data sets, create temporary resources... or even trigger your ETL pipelines. +After the execution of your Great Expectations test, the post_requisites.py will be executed. It is intended to be used as a clean-up step to remove any data set, +or reset your system to its initial state. 
+ +For pytest you can configure pre and post requisites on your own since it's much more flexible than Great Expectations. +The tests will be executed in this order: + 1. pre_requisites.py + 2. Great Expectations test suite + 3. post_requisites.py + 4. Pytest test suite + +## Running end-to-end tests + +To execute all end-to-end tests: + +1. Set up AWS account credentials in the environment folder's yml files. +2. Customize the json files with the desired identification namings for the AWS resources that will be created with the quickstarter's execution. +3. Modify the great_expectations and pytest folders to execute your tests located in the 'tests/acceptance/' directory. + +# Pipeline execution options +- By a commit with a change in the code the pipeline in Jenkins will be automatically executed +- From Jenkins manually +- Automatically from a test (create a function to automate the trigger of the pipeline) + +## How to use this Stack? + +The behavior of a stack is determined by its purpose and the set of input parameters. Here is an overview of the *inputs* and *outputs* available for this stack. 
+ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 1.0 | +| [aws](#requirement\_aws) | 4.67.0 | +| [random](#requirement\_random) | 3.5.1 | +| [great_expectations](#requirement\_great_expectations) | 0.18.3 | +| [pytest](#requirement\_pytest) | 7.4.3 | +| [boto3](#requirement\_boto3) | 1.29.6 | +| [allure-pytest](#requirement\_allure-pytest) | 2.13.2 | +| [allure-combine](#requirement\_allure-combine) | 1.0.11 | + + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 4.67.0 | +| [random](#provider\_random) | 3.5.1 | + +## Modules + +| Name | Description | +|-----------------------------------------------------------------------------------------------------------------|-------------| +| [modules\codebuild]() | resource | +| [modules\codepipeline]() | resource | +| [modules\iam_roles]() | resource | +| [modules\s3-bucket]() | resource | +| [modules\s3-bucket-policy](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Resources + +| Name | Type | +|--------------------------------------------------------------------------------------------------------------------------------------------|------| +| [aws_codebuild_project.build_project](https://registry.terraform.io/providers/hashicorp/...) 
| resource | +| [aws_codepipeline.codepipeline]() | resource | +| [aws_iam_role.codepipeline_role]() | resource | +| [aws_iam_role.codebuild_role]() | resource | +| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_iam_role_policy.codebuild_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_policy.allow_access_from_another_account](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-cp](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.e2e_results_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-artfcs](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.source_bitbucket_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [random_id.id](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [local_file.terraform-data](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [time_static.deployment](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | 
+|------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------|-----------------------|:--------:| +| [codebuild\_project\_name](#input\_codebuild\_project\_name) | The name of the AWS codebuild project. | `string` | `"codebuild-project"` | no | +| [codepipeline\_name](#input\_codepipeline\_name) | The name of the AWS codepipeline. | `string` | `"test-codepipeline"` | no | +| [codepipeline\_bucket\_name](#input\_codepipeline\_bucket\_name) | The name of the codepipeline artifacts S3 bucket. | `string` | `"cpplartifacts"` | no | +| [bitbucket\_source\_bucket\_name](#input\_bitbucket\_source\_bucket\_name) | The name of the source S3 bucket. | `string` | `"src-bitbucket"` | no | +| [e2e\_results\_bucket\_name](#input\_e2e\_results\_bucket\_name) | The name of the results S3 bucket. | `string` | `"test-results"` | no | +| [pipeline\_role\_name](#input\_pipeline\_role\_name) | The name of the codepipeline role. | `string` | `"test-codePipelineRole"` | no | +| [codebuild\_role\_name](#input\_codebuild\_role\_name) | The name of the codebuild role. | `string` | `"test-codeBuildRole"` | no | +| [codepipeline\_policy\_name](#input\_codepipeline\_policy\_name) | The name of the codepipeline policy. | `string` | `"codepipeline_policy"` | no | +| [codebuild\_policy\_name](#input\_codebuild\_policy\_name) | The name of the codebuild policy. | `string` | `"codebuild_policy"` | no | +| [meta\_environment](#input\_meta\_environment) | The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION. | `string` | `"DEVELOPMENT"` | no | +| [name](#input\_name) | The name of the stack. | `string` | `"stack-aws-quickstarter"` | no | + +## Outputs + +The output generated by terraform are used for internal quickestarter's purposes. 
+ + +## Environments +The pipeline supports multiple environments (DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. +The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. +The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). + +## Problems? Questions? Suggestions? + +In case of problems, questions or suggestions, feel free to file an issue with the respective project's repository. Thanks! + diff --git a/e2e-etl-python/files/backend.tf b/e2e-etl-python/files/backend.tf new file mode 100644 index 000000000..824d6152d --- /dev/null +++ b/e2e-etl-python/files/backend.tf @@ -0,0 +1,5 @@ +terraform { + backend "s3" { + region = "eu-west-1" + } +} diff --git a/e2e-etl-python/files/cfn-templates/.gitkeep b/e2e-etl-python/files/cfn-templates/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/e2e-etl-python/files/cfn-templates/cfs3.json b/e2e-etl-python/files/cfn-templates/cfs3.json new file mode 100644 index 000000000..374d38831 --- /dev/null +++ b/e2e-etl-python/files/cfn-templates/cfs3.json @@ -0,0 +1,29 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "CloudFormation template of an S3 bucket for the AWS Quickstarter.", + "Outputs": { + "S3BucketName": { + "Description": "Bucket Created using this template.", + "Value": { + "Ref": "S3Bucket" + } + } + }, + "Resources": { + "S3Bucket": { + "Properties": { + "AccessControl": "Private", + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "ServerSideEncryptionByDefault": { + "SSEAlgorithm": "AES256" + } + } + ] + } + }, + "Type": "AWS::S3::Bucket" + } + } +} diff --git a/e2e-etl-python/files/common-tags.tf b/e2e-etl-python/files/common-tags.tf new file mode 100644 index 000000000..56a0f42df --- /dev/null +++ 
b/e2e-etl-python/files/common-tags.tf @@ -0,0 +1,5 @@ +locals { + common_tags = { + Environment = upper(var.meta_environment) + } +} diff --git a/e2e-etl-python/files/environments/dev.json b/e2e-etl-python/files/environments/dev.json new file mode 100644 index 000000000..e9a0fd4b0 --- /dev/null +++ b/e2e-etl-python/files/environments/dev.json @@ -0,0 +1,13 @@ +{ + "meta_environment" : "DEVELOPMENT", + + "codebuild_project_name" : "codebuild-project", + "codepipeline_name" : "test-codepipeline", + "codepipeline_bucket_name" : "cpplartifacts", + "bitbucket_source_bucket_name" : "src-bitbucket", + "e2e_results_bucket_name" : "test-results", + "pipeline_role_name" : "test-codePipelineRole", + "codebuild_role_name" : "test-codeBuildRole", + "codepipeline_policy_name" : "codepipeline_policy", + "codebuild_policy_name" : "codebuild_policy" +} diff --git a/e2e-etl-python/files/environments/prod.json b/e2e-etl-python/files/environments/prod.json new file mode 100644 index 000000000..1952c66d4 --- /dev/null +++ b/e2e-etl-python/files/environments/prod.json @@ -0,0 +1,13 @@ +{ + "meta_environment" : "PRODUCTIVE", + + "codebuild_project_name" : "codebuild-project", + "codepipeline_name" : "test-codepipeline", + "codepipeline_bucket_name" : "cpplartifacts", + "bitbucket_source_bucket_name" : "src-bitbucket", + "e2e_results_bucket_name" : "test-results", + "pipeline_role_name" : "test-codePipelineRole", + "codebuild_role_name" : "test-codeBuildRole", + "codepipeline_policy_name" : "codepipeline_policy", + "codebuild_policy_name" : "codebuild_policy" +} diff --git a/e2e-etl-python/files/environments/test.json b/e2e-etl-python/files/environments/test.json new file mode 100644 index 000000000..db57fb13f --- /dev/null +++ b/e2e-etl-python/files/environments/test.json @@ -0,0 +1,13 @@ +{ + "meta_environment" : "QUALITYASSURANCE", + + "codebuild_project_name" : "codebuild-project", + "codepipeline_name" : "test-codepipeline", + "codepipeline_bucket_name" : "cpplartifacts", + 
"bitbucket_source_bucket_name" : "src-bitbucket", + "e2e_results_bucket_name" : "test-results", + "pipeline_role_name" : "test-codePipelineRole", + "codebuild_role_name" : "test-codeBuildRole", + "codepipeline_policy_name" : "codepipeline_policy", + "codebuild_policy_name" : "codebuild_policy" +} diff --git a/e2e-etl-python/files/inputs2outputs.tf b/e2e-etl-python/files/inputs2outputs.tf new file mode 100644 index 000000000..13ae9729f --- /dev/null +++ b/e2e-etl-python/files/inputs2outputs.tf @@ -0,0 +1,11 @@ +# This file has been created automatically. +# terraform variables are passed to outputs. +# Following variable names are skipped: '.*[password|secret].*'. + +output "inputs2outputs" { + description = "all inputs passed to outputs" + value = [{ + meta_environment = var.meta_environment + name = var.name + }] +} diff --git a/e2e-etl-python/files/kitchen.yml b/e2e-etl-python/files/kitchen.yml new file mode 100644 index 000000000..bfc77c464 --- /dev/null +++ b/e2e-etl-python/files/kitchen.yml @@ -0,0 +1,29 @@ +driver: + name: terraform + command_timeout: 5400 + +provisioner: + name: terraform + +platforms: +- name: aws + +verifier: + name: terraform + +lifecycle: + pre_verify: + - local: mkdir -p test/integration/${KITCHEN_SUITE_NAME}/files + - local: ./.venv/bin/python3 ./.venv/bin/hcl2tojson test/fixtures/${KITCHEN_SUITE_NAME}/main.tf test/integration/${KITCHEN_SUITE_NAME}/files/main.json + +suites: +- name: default + driver: + root_module_directory: test/fixtures/default + verifier: + systems: + - name: aws + backend: aws + reporter: + - cli + - json:reports/install/data/inspec/pre-install/default.json diff --git a/e2e-etl-python/files/lib/scripts/aws/check_conf.sh b/e2e-etl-python/files/lib/scripts/aws/check_conf.sh new file mode 100644 index 000000000..84ab3cc54 --- /dev/null +++ b/e2e-etl-python/files/lib/scripts/aws/check_conf.sh @@ -0,0 +1,122 @@ +#!/usr/bin/env bash +# +# Author: Erhard Wais +# erhard.wais@boehringer-ingelheim.com +# +# This script does 
some basic checks on the AWS QS and reports potential issues. +# It is triggered via "make check-config" + +# TODO: +# - Return error in case of missconfig + +set -e +set -o pipefail + +#CONST + +DEFAULTBUCKET="" +DEFAULTACCOUNT="" +DOTS="........................................................................." + +BUCKET= +ACCOUNT= +MESSAGE= +HASAWSCONFIGURED=0 + +# functions +function format_message() { + MESSAGE=$1 + local offset=${#MESSAGE} + MESSAGE="$MESSAGE${DOTS:offset:((${#DOTS} - offset))}" +} + +function ok() { + format_message "$1" + echo -e "$MESSAGE\033[42mPassed\033[0m" +} +function nok() { + format_message "$1" + echo -e "$MESSAGE\033[41mFailed\033[0m" +} +function warn() { + format_message "$1" + echo -e "$MESSAGE\033[44m Warn \033[0m" +} +function note() { + format_message "$1" + echo -e "$MESSAGE" +} + +function check_backend() { + BUCKET="$ACCOUNT-terraform-state-bucket" + if [ -n "$BUCKET" ]; then + if [ "$BUCKET" = "$DEFAULTBUCKET" ]; then + nok "TF Backend is not configured. Check your backend.tf file" + else + ok "TF Backend is set to \"$BUCKET\"" + fi + else + nok "TF Backend is not specified. Update your backend.tf file" + fi +} + +function check_env() { + local envaccount=$(grep "account" environments/"$1".yml | awk -F ':' '{print $2}'|tr -d '"'|xargs) + if [ "$envaccount" = "$DEFAULTACCOUNT" ]; then + warn "There is no account configured for the \"$1\" environment" + else + ok "Account \"$envaccount\" is configured for the \"$1\" environment" + fi +} + +function check_aws_credentials() { + local exitStatus=0 + local arn + local user + + if [ -v AWS_ACCESS_KEY_ID ] && [ -v AWS_SECRET_ACCESS_KEY ]; then + ok "AWS account specified using environment variables" + HASAWSCONFIGURED=1 + else + aws sts get-caller-identity &> /dev/null || exitStatus=$? 
+ if [ $exitStatus = 0 ]; then + ok "AWS account configured using SSO" + HASAWSCONFIGURED=1 + else + nok "No AWS account information specified for local development" + fi + fi + + # Check IAM user, Group and Policy + if [[ $HASAWSCONFIGURED = 1 ]]; then + arn=$(aws sts get-caller-identity --query "Arn" --output text) + arn=${arn:13} + ACCOUNT=${arn%:*} + user=${arn##*/} + + ok "Using \"$ACCOUNT:$user\"" + fi +} + +function check_backend_access() { + local exitStatus=0 + + if [ -n "$BUCKET" ] && [ "$BUCKET" != "$DEFAULTBUCKET" ]; then + if [[ "$HASAWSCONFIGURED" = 1 ]]; then + echo touch | aws s3 cp - s3://"$1"/"$2"/testaccess &> /dev/null || exitStatus=$? + if [ $exitStatus = 0 ]; then + ok "Configured AWS credentials have write access to TF Bucket" + else + warn "AWS credentials have no write access to TF Bucket" + fi + fi + fi +} + +# Rund different tests +check_env dev +check_env test +check_env prod +check_aws_credentials +check_backend +check_backend_access "$BUCKET" "$ACCOUNT" diff --git a/e2e-etl-python/files/lib/scripts/createstackfixtureoutputs2yml.sh b/e2e-etl-python/files/lib/scripts/createstackfixtureoutputs2yml.sh new file mode 100644 index 000000000..fced6fcd0 --- /dev/null +++ b/e2e-etl-python/files/lib/scripts/createstackfixtureoutputs2yml.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# +# Author: Josef Hartmann +# josef.hartmann@boehringer-ingelheim.com +# +# This script creates terraform json output, converts it to yaml. +# This yaml file is used for loading terraform outputs as cinc-auditor/inspec inputs using option --input-file= +# +# + +set -e +set -o pipefail + +if [ "x${KITCHEN_SUITE_NAME}" == "x" ]; then + echo "Not running within kitchen." + KITCHEN_SUITE_NAME="default" +fi + +CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." && pwd -P)" +TOJFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/tf-stack-output.json +TOYFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/inputs-from-tfo-stack.yml + +pushd . 
+ +# +# A TF_WORKSPACE might be applied by the environment. +# +cd ${CWD}/test/fixtures/default +terraform output -json > "${TOJFILE}" + +# Convert terraform json outputs to yaml. +# Do not use symbolize_names for keys ("id" -> :id). +# Symbolize_names is no longer required, as kitchen-terraform outputs are created as inspec inputs using this type for keys. +jq 'with_entries(.value |= .value)|with_entries(.key = "output_" + .key)' "${TOJFILE}" | \ + ruby -ryaml -rjson -e 'puts YAML.dump(JSON.parse(STDIN.read, :symbolize_names => false))' > "${TOYFILE}" + +popd diff --git a/e2e-etl-python/files/lib/scripts/createstackoutputs2yml.sh b/e2e-etl-python/files/lib/scripts/createstackoutputs2yml.sh new file mode 100644 index 000000000..88c30d319 --- /dev/null +++ b/e2e-etl-python/files/lib/scripts/createstackoutputs2yml.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash +# +# Author: Josef Hartmann +# josef.hartmann@boehringer-ingelheim.com +# +# This script creates terraform json output, converts it to yaml. +# This yaml file is used for loading terraform outputs as cinc-auditor/inspec inputs using option --input-file= +# +# + +set -e +set -o pipefail + +if [ "x${KITCHEN_SUITE_NAME}" == "x" ]; then + echo "Not running within kitchen." + KITCHEN_SUITE_NAME="default" +fi + +CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." && pwd -P)" +TOJFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/tf-stack-output.json +TOYFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/inputs-from-tfo-stack.yml + +pushd . + +# +# A TF_WORKSPACE might be applied by the environment. +# +terraform output -json > "${TOJFILE}" + +# Convert terraform json outputs to yaml. +# Do not use symbolize_names for keys ("id" -> :id). +# Symbolize_names is no longer required, as kitchen-terraform outputs are created as inspec inputs using this type for keys. 
+jq 'with_entries(.value |= .value)|with_entries(.key = "output_" + .key)' "${TOJFILE}" | \ + ruby -ryaml -rjson -e 'puts YAML.dump(JSON.parse(STDIN.read, :symbolize_names => false))' > "${TOYFILE}" + +popd diff --git a/e2e-etl-python/files/main.tf b/e2e-etl-python/files/main.tf new file mode 100644 index 000000000..a758e80dc --- /dev/null +++ b/e2e-etl-python/files/main.tf @@ -0,0 +1,74 @@ +locals { + account_id = data.aws_caller_identity.current.account_id + unique_name = var.name + + tags = merge(local.common_tags, { + DeploymentDate = formatdate("YYYYMMDD", timestamp()) + InitialDeploymentDate = time_static.deployment.rfc3339 + }) +} + +resource "time_static" "deployment" {} + +data "aws_region" "current" {} +data "aws_caller_identity" "current" {} + +module "codebuild_terraform" { + depends_on = [ module.iam_roles ] + source = "./modules/codebuild" + +# build_project_name = var.build_project_name +# environment_type = var.environment_type +# environment_image = var.environment_image +# image_pull_credentials_type = var.image_pull_credentials_type +# testing_project_name = var.testing_project_name + + codebuild_role_arn = module.iam_roles.codebuild_role_arn + codepipeline_bucket_name = module.s3_artifacts_bucket.cp_bucket_name + e2e_results_bucket_name = module.s3_artifacts_bucket.e2e_results_bucket_name + local_id = local.id + projectId = var.projectId + environment = var.environment +} + +module "codepipeline_terraform" { + + source = "./modules/codepipeline" + +# codepipeline_name = var.codepipeline_name + + codepipeline_bucket_name = module.s3_artifacts_bucket.cp_bucket_name + codepipeline_role_arn = module.iam_roles.codepipeline_role_arn + bitbucket_source_bucket_name = module.s3_artifacts_bucket.bitbucket_s3bucket_name + codebuild_project_name = module.codebuild_terraform.codebuild_project_name + + local_id = local.id + projectId = var.projectId + repository = var.repository + branch_name = var.branch_name +} + +module "iam_roles" { + source = 
"./modules/iam_roles" + +# pipeline_role_name = var.pipeline_role_name +# codebuild_role_name = var.codebuild_role_name +# codepipeline_policy_name = var.codepipeline_policy_name +# codebuild_policy_name = var.codebuild_policy_name + + local_id = local.id + projectId = var.projectId +} + +module "s3_artifacts_bucket" { + source = "./modules/s3-bucket" + +# codepipeline_bucket_name = var.codepipeline_bucket_name +# bitbucket_source_bucket_name = var.bitbucket_source_bucket_name +# e2e_results_bucket_name = var.codepipeline_bucket_name + + local_id = local.id + projectId = var.projectId +} + + diff --git a/e2e-etl-python/files/metadata.yml b/e2e-etl-python/files/metadata.yml new file mode 100644 index 000000000..0c2a142b6 --- /dev/null +++ b/e2e-etl-python/files/metadata.yml @@ -0,0 +1,7 @@ +--- +name: e2e-etl-python +# yamllint disable-line rule:line-length +description: "This end-to-end testing project was generated from the e2e-etl-python ODS quickstarter." +supplier: https://es.python.org/ +version: 1.0 +type: ods-test diff --git a/e2e-etl-python/files/modules/codebuild/main.tf b/e2e-etl-python/files/modules/codebuild/main.tf new file mode 100644 index 000000000..dbfa2a1c6 --- /dev/null +++ b/e2e-etl-python/files/modules/codebuild/main.tf @@ -0,0 +1,96 @@ + +resource "aws_codebuild_project" "build_project" { + name = "${var.projectId}-e2e-cb-${var.aws_region}-${var.codebuild_project_name}-${var.local_id}" //"CodeBuild-project-test" + service_role = var.codebuild_role_arn + build_timeout = var.build_timeout + + artifacts { + type = var.artifacts_type + } + + environment { + compute_type = var.environment_compute_type + image = var.environment_image + type = var.environment_type + image_pull_credentials_type = var.image_pull_credentials_type + + environment_variable { + name = "ENVIRONMENT" + value = var.environment + } + } + + + source { + type = var.source_type + report_build_status = var.report_build_status + buildspec = <<-EOT + version: 0.2 + + phases: + 
install: + runtime-versions: + python: ${var.env_version} + + pre_build: + commands: + - pip install -r requirements.txt + - npm install -g allure-commandline --save-dev + + build: + commands: + - python tests/acceptance/great_expectations/test_preparation/pre_requisites.py + - python utils/checkpoints_executions.py + - python tests/acceptance/great_expectations/test_preparation/post_requisites.py + - python -m pytest --alluredir=pytest/test_results/acceptance --junitxml=pytest/test_results/junit/acceptance_pytest_junit.xml tests/acceptance/pytest + - python -m pytest --alluredir=pytest/test_results/installation --junitxml=pytest/test_results/junit/installation_pytest_junit.xml tests/installation + - python -m pytest --alluredir=pytest/test_results/integration --junitxml=pytest/test_results/junit/integration_pytest_junit.xml tests/integration + + post_build: + commands: + - (cd tests/acceptance && great_expectations -y docs build) + - aws s3 cp tests/acceptance/great_expectations/uncommitted/data_docs/local_site s3://${var.e2e_results_bucket_name}/GX_test_results --recursive + - aws s3 cp tests/acceptance/great_expectations/uncommitted/validations s3://${var.e2e_results_bucket_name}/GX_jsons --recursive + - python utils/json2JUnit.py + + - aws s3 cp s3://${var.e2e_results_bucket_name}/pytest_results/acceptance/history pytest/test_results/acceptance/history --recursive + - aws s3 cp s3://${var.e2e_results_bucket_name}/pytest_results/installation/history pytest/test_results/installation/history --recursive + - aws s3 cp s3://${var.e2e_results_bucket_name}/pytest_results/integration/history pytest/test_results/integration/history --recursive + + - allure generate pytest/test_results/acceptance -o pytest/acceptance_allure_report --clean + - allure generate pytest/test_results/installation -o pytest/installation_allure_report --clean + - allure generate pytest/test_results/integration -o pytest/integration_allure_report --clean + + + - allure-combine 
pytest/acceptance_allure_report + - allure-combine pytest/installation_allure_report + - allure-combine pytest/integration_allure_report + + - aws s3 cp pytest/acceptance_allure_report/history s3://${var.e2e_results_bucket_name}/pytest_results/acceptance/history --recursive + - aws s3 cp pytest/installation_allure_report/history s3://${var.e2e_results_bucket_name}/pytest_results/installation/history --recursive + - aws s3 cp pytest/integration_allure_report/history s3://${var.e2e_results_bucket_name}/pytest_results/integration/history --recursive + + - aws s3 cp pytest/acceptance_allure_report/complete.html s3://${var.e2e_results_bucket_name}/pytest_results/acceptance/acceptance_allure_report_complete.html + - aws s3 cp pytest/installation_allure_report/complete.html s3://${var.e2e_results_bucket_name}/pytest_results/installation/installation_allure_report_complete.html + - aws s3 cp pytest/integration_allure_report/complete.html s3://${var.e2e_results_bucket_name}/pytest_results/integration/integration_allure_report_complete.html + + - aws s3 cp tests/acceptance/great_expectations/uncommitted/validations/junit.xml s3://${var.e2e_results_bucket_name}/junit/acceptance_GX_junit.xml + - aws s3 cp pytest/test_results/junit/acceptance_pytest_junit.xml s3://${var.e2e_results_bucket_name}/junit/acceptance_pytest_junit.xml + - aws s3 cp pytest/test_results/junit/integration_pytest_junit.xml s3://${var.e2e_results_bucket_name}/junit/integration_pytest_junit.xml + - aws s3 cp pytest/test_results/junit/installation_pytest_junit.xml s3://${var.e2e_results_bucket_name}/junit/installation_pytest_junit.xml + + reports: + GX_reports: + files: + - junit.xml + base-directory: tests/acceptance/great_expectations/uncommitted/validations/ + file-format: JUNITXML + Allure_report: + files: + - acceptance_pytest_junit.xml + base-directory: pytest/test_results/junit/ + file-format: JUNITXML + + EOT + } +} diff --git a/e2e-etl-python/files/modules/codebuild/output.tf 
b/e2e-etl-python/files/modules/codebuild/output.tf new file mode 100644 index 000000000..d2d239345 --- /dev/null +++ b/e2e-etl-python/files/modules/codebuild/output.tf @@ -0,0 +1,12 @@ +output "codebuild_project_name" { + value = aws_codebuild_project.build_project.name + description = "Name of the CodeBuild project" +} +output "codebuild_project_arn" { + value = aws_codebuild_project.build_project.arn + description = "ARN of the CodeBuild project" +} +output "codebuild_project_id" { + value = aws_codebuild_project.build_project.id + description = "ID of the CodeBuild project" +} diff --git a/e2e-etl-python/files/modules/codebuild/variables.tf b/e2e-etl-python/files/modules/codebuild/variables.tf new file mode 100644 index 000000000..6c6ab022d --- /dev/null +++ b/e2e-etl-python/files/modules/codebuild/variables.tf @@ -0,0 +1,117 @@ +variable "codebuild_project_name" { + description = "codebuild project name" + type = string + default = "codebuild-project" +} + +variable "codebuild_role_arn" { + description = "Codebuild IAM role arn. 
" + type = string +} + +variable "build_timeout" { + description = "Build Timeout" + type = number + default = 60 +} + +variable "artifacts_type" { + description = "type to store Artifacts" + type = string + default = "CODEPIPELINE" +} + +variable "environment_compute_type" { + description = "environment_compute_type" + type = string + default = "BUILD_GENERAL1_SMALL" +} + +variable "environment_image" { + description = "environment_image" + type = string + default = "aws/codebuild/standard:5.0" +} + +variable "environment_type" { + description = "environment_type" + type = string + default = "LINUX_CONTAINER" +} + +variable "image_pull_credentials_type" { + description = "image_pull_credentials_type" + type = string + default = "CODEBUILD" +} + +variable "source_type" { + description = "Artifacts_source_type" + type = string + default = "CODEPIPELINE" +} + +variable "env_version" { + type = string + default = "3.9" +} + +variable "report_build_status" { + description = "report_build_status" + type = bool + default = false +} + +variable "GXtest_project_name" { + description = "codebuild Great Expectation project name" + type = string + default = "GXtest-project" +} +variable "GX_reporting_project_name" { + description = "Great Expectations reporting project name" + type = string + default = "GX_reporting-project" +} +variable "Pytest_project_name" { + description = "Pytest testing project name" + type = string + default = "Pytest-project" +} +variable "Pytest_reporting_project_name" { + description = "Pytest reporting project name" + type = string + default = "Pytest_reporting-project" +} + +variable "codepipeline_bucket_name" { + description = "s3_bucket_name" + type = string +} + +variable "e2e_results_bucket_name" { + description = "s3_bucket_for_results_artifacts" + type = string +} + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string + default = 
"projectId" +} + +variable "aws_region" { + description = "AWS infrastructure regio" + type = string + default = "eu-west-1" +} + +variable "environment" { + description = "The project execution environment." + type = string + default = "dev" +} diff --git a/e2e-etl-python/files/modules/codepipeline/main.tf b/e2e-etl-python/files/modules/codepipeline/main.tf new file mode 100644 index 000000000..f16ac6378 --- /dev/null +++ b/e2e-etl-python/files/modules/codepipeline/main.tf @@ -0,0 +1,53 @@ + +provider "aws" { + region = var.aws_region +} + +resource "aws_codepipeline" "codepipeline" { + name = "${var.projectId}-e2e-cppl-${var.aws_region}-${var.codepipeline_name}-${var.local_id}" + role_arn = var.codepipeline_role_arn + + artifact_store { + type = var.artifacts_store_type + location = var.codepipeline_bucket_name + } + + stage { + name = "Source" + + action { + name = "Source" + category = "Source" + owner = "AWS" + provider = var.source_provider + version = "1" + output_artifacts = ["source_output"] + + configuration = { + S3Bucket = var.bitbucket_source_bucket_name + S3ObjectKey = "${var.repository}-${var.branch_name}.zip" + PollForSourceChanges = false + } + } + } + + stage { + name = "Test" + + action { + name = "Test" + category = "Build" + provider = "CodeBuild" + owner = "AWS" + input_artifacts = ["source_output"] + output_artifacts = ["install_output"] + version = "1" + configuration = { + ProjectName = var.codebuild_project_name + } + } + } +} + + + diff --git a/e2e-etl-python/files/modules/codepipeline/output.tf b/e2e-etl-python/files/modules/codepipeline/output.tf new file mode 100644 index 000000000..bda8f4ea9 --- /dev/null +++ b/e2e-etl-python/files/modules/codepipeline/output.tf @@ -0,0 +1,14 @@ +output "aws_codepipeline_arn" { + value = aws_codepipeline.codepipeline.arn + description = "The ARN of the CodePipeline" +} + +output "aws_codepipeline_id" { + value = aws_codepipeline.codepipeline.id + description = "The id of the CodePipeline" +} + +output 
"aws_codepipeline_name" { + value = aws_codepipeline.codepipeline.name + description = "The name of the CodePipeline" +} diff --git a/e2e-etl-python/files/modules/codepipeline/variables.tf b/e2e-etl-python/files/modules/codepipeline/variables.tf new file mode 100644 index 000000000..5a89ce6c3 --- /dev/null +++ b/e2e-etl-python/files/modules/codepipeline/variables.tf @@ -0,0 +1,67 @@ +variable "codepipeline_name" { + description = "the codepipeline name" + type = string + default = "test-codepipeline" +} + +variable "codepipeline_bucket_name" { + description = "s3_bucket_name" + type = string +} + +variable "codepipeline_role_arn" { + description = "ARN of the codepipeline IAM role" + type = string +} + +variable "bitbucket_source_bucket_name" { + description = "s3_source_bucket" + type = string +} + +variable "artifacts_store_type" { + description = "Artifacts store type" + type = string + default = "S3" +} + +variable "source_provider" { + description = "source_provider" + type = string + default = "S3" +} + +variable "branch_name" { + description = "branch_name" + type = string + default = "master" +} + +variable "codebuild_project_name" { + description = "codebuild project name" + type = string +} + + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string + default = "projectId" +} + +variable "aws_region" { + description = "AWS infrastructure region" + type = string + default = "eu-west-1" +} + +variable "repository" { + description = "QS bitbucket repository" + type = string + default = "e2e-etl-python" +} diff --git a/e2e-etl-python/files/modules/iam_roles/main.tf b/e2e-etl-python/files/modules/iam_roles/main.tf new file mode 100644 index 000000000..e1d6a9144 --- /dev/null +++ b/e2e-etl-python/files/modules/iam_roles/main.tf @@ -0,0 +1,79 @@ +resource "aws_iam_role" "codepipeline_role" { + name = 
"${var.projectId}-e2e-IAMrole-${var.aws_region}-${var.pipeline_role_name}-${var.local_id}" + assume_role_policy = data.aws_iam_policy_document.codepipeline_assume_role.json +} + +resource "aws_iam_role" "codebuild_role" { + name = "${var.projectId}-e2e-IAMrole-${var.aws_region}-${var.codebuild_role_name}-${var.local_id}" + assume_role_policy = data.aws_iam_policy_document.codebuild_assume_role.json +} + +resource "aws_iam_role_policy" "codepipeline_policy" { + name = "${var.projectId}-e2e-policy-${var.aws_region}-${var.codepipeline_policy_name}-${var.local_id}" + role = aws_iam_role.codepipeline_role.id + policy = data.aws_iam_policy_document.codepipeline_policy.json +} + +resource "aws_iam_role_policy" "codebuild_policy" { + name = "${var.projectId}-e2e-policy-${var.aws_region}-${var.codebuild_policy_name}-${var.local_id}" + role = aws_iam_role.codebuild_role.id + policy = data.aws_iam_policy_document.codebuild_policy.json +} + +data "aws_iam_policy_document" "codepipeline_assume_role" { + statement { + effect = "Allow" + + principals { + type = "Service" + identifiers = ["codepipeline.amazonaws.com"] + } + + actions = ["sts:AssumeRole"] + } +} + +data "aws_iam_policy_document" "codebuild_assume_role" { + statement { + effect = "Allow" + + principals { + type = "Service" + identifiers = ["codebuild.amazonaws.com"] + } + + actions = ["sts:AssumeRole"] + } +} + +data "aws_iam_policy_document" "codepipeline_policy" { + statement { + sid = "" + actions = [ + "cloudwatch:*", + "s3:*", + "codebuild:*" + ] + resources = ["*"] + effect = "Allow" + } +} + +data "aws_iam_policy_document" "codebuild_policy" { + statement { + sid = "" + actions = [ + "cloudwatch:*", + "logs:*", + "s3:*", + "codebuild:*", + "secretsmanager:*", + "iam:*", + "athena:*", + "glue:*", + "codepipeline:*" + ] + resources = ["*"] + effect = "Allow" + } +} diff --git a/e2e-etl-python/files/modules/iam_roles/outputs.tf b/e2e-etl-python/files/modules/iam_roles/outputs.tf new file mode 100644 index 
000000000..188da5286 --- /dev/null +++ b/e2e-etl-python/files/modules/iam_roles/outputs.tf @@ -0,0 +1,11 @@ +output "codepipeline_role_arn" { + value = try(aws_iam_role.codepipeline_role.arn, "") + description = "role arn" +} + +output "codebuild_role_arn" { + value = try(aws_iam_role.codebuild_role.arn, "") + description = "role arn" +} + + diff --git a/e2e-etl-python/files/modules/iam_roles/variables.tf b/e2e-etl-python/files/modules/iam_roles/variables.tf new file mode 100644 index 000000000..78d9706a3 --- /dev/null +++ b/e2e-etl-python/files/modules/iam_roles/variables.tf @@ -0,0 +1,38 @@ +variable "pipeline_role_name" { + description = "role_name" + type = string + default = "test-codePipelineRole" +} +variable "codebuild_role_name" { + description = "role_name" + type = string + default = "test-codeBuildRole" +} + +variable "codepipeline_policy_name" { + description = "Codepipeline_policy_name" + type = string + default = "codepipeline_policy" +} +variable "codebuild_policy_name" { + description = "Codebuild_policy_name" + type = string + default = "codebuild_policy" +} + + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string +} + +variable "aws_region" { + description = "AWS infrastructure region" + type = string + default = "eu-west-1" +} diff --git a/e2e-etl-python/files/modules/s3-bucket/main.tf b/e2e-etl-python/files/modules/s3-bucket/main.tf new file mode 100644 index 000000000..49603f50e --- /dev/null +++ b/e2e-etl-python/files/modules/s3-bucket/main.tf @@ -0,0 +1,33 @@ +resource "aws_s3_bucket" "codepipeline_bucket" { + bucket = "${var.projectId}-e2e-s3-${var.aws_region}-${var.codepipeline_bucket_name}-${var.local_id}" +} +resource "aws_s3_bucket_versioning" "s3versioning-cp" { + bucket = aws_s3_bucket.codepipeline_bucket.id + + versioning_configuration { + status = var.s3_versioning_cp + } +} + +resource "aws_s3_bucket" "e2e_results_bucket" { 
+ bucket = "${var.projectId}-e2e-s3-${var.aws_region}-${var.e2e_results_bucket_name}-${var.local_id}" +} +resource "aws_s3_bucket_versioning" "s3versioning-artfcs" { + bucket = aws_s3_bucket.e2e_results_bucket.id + + versioning_configuration { + status = var.s3_versioning_results + } +} + +resource "aws_s3_bucket" "source_bitbucket_bucket" { + bucket = "${var.projectId}-e2e-s3-${var.aws_region}-${var.bitbucket_source_bucket_name}-${var.local_id}" +} +resource "aws_s3_bucket_versioning" "s3versioning-bucket" { + bucket = aws_s3_bucket.source_bitbucket_bucket.id + + versioning_configuration { + status = var.s3_versioning_bitbuckets3 + } +} + diff --git a/e2e-etl-python/files/modules/s3-bucket/outputs.tf b/e2e-etl-python/files/modules/s3-bucket/outputs.tf new file mode 100644 index 000000000..696d46579 --- /dev/null +++ b/e2e-etl-python/files/modules/s3-bucket/outputs.tf @@ -0,0 +1,38 @@ +output "cp_bucket_arn" { + value = aws_s3_bucket.codepipeline_bucket.arn + description = "The ARN of the S3 Bucket" +} +output "cp_bucket_name" { + value = aws_s3_bucket.codepipeline_bucket.bucket + description = "The Name of the S3 Bucket" +} +output "cp_bucket_id" { + value = aws_s3_bucket.codepipeline_bucket.id + description = "The ID of the S3 Bucket" +} + +output "e2e_results_bucket_arn" { + value = aws_s3_bucket.e2e_results_bucket.arn + description = "The ARN of the results artifacts S3 Bucket" +} +output "e2e_results_bucket_name" { + value = aws_s3_bucket.e2e_results_bucket.bucket + description = "The Name of the results artifacts S3 Bucket" +} +output "e2e_results_bucket_id" { + value = aws_s3_bucket.e2e_results_bucket.id + description = "The ID of the results artifacts S3 Bucket" +} + +output "bitbucket_s3bucket_arn" { + value = aws_s3_bucket.source_bitbucket_bucket.arn + description = "The ARN of the bitbucket S3 Bucket" +} +output "bitbucket_s3bucket_name" { + value = aws_s3_bucket.source_bitbucket_bucket.bucket + description = "The Name of the bitbucket S3 Bucket" +} 
+output "bitbucket_s3bucket_id" { + value = aws_s3_bucket.source_bitbucket_bucket.id + description = "The ID of the bitbucket S3 Bucket" +} diff --git a/e2e-etl-python/files/modules/s3-bucket/variables.tf b/e2e-etl-python/files/modules/s3-bucket/variables.tf new file mode 100644 index 000000000..5f37563c8 --- /dev/null +++ b/e2e-etl-python/files/modules/s3-bucket/variables.tf @@ -0,0 +1,51 @@ +variable "codepipeline_bucket_name" { + type = string + default = "cpplartifacts" +} + +variable "bitbucket_source_bucket_name" { + description = "Source bitbucket s3 bucket name" + type = string + default = "src-bitbucket" +} + +variable "e2e_results_bucket_name" { + description = "s3_bucket_for_results_artifacts" + type = string + default = "test-results" +} + +variable "s3_versioning_cp" { + description = "s3 versioning for codepipeline bucket" + type = string + default = "Enabled" +} + +variable "s3_versioning_bitbuckets3" { + description = "s3 versioning for source bucket" + type = string + default = "Enabled" +} + +variable "s3_versioning_results" { + description = "s3 versioning for results bucket" + type = string + default = "Enabled" +} + + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string +} + +variable "aws_region" { + description = "AWS infrastructure region" + type = string + default = "eu-west-1" +} diff --git a/e2e-etl-python/files/outputs.tf b/e2e-etl-python/files/outputs.tf new file mode 100644 index 000000000..4c9d98a4b --- /dev/null +++ b/e2e-etl-python/files/outputs.tf @@ -0,0 +1,61 @@ +# ----------------------------------------------------------------------------- +# OUTPUTS +# This stack supports the following output values. +# Documentation: https://www.terraform.io/docs/configuration/outputs.html +# ----------------------------------------------------------------------------- + +output "name" { + description = "The name of the stack." 
+ value = var.name +} + +output "meta_environment" { + description = "The type of the environment." + value = var.meta_environment +} + +output "aws_region" { + description = "The current region." + value = data.aws_region.current.name +} + + +output "codebuild_name" { + value = module.codebuild_terraform.codebuild_project_name + description = "The Name of the Codebuild Project" +} +output "codebuild_arn" { + value = module.codebuild_terraform.codebuild_project_arn + description = "The ARN of the Codebuild Project" +} + +output "codepipeline_name" { + value = module.codepipeline_terraform.aws_codepipeline_name + description = "The Name of the CodePipeline" +} +output "codepipeline_arn" { + value = module.codepipeline_terraform.aws_codepipeline_arn + description = "The ARN of the CodePipeline" +} + +output "cp_iam_arn" { + value = module.iam_roles.codepipeline_role_arn + description = "The ARN of the IAM Role used by the CodePipeline" +} +output "cb_iam_arn" { + value = module.iam_roles.codebuild_role_arn + description = "The ARN of the IAM Role used by the CodeBuild project" +} + +output "bitbucket_s3bucket_name" { + value = module.s3_artifacts_bucket.bitbucket_s3bucket_name + description = "The Name of the bitbucket S3 Bucket" +} +output "cp_bucket_name" { + value = module.s3_artifacts_bucket.cp_bucket_name + description = "The Name of the S3 Bucket" +} +output "e2e_results_bucket_name" { + value = module.s3_artifacts_bucket.e2e_results_bucket_name + description = "The Name of the results artifacts S3 Bucket" +} diff --git a/e2e-etl-python/files/pytest.ini b/e2e-etl-python/files/pytest.ini new file mode 100644 index 000000000..79d7a8825 --- /dev/null +++ b/e2e-etl-python/files/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +python_functions = *_test diff --git a/e2e-etl-python/files/random.tf b/e2e-etl-python/files/random.tf new file mode 100644 index 000000000..b0e6c90df --- /dev/null +++ b/e2e-etl-python/files/random.tf @@ -0,0 +1,13 @@ +resource "random_id" "id" { + keepers = 
{ + # Create a new random ID iff the workspace name changes. + lifecycle = terraform.workspace + } + + byte_length = 4 +} + +locals { + id = random_id.id.hex +} + diff --git a/e2e-etl-python/files/release-manager.yml b/e2e-etl-python/files/release-manager.yml new file mode 100644 index 000000000..23d65c7ef --- /dev/null +++ b/e2e-etl-python/files/release-manager.yml @@ -0,0 +1,2 @@ +--- +dependencies: [] diff --git a/e2e-etl-python/files/reports/install/.gitkeep b/e2e-etl-python/files/reports/install/.gitkeep new file mode 100644 index 000000000..9074a39d4 --- /dev/null +++ b/e2e-etl-python/files/reports/install/.gitkeep @@ -0,0 +1 @@ +/report.* diff --git a/e2e-etl-python/files/requirements.txt b/e2e-etl-python/files/requirements.txt new file mode 100644 index 000000000..fd149701f --- /dev/null +++ b/e2e-etl-python/files/requirements.txt @@ -0,0 +1,19 @@ +great_expectations == 0.18.3 +sqlalchemy == 2.0.23 +pyathena[SQLAlchemy] == 3.0.10 +pytest == 7.4.3 +allure-pytest == 2.13.2 +allure-combine == 1.0.11 +boto3 == 1.29.6 +pandas == 2.1.3 +numpy == 1.26.2 +pytest-timeout == 2.2.0 +pytest-ordering == 0.6 +pytest-repeat == 0.9.3 +pyspark == 3.5.0 +pytz == 2023.3.post1 +snowflake-connector-python == 3.6.0 +cryptography == 41.0.7 +psycopg2-binary == 2.9.1 +snowflake-snowpark-python == 1.11.1 + diff --git a/e2e-etl-python/files/stackmodulesoutputs.tf b/e2e-etl-python/files/stackmodulesoutputs.tf new file mode 100644 index 000000000..5c5d93d1d --- /dev/null +++ b/e2e-etl-python/files/stackmodulesoutputs.tf @@ -0,0 +1,2 @@ +# This file has been created automatically. 
+ diff --git a/e2e-etl-python/files/terraform-data.tf b/e2e-etl-python/files/terraform-data.tf new file mode 100644 index 000000000..63265d73c --- /dev/null +++ b/e2e-etl-python/files/terraform-data.tf @@ -0,0 +1,14 @@ +locals { + terraform-data = { + id = local.id + name = var.name + tags = local.tags + current_region = data.aws_region.current.name + } +} + +resource "local_file" "terraform-data" { + filename = "${path.module}/.terraform-data.json" + content = jsonencode(local.terraform-data) +} + diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/.gitignore b/e2e-etl-python/files/tests/acceptance/great_expectations/.gitignore new file mode 100644 index 000000000..40e0c4641 --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/.gitignore @@ -0,0 +1,2 @@ +uncommitted/ +expectations/.ge_store_backend_id diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml b/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml new file mode 100644 index 000000000..3bcdcf4d4 --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml @@ -0,0 +1,32 @@ +name: athena_checkpoint +config_version: 1.0 +template_name: +module_name: great_expectations.checkpoint +class_name: Checkpoint +run_name_template: '%Y%m%d-%H%M%S-verification-no-failures' +expectation_suite_name: +batch_request: {} +action_list: + - name: store_validation_result + action: + class_name: StoreValidationResultAction + - name: store_evaluation_params + action: + class_name: StoreEvaluationParametersAction + - name: update_data_docs + action: + class_name: UpdateDataDocsAction + site_names: [] +evaluation_parameters: {} +runtime_configuration: {} +validations: + - batch_request: + datasource_name: AWS-Athena-datasource + data_connector_name: default_configured_data_connector_name + data_asset_name: address + data_connector_query: + 
index: -1 + expectation_suite_name: athena_validation_suite +profilers: [] +ge_cloud_id: +expectation_suite_ge_cloud_id: diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml b/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml new file mode 100644 index 000000000..aac60db9d --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml @@ -0,0 +1,32 @@ +name: person_checkpoint +config_version: 1.0 +template_name: +module_name: great_expectations.checkpoint +class_name: Checkpoint +run_name_template: '%Y%m%d-%H%M%S-verification-no-failures' +expectation_suite_name: +batch_request: {} +action_list: + - name: store_validation_result + action: + class_name: StoreValidationResultAction + - name: store_evaluation_params + action: + class_name: StoreEvaluationParametersAction + - name: update_data_docs + action: + class_name: UpdateDataDocsAction + site_names: [] +evaluation_parameters: {} +runtime_configuration: {} +validations: + - batch_request: + datasource_name: AWS-Athena-datasource + data_connector_name: default_configured_data_connector_name + data_asset_name: person + data_connector_query: + index: -1 + expectation_suite_name: person_validation_suite +profilers: [] +ge_cloud_id: +expectation_suite_ge_cloud_id: diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json b/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json new file mode 100644 index 000000000..949ba1234 --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json @@ -0,0 +1,68 @@ +{ + "data_asset_type": null, + "expectation_suite_name": "athena_validation_suite", + "expectations": [ + { + "expectation_type": "expect_table_columns_to_match_set", + "kwargs": { + "column_set": [ + 
"_hoodie_commit_time", + "_hoodie_commit_seqno", + "_hoodie_record_key", + "_hoodie_partition_path", + "_hoodie_file_name", + "address_id", + "address_line_1", + "address_line_2", + "address_line_3", + "address_line_4", + "address_owner_key_1", + "address_owner_key_2", + "address_owner_key_3", + "address_owner_key_4", + "address_owner_key_5", + "address_type_code", + "country_code", + "last_updated_date", + "owner", + "post_zip_code", + "primary_address_flag", + "province_county", + "province_county_code", + "table_short_name", + "town_city", + "updated_during_mon_visit_by", + "update_count", + "aud_action_flag", + "aud_date_changed", + "aud_personnel_no", + "time_zone_offset", + "transaction_no", + "ingestion_timestamp", + "pr_tab_hist_hkey", + "pr_tab_hkey", + "audit_id", + "audit_task_id", + "int_tec_from_dt", + "int_tec_to_dt", + "curr_flg", + "del_flg", + "modulekey" + ] + }, + "meta": {} + }, + { + "expectation_type": "expect_table_row_count_to_equal", + "kwargs": { + "value": 0 + }, + "meta": {} + } + ], + + "ge_cloud_id": null, + "meta": { + "great_expectations_version": "0.17.9" + } +} diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json b/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json new file mode 100644 index 000000000..db8ea030a --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json @@ -0,0 +1,40 @@ +{ + "data_asset_type": null, + "expectation_suite_name": "person_validation_suite", + "expectations": [ + { + "expectation_type": "expect_table_columns_to_match_set", + "kwargs": { + "column_set": [ + "name", + "surname", + "age", + "location" ] + }, + "meta": {} + }, + { + "expectation_type": "expect_column_value_lengths_to_be_between", + "kwargs": { + "column": "name", + "min_value": 3, + "max_value": 10 + + }, + "meta": {} + }, + { + "expectation_type": 
"expect_column_values_to_not_be_null", + "kwargs": { + "column": "name", + "mostly": 0.8 + }, + "meta": {} + } + ], + + "ge_cloud_id": null, + "meta": { + "great_expectations_version": "0.17.9" + } +} diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/great_expectations.yml b/e2e-etl-python/files/tests/acceptance/great_expectations/great_expectations.yml new file mode 100644 index 000000000..642b55d73 --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/great_expectations.yml @@ -0,0 +1,106 @@ +# Welcome to Great Expectations! Always know what to expect from your data. +# +# Here you can define datasources, batch kwargs generators, integrations and +# more. This file is intended to be committed to your repo. For help with +# configuration please: +# - Read our docs: https://docs.greatexpectations.io/docs/guides/connecting_to_your_data/connect_to_data_overview/#2-configure-your-datasource +# - Join our slack channel: http://greatexpectations.io/slack + +# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility +# It is auto-generated and usually does not need to be changed. +config_version: 3.0 + +# Datasources tell Great Expectations where your data lives and how to get it. +# You can use the CLI command `great_expectations datasource new` to help you +# add a new datasource. 
Read more at https://docs.greatexpectations.io/docs/guides/connecting_to_your_data/connect_to_data_overview +datasources: + AWS-Athena-datasource: + class_name: Datasource + module_name: great_expectations.datasource + execution_engine: + class_name: SqlAlchemyExecutionEngine + module_name: great_expectations.execution_engine + connection_string: ${connection_string} + data_connectors: + default_configured_data_connector_name: + name: default_configured_data_connector_name + class_name: ConfiguredAssetSqlDataConnector + module_name: great_expectations.datasource.data_connector + assets: + address: + class_name: Asset + module_name: great_expectations.datasource.data_connector.asset + schema_name: greatexpectationsdb + person: + class_name: Asset + module_name: great_expectations.datasource.data_connector.asset + schema_name: greatexpectationsdb +config_variables_file_path: uncommitted/config_variables.yml + +# The plugins_directory will be added to your python path for custom modules +# used to override and extend Great Expectations. +plugins_directory: plugins/ + +stores: +# Stores are configurable places to store things like Expectations, Validations +# Data Docs, and more. These are for advanced users only - most users can simply +# leave this section alone. +# +# Three stores are required: expectations, validations, and +# evaluation_parameters, and must exist with a valid store entry. Additional +# stores can be configured for uses such as data_docs, etc. 
+ expectations_store: + class_name: ExpectationsStore + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: expectations/ + + validations_store: + class_name: ValidationsStore + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/validations/ + + evaluation_parameter_store: + class_name: EvaluationParameterStore + checkpoint_store: + class_name: CheckpointStore + store_backend: + class_name: TupleFilesystemStoreBackend + suppress_store_backend_id: true + base_directory: checkpoints/ + + profiler_store: + class_name: ProfilerStore + store_backend: + class_name: TupleFilesystemStoreBackend + suppress_store_backend_id: true + base_directory: profilers/ + +expectations_store_name: expectations_store +validations_store_name: validations_store +evaluation_parameter_store_name: evaluation_parameter_store +checkpoint_store_name: checkpoint_store + +data_docs_sites: + # Data Docs make it simple to visualize data quality in your project. These + # include Expectations, Validations & Profiles. The are built for all + # Datasources from JSON artifacts in the local repo including validations & + # profiles from the uncommitted directory. 
Read more at https://docs.greatexpectations.io/docs/terms/data_docs + local_site: + class_name: SiteBuilder + show_how_to_buttons: true + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/data_docs/local_site/ + site_index_builder: + class_name: DefaultSiteIndexBuilder + +anonymous_usage_statistics: + data_context_id: caaa9cd1-4ad3-47ad-b01c-614ea6ad8e89 + enabled: true +notebooks: +include_rendered_content: + globally: false + expectation_suite: false + expectation_validation_result: false diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css b/e2e-etl-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css new file mode 100644 index 000000000..8bf5a1521 --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css @@ -0,0 +1,22 @@ +/*index page*/ +.ge-index-page-site-name-title {} +.ge-index-page-table-container {} +.ge-index-page-table {} +.ge-index-page-table-profiling-links-header {} +.ge-index-page-table-expectations-links-header {} +.ge-index-page-table-validations-links-header {} +.ge-index-page-table-profiling-links-list {} +.ge-index-page-table-profiling-links-item {} +.ge-index-page-table-expectation-suite-link {} +.ge-index-page-table-validation-links-list {} +.ge-index-page-table-validation-links-item {} + +/*breadcrumbs*/ +.ge-breadcrumbs {} +.ge-breadcrumbs-item {} + +/*navigation sidebar*/ +.ge-navigation-sidebar-container {} +.ge-navigation-sidebar-content {} +.ge-navigation-sidebar-title {} +.ge-navigation-sidebar-link {} diff --git a/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py b/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py new file mode 100644 index 000000000..01c315bde --- /dev/null +++ 
b/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py @@ -0,0 +1,65 @@ +import boto3 +import json + +""" + +This is an example of what you could do as a post-requisite. In the pre_requistie.py. It is intended to be used as a clean up step +to remove any data set, or reset your system to its initial state. + +In this scenario we deployed on the prerequisites an athena dabase that would be tested. +Now, in this post_requisite.py we delete the database and the tables inside of it. + +""" + + +def get_terraform_outputs(): + with open('terraform_outputs.json') as file: + output_json = json.load(file) + + return output_json + +def delete_test_database(): + tf_outputs = get_terraform_outputs() + aws_region = tf_outputs["aws_region"]["value"] + client = boto3.client('athena', region_name=aws_region) + q_delete_address_table = "DROP TABLE IF EXISTS address" + q_delete_person_table = "DROP TABLE IF EXISTS person" + q_delete_db = "DROP DATABASE IF EXISTS greatexpectationsdb" + execute_query(client, q_delete_address_table) + execute_query(client, q_delete_person_table) + execute_query(client, q_delete_db) + +def execute_query(client, query): + tf_outputs = get_terraform_outputs() + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + response = client.start_query_execution( + QueryString=query, + QueryExecutionContext={ + 'Database': 'greatexpectationsdb' + }, + ResultConfiguration={ + 'OutputLocation': f's3://{bucket_name}/db_test_outputs/', + } + ) + print("Query execution ID: ", response['QueryExecutionId']) + + +def remove_unnecesarry_objects_s3src(): + tf_outputs = get_terraform_outputs() + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + s3 = boto3.resource('s3') + bucket = s3.Bucket(bucket_name) + for obj in bucket.objects.all(): + if not obj.key.endswith('.zip'): + obj.delete() + + + + +def main(): + delete_test_database() + remove_unnecesarry_objects_s3src() + + +if __name__ == "__main__": + main() diff 
--git a/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py b/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py new file mode 100644 index 000000000..698e5b9b1 --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py @@ -0,0 +1,190 @@ +import os +import boto3 +import json +from pathlib import Path + +""" + +This is an example of what you could do as a pre-requisite before executing your great expectations tests. +This is intended to prepare your data sets or even trigger your ETL pipelines. + + +In this specific example we deploy a sample athena database +with two tables: person and address taht will be used on the Demo test cases. + +In the post_requisite.py we will delete this athena database. + + +""" + + + + +def get_terraform_outputs(): + with open('terraform_outputs.json') as file: + output_json = json.load(file) + return output_json + + +def setup_test_database(tf_outputs): + aws_region = tf_outputs["aws_region"]["value"] + # Create Athena tables + client = boto3.client('athena', region_name=aws_region) + created = create_database(client, tf_outputs) + if created: + address_table_creation_query(client, tf_outputs) + person_table_creation_query(client, tf_outputs) + + +def create_database(client, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + + query = "CREATE DATABASE greatexpectationsdb" + response = client.start_query_execution( + QueryString=query, + ResultConfiguration={ + 'OutputLocation': f's3://{bucket_name}/db_test_outputs/', + } + ) + print('Database created.') + return 1 + + +def address_table_creation_query(client, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + formated_string = 
"'spark.sql.sources.schema.part.0'='{\"type\":\"struct\",\"fields\":[{\"name\":\"_hoodie_commit_time\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_commit_seqno\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_record_key\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_partition_path\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_file_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_id\",\"type\":\"decimal(10,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_5\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_type_code\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"country_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"last_updated_date\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"owner\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"post_zip_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"primary_address_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"province_county\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"provinc
e_county_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"table_short_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"town_city\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"updated_during_mon_visit_by\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"update_count\",\"type\":\"decimal(8,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_action_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_date_changed\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_personnel_no\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"time_zone_offset\",\"type\":\"decimal(4,2)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"transaction_no\",\"type\":\"decimal(38,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"ingestion_timestamp\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hist_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_task_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_from_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_to_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"curr_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"del_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"moduleKey\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}}]}'," + query = f""" + CREATE EXTERNAL TABLE IF NOT EXISTS `address`( + `_hoodie_commit_time` string COMMENT '', + `_hoodie_commit_seqno` string COMMENT '', + `_hoodie_record_key` string COMMENT '', + `_hoodie_partition_path` string COMMENT '', + `_hoodie_file_name` string COMMENT '', + `address_id` 
decimal(10,0) COMMENT '', + `address_line_1` string COMMENT '', + `address_line_2` string COMMENT '', + `address_line_3` string COMMENT '', + `address_line_4` string COMMENT '', + `address_owner_key_1` string COMMENT '', + `address_owner_key_2` string COMMENT '', + `address_owner_key_3` string COMMENT '', + `address_owner_key_4` string COMMENT '', + `address_owner_key_5` string COMMENT '', + `address_type_code` decimal(6,0) COMMENT '', + `country_code` string COMMENT '', + `last_updated_date` timestamp COMMENT '', + `owner` string COMMENT '', + `post_zip_code` string COMMENT '', + `primary_address_flag` string COMMENT '', + `province_county` string COMMENT '', + `province_county_code` string COMMENT '', + `table_short_name` string COMMENT '', + `town_city` string COMMENT '', + `updated_during_mon_visit_by` decimal(6,0) COMMENT '', + `update_count` decimal(8,0) COMMENT '', + `aud_action_flag` string COMMENT '', + `aud_date_changed` timestamp COMMENT '', + `aud_personnel_no` decimal(6,0) COMMENT '', + `time_zone_offset` decimal(4,2) COMMENT '', + `transaction_no` decimal(38,0) COMMENT '', + `ingestion_timestamp` bigint COMMENT '', + `pr_tab_hist_hkey` string COMMENT '', + `pr_tab_hkey` string COMMENT '', + `audit_id` string COMMENT '', + `audit_task_id` string COMMENT '', + `int_tec_from_dt` date COMMENT '', + `int_tec_to_dt` date COMMENT '', + `curr_flg` int COMMENT '', + `del_flg` int COMMENT '') + PARTITIONED BY ( + `modulekey` bigint COMMENT '') + ROW FORMAT SERDE + 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' + WITH SERDEPROPERTIES ( + 'hoodie.query.as.ro.table'='false', + 'path'='s3://{bucket_name}/clean/address') + STORED AS INPUTFORMAT + 'org.apache.hudi.hadoop.HoodieParquetInputFormat' + OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' + LOCATION + 's3://{bucket_name}/clean/address' + TBLPROPERTIES ( + 'last_commit_time_sync'='20230725080610371', + 'spark.sql.create.version'='3.3.0-amzn-1', + 
'spark.sql.sources.provider'='hudi', + 'spark.sql.sources.schema.numPartCols'='1', + 'spark.sql.sources.schema.numParts'='1', + {formated_string} + 'spark.sql.sources.schema.partCol.0'='moduleKey', + 'transient_lastDdlTime'='1690272434') + """ + execute_query(client, query, tf_outputs) + + +def person_table_creation_query(client, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + + query = f""" + CREATE EXTERNAL TABLE IF NOT EXISTS person ( + name VARCHAR(50), + surname VARCHAR(50), + age INT, + location VARCHAR(100) + ) + ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' + LOCATION 's3://{bucket_name}/clean/person'; + """ + execute_query(client, query, tf_outputs) + + query_insert_data = """ + INSERT INTO person (name, surname, age, location) + VALUES + ('John', 'Doe', 25, 'New York'), + ('Jane', 'Smith', 32, 'London'), + ('Michael', 'Johnson', 45, 'Berlin'); + """ + execute_query(client, query_insert_data, tf_outputs) + + +def execute_query(client, query, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + response = client.start_query_execution( + QueryString=query, + QueryExecutionContext={ + 'Database': 'greatexpectationsdb' + }, + ResultConfiguration={ + 'OutputLocation': f's3://{bucket_name}/db_test_outputs/', + } + ) + print("Query execution ID: ", response['QueryExecutionId']) + + + + +def setup_config_yml(tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + aws_region = tf_outputs["aws_region"]["value"] + + # Create 'uncommitted' directory if it doesn't exist + uncommitted_path = Path('tests/acceptance/great_expectations/uncommitted') + uncommitted_path.mkdir(parents=True, exist_ok=True) + + # Write environment variables to config_variables.yml + config_file_path = uncommitted_path / 'config_variables.yml' + # Write environment variables to config_variables.yml + with open(config_file_path, 'w') as config_file: + connection_string = 
f"awsathena+rest://@athena.{aws_region}.amazonaws.com:443/greatexpectationsdb?s3_staging_dir=s3://{bucket_name}/great_expectations" + config_file.write(f"connection_string: {connection_string}\n") + + print("Config yml setted") + +def main(): + tf_outputs = get_terraform_outputs() + setup_test_database(tf_outputs) + setup_config_yml(tf_outputs) + +if __name__ == "__main__": + main() diff --git a/e2e-etl-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py b/e2e-etl-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py new file mode 100644 index 000000000..2059ba3de --- /dev/null +++ b/e2e-etl-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py @@ -0,0 +1,66 @@ +import boto3 +import pytest +import allure +import json +import os +import datetime +import pytz + + +def get_terraform_outputs(): + with open('terraform_outputs.json') as file: + output_json = json.load(file) + + return output_json + + +def get_env_vars(): + environment = os.environ['ENVIRONMENT'] + env_vars_path = f"environments/{environment}.json" + with open(env_vars_path, 'r') as file: + data = json.load(file) + + return data + +'''Remember to rename the test with this format __test e.g: EDPTP457_s3_file_present_test''' +def Demo_s3_file_present_test(record_property): + outputs_tf = get_terraform_outputs() + bucket_name = outputs_tf["bitbucket_s3bucket_name"]["value"] + env_vars = get_env_vars() + file_key = env_vars['repository'] + '-' + env_vars['branch_name'] + '.zip' + + record_property( + "test_evidence_1", + f"Name of the bucket search: {bucket_name}, file to search in the bucket: {file_key}" + ) + + s3_client = boto3.client('s3') + with allure.step("Check if file exists in S3 bucket"): + response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=file_key) + file_present = 'Contents' in response + record_property( + "test_evidence_2", + f"Response form the call to the S3 bucket: {file_key}" + ) + assert file_present, f"File '{file_key}' not found in S3 bucket 
'{bucket_name}'" + +'''Remember to rename the test with this format __test e.g: EDPTP456_s3_file_present_test''' +def Demo_test_pipeline_execution_time_test(record_property): + outputs_tf = get_terraform_outputs() + codepipeline_name = outputs_tf['codepipeline_name']['value'] + client = boto3.client('codepipeline') + + record_property( + "test_evidence_1", + f"Name of the pipeline: {codepipeline_name}" + ) + + with allure.step("Check aws pipeline last execution"): + response = client.get_pipeline_state(name=codepipeline_name) + last_execution = response['stageStates'][0]['actionStates'][0]['latestExecution']['lastStatusChange'] + record_property( + "test_evidence_2", + f"Response from the Pipeline, last execution was on date: {last_execution}" + ) + now = datetime.datetime.now(pytz.UTC) + assert last_execution > now - datetime.timedelta(hours=24), f"Pipeline has not been executed in the last 24 hours" diff --git a/e2e-etl-python/files/tests/installation/installation_test.py b/e2e-etl-python/files/tests/installation/installation_test.py new file mode 100644 index 000000000..54154591b --- /dev/null +++ b/e2e-etl-python/files/tests/installation/installation_test.py @@ -0,0 +1,4 @@ +#In this folder you can place your installation test cases if needed + +def dummy_test(): + assert 1==1 diff --git a/e2e-etl-python/files/tests/integration/integration_test.py b/e2e-etl-python/files/tests/integration/integration_test.py new file mode 100644 index 000000000..45516ba1f --- /dev/null +++ b/e2e-etl-python/files/tests/integration/integration_test.py @@ -0,0 +1,4 @@ +#In this folder you can place your integration test cases if needed + +def dummy_test(): + assert 1==1 diff --git a/e2e-etl-python/files/utils/checkpoints_executions.py b/e2e-etl-python/files/utils/checkpoints_executions.py new file mode 100644 index 000000000..0fb945482 --- /dev/null +++ b/e2e-etl-python/files/utils/checkpoints_executions.py @@ -0,0 +1,17 @@ +import os +from great_expectations import DataContext + + 
+folder_name = "tests/acceptance/great_expectations" + +context = DataContext(folder_name) + +checkpoints_dir = os.path.join(folder_name, "checkpoints") + +for filename in os.listdir(checkpoints_dir): + + if filename.endswith(".yml"): + checkpoint_name = os.path.splitext(filename)[0] + checkpoint_path = os.path.join(checkpoints_dir, filename) + + context.run_checkpoint(checkpoint_name=checkpoint_name) diff --git a/e2e-etl-python/files/utils/json2JUnit.py b/e2e-etl-python/files/utils/json2JUnit.py new file mode 100644 index 000000000..df679adde --- /dev/null +++ b/e2e-etl-python/files/utils/json2JUnit.py @@ -0,0 +1,77 @@ +import json +import glob +import xml.etree.ElementTree as ET +import datetime +import xml.sax.saxutils as saxutils + +folder_name = "tests/acceptance/great_expectations" +output_path = folder_name + "/uncommitted/validations/junit.xml" +jsons_location_path = folder_name + "/uncommitted/validations/**/*.json" + +json_files = glob.glob(jsons_location_path, recursive=True) + +root = ET.Element("testsuites", name="GreatExpectations") + +total_tests = 0 +total_failures = 0 + +def escape_string(data): + json_str = json.dumps(data) + return saxutils.escape(json_str) + +for json_file_path in json_files: + + with open(json_file_path, "r") as json_file: + data = json.load(json_file) + + validation_time = datetime.datetime.strptime(data["meta"]["validation_time"], "%Y%m%dT%H%M%S.%fZ") + ge_load_time = datetime.datetime.strptime(data["meta"]["batch_markers"]["ge_load_time"], "%Y%m%dT%H%M%S.%fZ") + execution_time = validation_time - ge_load_time + + failures_checkpoint = 0 + if data['statistics']['unsuccessful_expectations'] > 0: + failures_checkpoint = 1 + + testsuite = ET.SubElement( + root, "testsuite", + # id=data["meta"]["run_id"]["run_name"], --Not necessary for now + name=data["meta"]["checkpoint_name"], + tests="1", + failures=str(failures_checkpoint), + time=str(execution_time.total_seconds()) + ) + + total_tests += 1 + if 
data["statistics"]["unsuccessful_expectations"] > 0: + total_failures += 1 + + testcase = ET.SubElement( + testsuite, + "testcase", + name=data["meta"]["checkpoint_name"], + evaluated_expectations=escape_string(data['statistics']['evaluated_expectations']), + successful_expectations=escape_string(data['statistics']['successful_expectations']), + unsuccessful_expectations=escape_string(data['statistics']['unsuccessful_expectations']), + log=escape_string(data["results"]) + ) + + for idx, result in enumerate(data["results"], start=1): + + if not result["success"]: + exception_message = str(escape_string(result["exception_info"]["exception_message"]) if result["exception_info"][ + "raised_exception"] else None) + expectation_config = escape_string(result["expectation_config"]) + observed_vaue = escape_string(result["result"]) + failure = ET.SubElement( + testcase, + "failure", + message=exception_message + expectation_config + observed_vaue + ) + failure.text = exception_message + expectation_config + observed_vaue + +root.set("tests", str(total_tests)) +root.set("failures", str(total_failures)) +tree = ET.ElementTree(root) + +with open(output_path, 'wb') as f: + tree.write(f, encoding="utf-8", xml_declaration=True) diff --git a/e2e-etl-python/files/variables.tf b/e2e-etl-python/files/variables.tf new file mode 100644 index 000000000..020005f4d --- /dev/null +++ b/e2e-etl-python/files/variables.tf @@ -0,0 +1,76 @@ +# ----------------------------------------------------------------------------- +# ENVIRONMENT VARIABLES +# This stack supports the following secrets as environment variables. +# ----------------------------------------------------------------------------- + +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_DEFAULT_REGION + +# ----------------------------------------------------------------------------- +# REQUIRED PARAMETERS +# The following parameters require a value. 
+# Documentation: https://www.terraform.io/docs/configuration/variables.html +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# OPTIONAL PARAMETERS +# The following parameters are optional with sensible defaults. +# Documentation: https://www.terraform.io/docs/configuration/variables.html +# ----------------------------------------------------------------------------- + +variable "name" { + description = "The name of the stack." + type = string + default = "stack-aws-quickstarter" +} + +variable "meta_environment" { + description = "The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION." + type = string + default = "DEVELOPMENT" +} + + +/* +# CodeBuild +variable "build_project_name" {} + +# CodePipeline +variable "codepipeline_name" {} + +# iam_roles +variable "pipeline_role_name" {} +variable "codebuild_role_name" {} +variable "codepipeline_policy_name" {} +variable "codebuild_policy_name" {} + +# s3 +variable "codepipeline_bucket_name" {} +variable "e2e_results_bucket_name" {} +variable "bitbucket_source_bucket_name" {} +*/ + +variable "projectId" { + description = "EDP project name" + type = string + default = "projectId" +} + +variable "environment" { + description = "The project execution environment." 
+ type = string + default = "dev" +} + +variable "repository" { + description = "QS bitbucket repository" + type = string + default = "e2e-etl-python" +} + +variable "branch_name" { + description = "repository branch_name" + type = string + default = "master" +} diff --git a/e2e-etl-python/files/versions.tf b/e2e-etl-python/files/versions.tf new file mode 100644 index 000000000..88efbc759 --- /dev/null +++ b/e2e-etl-python/files/versions.tf @@ -0,0 +1,13 @@ +terraform { + required_version = ">= 1.0" + required_providers { + aws = { + source = "hashicorp/aws" + version = "4.67.0" + } + random = { + source = "hashicorp/random" + version = "3.5.1" + } + } +} diff --git a/e2e-etl-python/prod.yml.template b/e2e-etl-python/prod.yml.template new file mode 100644 index 000000000..b9d192b92 --- /dev/null +++ b/e2e-etl-python/prod.yml.template @@ -0,0 +1,7 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-prod + secret: @project_id@-cd-aws-secret-access-key-prod + +account: "" diff --git a/e2e-etl-python/test.yml.template b/e2e-etl-python/test.yml.template new file mode 100644 index 000000000..0f6413d32 --- /dev/null +++ b/e2e-etl-python/test.yml.template @@ -0,0 +1,8 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-test + secret: @project_id@-cd-aws-secret-access-key-test + +account: "" + diff --git a/e2e-etl-python/testdata/golden/jenkins-build-stages.json b/e2e-etl-python/testdata/golden/jenkins-build-stages.json new file mode 100644 index 000000000..32f609fbe --- /dev/null +++ b/e2e-etl-python/testdata/golden/jenkins-build-stages.json @@ -0,0 +1,18 @@ +[ + { + "stage": "odsPipeline start", + "status": "SUCCESS" + }, + { + "stage": "Integration Test", + "status": "SUCCESS" + }, + { + "stage": "SonarQube Analysis", + "status": "SUCCESS" + }, + { + "stage": "odsPipeline finished", + "status": "SUCCESS" + } +] \ No newline at end of file diff --git a/e2e-etl-python/testdata/golden/jenkins-provision-stages.json 
b/e2e-etl-python/testdata/golden/jenkins-provision-stages.json new file mode 100644 index 000000000..9e9ae3cb8 --- /dev/null +++ b/e2e-etl-python/testdata/golden/jenkins-provision-stages.json @@ -0,0 +1,26 @@ +[ + { + "stage": "Checkout quickstarter", + "status": "SUCCESS" + }, + { + "stage": "Initialize output directory", + "status": "SUCCESS" + }, + { + "stage": "Copy files from quickstarter", + "status": "SUCCESS" + }, + { + "stage": "Create Jenkinsfile", + "status": "SUCCESS" + }, + { + "stage": "Create sonar-project.properties", + "status": "SUCCESS" + }, + { + "stage": "Push to remote", + "status": "SUCCESS" + } +] diff --git a/e2e-etl-python/testdata/golden/sonar-scan.json b/e2e-etl-python/testdata/golden/sonar-scan.json new file mode 100644 index 000000000..22413e7c9 --- /dev/null +++ b/e2e-etl-python/testdata/golden/sonar-scan.json @@ -0,0 +1,30 @@ +{ + "key": "{{.ProjectID}}-{{.ComponentID}}", + "name": "{{.ProjectID}}-{{.ComponentID}}", + "isFavorite": false, + "visibility": "public", + "extensions": [], + "qualityProfiles": [ + { + "name": "{{.SonarQualityProfile}}", + "language": "js", + "deleted": false + }, + { + "name": "{{.SonarQualityProfile}}", + "language": "ts", + "deleted": false + } + ], + "qualityGate": { + "name": "Sonar way", + "isDefault": true + }, + "breadcrumbs": [ + { + "key": "{{.ProjectID}}-{{.ComponentID}}", + "name": "{{.ProjectID}}-{{.ComponentID}}", + "qualifier": "TRK" + } + ] +} \ No newline at end of file diff --git a/e2e-etl-python/testdata/steps.yml b/e2e-etl-python/testdata/steps.yml new file mode 100644 index 000000000..293fcfcbc --- /dev/null +++ b/e2e-etl-python/testdata/steps.yml @@ -0,0 +1,15 @@ +componentID: e2e-etl-python +steps: +- type: provision + provisionParams: + verify: + jenkinsStages: golden/jenkins-provision-stages.json +- type: build + buildParams: + verify: + jenkinsStages: golden/jenkins-build-stages.json + sonarScan: golden/sonar-scan.json + runAttachments: + - SCRR-{{.ProjectID}}-{{.ComponentID}}.docx 
+ - SCRR-{{.ProjectID}}-{{.ComponentID}}.md + testResults: 2 diff --git a/e2e-etl-python/testing.yml.template b/e2e-etl-python/testing.yml.template new file mode 100644 index 000000000..8c93f3dcc --- /dev/null +++ b/e2e-etl-python/testing.yml.template @@ -0,0 +1,5 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-testing + secret: @project_id@-cd-aws-secret-access-key-testing diff --git a/release-manager/files/README.md b/release-manager/files/README.md index dd59682e6..4d1c1144f 100644 --- a/release-manager/files/README.md +++ b/release-manager/files/README.md @@ -354,4 +354,4 @@ When you author templates, you can also store the secrets in the param files GPG - Passwords etc. in the OpenShift configuration are stored in clear text in the export (this can be prevented by authoring templates and using a private key for encryption of param files) - During export, the templates are parameterized automatically, but this is done using string search-and-replace and unwanted replacements might occur (this can be prevented by authoring the templates manually). - By default, SonarQube scans (and reports) are only generated for the `master` branch of each component. As the orchestration pipeline automatically creates release branches for each version, no scans and reports are created on those. This can be changed by configuring `sonarQubeBranch: '*'`` in each component's `Jenkinsfile`, however keep in mind that quality trends etc. will be mixed up if you use the free version of SonarQube as that version does not have support for multiple branches. -- An existing QA-tag cannot be deployed again in PROD. This has been intentionally designed that way as any change to PROD needs its unique change ID, which results in a new tag. \ No newline at end of file +- An existing QA-tag cannot be deployed again in PROD. This has been intentionally designed that way as any change to PROD needs its unique change ID, which results in a new tag.