From 56ced8407374444c454600ee0c10b7811083eea7 Mon Sep 17 00:00:00 2001 From: Itxaro Aizpurua Arconada <139855821+iaizarc@users.noreply.github.com> Date: Tue, 25 Jul 2023 09:36:40 +0200 Subject: [PATCH 01/32] Add files via upload --- requirements.txt | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 requirements.txt diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..41a37d3c7 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +boto +allure +great_expectations +pytest +behave \ No newline at end of file From 7a086e42721ecce0b6dfbbb424a5bd2d9da67c59 Mon Sep 17 00:00:00 2001 From: Itxaro Aizpurua Arconada <139855821+iaizarc@users.noreply.github.com> Date: Tue, 25 Jul 2023 09:38:35 +0200 Subject: [PATCH 02/32] Delete requirements.txt --- requirements.txt | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 requirements.txt diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 41a37d3c7..000000000 --- a/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -boto -allure -great_expectations -pytest -behave \ No newline at end of file From ebadf37286681ec9bb7ce44cf98a7e7dc9b94731 Mon Sep 17 00:00:00 2001 From: Itxaro Aizpurua Arconada <139855821+iaizarc@users.noreply.github.com> Date: Tue, 25 Jul 2023 10:20:39 +0200 Subject: [PATCH 03/32] Create e2e-python --- e2e-python | 1 + 1 file changed, 1 insertion(+) create mode 100644 e2e-python diff --git a/e2e-python b/e2e-python new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/e2e-python @@ -0,0 +1 @@ + From 677d6dc8c49bc07027b61ab57fca42006c7a2d99 Mon Sep 17 00:00:00 2001 From: Itxaro Aizpurua Arconada <139855821+iaizarc@users.noreply.github.com> Date: Tue, 25 Jul 2023 10:21:07 +0200 Subject: [PATCH 04/32] Delete e2e-python --- e2e-python | 1 - 1 file changed, 1 deletion(-) delete mode 100644 e2e-python diff --git a/e2e-python b/e2e-python deleted file mode 100644 index 8b1378917..000000000 --- a/e2e-python +++ /dev/null @@ -1 +0,0 @@ 
- From 897af12c7446691d9b57bda94c81f315d9d4bef4 Mon Sep 17 00:00:00 2001 From: Itxaro Aizpurua Arconada <139855821+iaizarc@users.noreply.github.com> Date: Tue, 25 Jul 2023 10:25:19 +0200 Subject: [PATCH 05/32] Create README.md --- e2e-python/README.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 e2e-python/README.md diff --git a/e2e-python/README.md b/e2e-python/README.md new file mode 100644 index 000000000..762dc7107 --- /dev/null +++ b/e2e-python/README.md @@ -0,0 +1 @@ +# End-to-end tests with python (e2e-python) From 5e0f7c855a7034fcf38fbe757c1cf02ca1589279 Mon Sep 17 00:00:00 2001 From: perezpec Date: Wed, 17 Jan 2024 15:11:54 +0100 Subject: [PATCH 06/32] remove e2e-python folder --- e2e-python/README.md | 1 - 1 file changed, 1 deletion(-) delete mode 100644 e2e-python/README.md diff --git a/e2e-python/README.md b/e2e-python/README.md deleted file mode 100644 index 762dc7107..000000000 --- a/e2e-python/README.md +++ /dev/null @@ -1 +0,0 @@ -# End-to-end tests with python (e2e-python) From da449219afddee0914b99f539aa836fedf99cda5 Mon Sep 17 00:00:00 2001 From: perezpec Date: Wed, 17 Jan 2024 15:13:59 +0100 Subject: [PATCH 07/32] add e2e-python folder and content --- e2e-python/Jenkinsfile | 48 + e2e-python/Jenkinsfile.template | 185 ++ e2e-python/README.md | 104 + e2e-python/dev.yml.template | 7 + .../.devcontainer/devcontainer.json.template | 3 + e2e-python/files/.editorconfig | 19 + e2e-python/files/.gitignore | 20 + e2e-python/files/.pre-commit-config.yaml | 83 + e2e-python/files/.rubocop.yml | 4 + e2e-python/files/.ruby-version | 1 + e2e-python/files/.terraform-version | 1 + e2e-python/files/Gemfile | 19 + e2e-python/files/Gemfile.lock | 1883 +++++++++++++++++ e2e-python/files/Makefile | 184 ++ e2e-python/files/Pipfile | 14 + e2e-python/files/backend.tf | 5 + e2e-python/files/cfn-templates/.gitkeep | 0 e2e-python/files/cfn-templates/cfs3.json | 29 + e2e-python/files/common-tags.tf | 5 + e2e-python/files/environments/dev.json | 13 + 
e2e-python/files/environments/prod.json | 13 + e2e-python/files/environments/test.json | 13 + e2e-python/files/inputs2outputs.tf | 11 + e2e-python/files/kitchen.yml | 29 + .../files/lib/scripts/aws/check_conf.sh | 122 ++ .../scripts/createstackfixtureoutputs2yml.sh | 37 + .../lib/scripts/createstackoutputs2yml.sh | 36 + e2e-python/files/main.tf | 74 + e2e-python/files/metadata.yml | 7 + e2e-python/files/modules/codebuild/main.tf | 96 + e2e-python/files/modules/codebuild/output.tf | 12 + .../files/modules/codebuild/variables.tf | 117 + e2e-python/files/modules/codepipeline/main.tf | 53 + .../files/modules/codepipeline/output.tf | 14 + .../files/modules/codepipeline/variables.tf | 67 + e2e-python/files/modules/iam_roles/main.tf | 79 + e2e-python/files/modules/iam_roles/outputs.tf | 11 + .../files/modules/iam_roles/variables.tf | 38 + e2e-python/files/modules/s3-bucket/main.tf | 33 + e2e-python/files/modules/s3-bucket/outputs.tf | 38 + .../files/modules/s3-bucket/variables.tf | 51 + e2e-python/files/outputs.tf | 61 + e2e-python/files/pytest.ini | 2 + e2e-python/files/random.tf | 13 + e2e-python/files/release-manager.yml | 2 + e2e-python/files/reports/install/.gitkeep | 1 + e2e-python/files/requirements.txt | 19 + e2e-python/files/stackmodulesoutputs.tf | 2 + e2e-python/files/terraform-data.tf | 14 + .../files/test/fixtures/default/backend.tf | 5 + .../files/test/fixtures/default/main.tf | 16 + .../test/fixtures/default/moduleoutputs.tf | 5 + .../files/test/fixtures/default/random.tf | 9 + .../default/controls/blueprints.rb | 1 + .../integration/default/controls/default.rb | 25 + .../test/integration/default/files/.gitkeep | 0 .../files/test/integration/default/inspec.yml | 10 + .../test/integration/default/inspec.yml.tmpl | 8 + .../test/integration/default/libraries/aws.rb | 161 ++ .../default/libraries/fixture_data.rb | 49 + .../default/libraries/terraform_data.rb | 15 + .../acceptance/great_expectations/.gitignore | 2 + .../checkpoints/Demo_athena_checkpoint.yml | 
32 + .../checkpoints/Demo_person_checkpoint.yml | 32 + .../expectations/athena_validation_suite.json | 68 + .../expectations/person_validation_suite.json | 47 + .../great_expectations/great_expectations.yml | 124 ++ .../styles/data_docs_custom_styles.css | 22 + .../test_preparation/post_requisites.py | 39 + .../test_preparation/pre_requisites.py | 143 ++ .../pytest/Demo_allure_pytest_test.py | 66 + .../tests/installation/installation_test.py | 4 + .../tests/integration/integration_test.py | 4 + .../files/utils/checkpoints_executions.py | 17 + e2e-python/files/utils/json2JUnit.py | 75 + e2e-python/files/variables.tf | 76 + e2e-python/files/versions.tf | 13 + e2e-python/prod.yml.template | 7 + e2e-python/test.yml.template | 8 + .../testdata/golden/jenkins-build-stages.json | 18 + .../golden/jenkins-provision-stages.json | 26 + e2e-python/testdata/golden/sonar-scan.json | 30 + e2e-python/testdata/steps.yml | 15 + e2e-python/testing.yml.template | 5 + 84 files changed, 4869 insertions(+) create mode 100644 e2e-python/Jenkinsfile create mode 100644 e2e-python/Jenkinsfile.template create mode 100644 e2e-python/README.md create mode 100644 e2e-python/dev.yml.template create mode 100644 e2e-python/files/.devcontainer/devcontainer.json.template create mode 100644 e2e-python/files/.editorconfig create mode 100644 e2e-python/files/.gitignore create mode 100644 e2e-python/files/.pre-commit-config.yaml create mode 100644 e2e-python/files/.rubocop.yml create mode 100644 e2e-python/files/.ruby-version create mode 100644 e2e-python/files/.terraform-version create mode 100644 e2e-python/files/Gemfile create mode 100644 e2e-python/files/Gemfile.lock create mode 100644 e2e-python/files/Makefile create mode 100644 e2e-python/files/Pipfile create mode 100644 e2e-python/files/backend.tf create mode 100644 e2e-python/files/cfn-templates/.gitkeep create mode 100644 e2e-python/files/cfn-templates/cfs3.json create mode 100644 e2e-python/files/common-tags.tf create mode 100644 
e2e-python/files/environments/dev.json create mode 100644 e2e-python/files/environments/prod.json create mode 100644 e2e-python/files/environments/test.json create mode 100644 e2e-python/files/inputs2outputs.tf create mode 100644 e2e-python/files/kitchen.yml create mode 100644 e2e-python/files/lib/scripts/aws/check_conf.sh create mode 100644 e2e-python/files/lib/scripts/createstackfixtureoutputs2yml.sh create mode 100644 e2e-python/files/lib/scripts/createstackoutputs2yml.sh create mode 100644 e2e-python/files/main.tf create mode 100644 e2e-python/files/metadata.yml create mode 100644 e2e-python/files/modules/codebuild/main.tf create mode 100644 e2e-python/files/modules/codebuild/output.tf create mode 100644 e2e-python/files/modules/codebuild/variables.tf create mode 100644 e2e-python/files/modules/codepipeline/main.tf create mode 100644 e2e-python/files/modules/codepipeline/output.tf create mode 100644 e2e-python/files/modules/codepipeline/variables.tf create mode 100644 e2e-python/files/modules/iam_roles/main.tf create mode 100644 e2e-python/files/modules/iam_roles/outputs.tf create mode 100644 e2e-python/files/modules/iam_roles/variables.tf create mode 100644 e2e-python/files/modules/s3-bucket/main.tf create mode 100644 e2e-python/files/modules/s3-bucket/outputs.tf create mode 100644 e2e-python/files/modules/s3-bucket/variables.tf create mode 100644 e2e-python/files/outputs.tf create mode 100644 e2e-python/files/pytest.ini create mode 100644 e2e-python/files/random.tf create mode 100644 e2e-python/files/release-manager.yml create mode 100644 e2e-python/files/reports/install/.gitkeep create mode 100644 e2e-python/files/requirements.txt create mode 100644 e2e-python/files/stackmodulesoutputs.tf create mode 100644 e2e-python/files/terraform-data.tf create mode 100644 e2e-python/files/test/fixtures/default/backend.tf create mode 100644 e2e-python/files/test/fixtures/default/main.tf create mode 100644 e2e-python/files/test/fixtures/default/moduleoutputs.tf create 
mode 100644 e2e-python/files/test/fixtures/default/random.tf create mode 100644 e2e-python/files/test/integration/default/controls/blueprints.rb create mode 100644 e2e-python/files/test/integration/default/controls/default.rb create mode 100644 e2e-python/files/test/integration/default/files/.gitkeep create mode 100644 e2e-python/files/test/integration/default/inspec.yml create mode 100644 e2e-python/files/test/integration/default/inspec.yml.tmpl create mode 100644 e2e-python/files/test/integration/default/libraries/aws.rb create mode 100644 e2e-python/files/test/integration/default/libraries/fixture_data.rb create mode 100644 e2e-python/files/test/integration/default/libraries/terraform_data.rb create mode 100644 e2e-python/files/tests/acceptance/great_expectations/.gitignore create mode 100644 e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml create mode 100644 e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml create mode 100644 e2e-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json create mode 100644 e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json create mode 100644 e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml create mode 100644 e2e-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css create mode 100644 e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py create mode 100644 e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py create mode 100644 e2e-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py create mode 100644 e2e-python/files/tests/installation/installation_test.py create mode 100644 e2e-python/files/tests/integration/integration_test.py create mode 100644 e2e-python/files/utils/checkpoints_executions.py create mode 
100644 e2e-python/files/utils/json2JUnit.py create mode 100644 e2e-python/files/variables.tf create mode 100644 e2e-python/files/versions.tf create mode 100644 e2e-python/prod.yml.template create mode 100644 e2e-python/test.yml.template create mode 100644 e2e-python/testdata/golden/jenkins-build-stages.json create mode 100644 e2e-python/testdata/golden/jenkins-provision-stages.json create mode 100644 e2e-python/testdata/golden/sonar-scan.json create mode 100644 e2e-python/testdata/steps.yml create mode 100644 e2e-python/testing.yml.template diff --git a/e2e-python/Jenkinsfile b/e2e-python/Jenkinsfile new file mode 100644 index 000000000..39b9f3744 --- /dev/null +++ b/e2e-python/Jenkinsfile @@ -0,0 +1,48 @@ +def odsNamespace = '' +def odsGitRef = '' +def odsImageTag = '' +def sharedLibraryRef = '' +def agentImageTag = '' + +node { + odsNamespace = env.ODS_NAMESPACE ?: 'ods' + odsGitRef = env.ODS_GIT_REF ?: 'master' + odsImageTag = env.ODS_IMAGE_TAG ?: 'latest' + sharedLibraryRef = env.SHARED_LIBRARY_REF ?: odsImageTag + agentImageTag = env.AGENT_IMAGE_TAG ?: odsImageTag +} + +library("ods-jenkins-shared-library@${sharedLibraryRef}") + +odsQuickstarterPipeline( + imageStreamTag: "${odsNamespace}/jenkins-agent-base:${agentImageTag}", +) { context -> + + odsQuickstarterStageCopyFiles(context) + + odsQuickstarterStageRenderJenkinsfile(context) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'dev.yml.template', + target: 'environments/dev.yml'] + ) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'test.yml.template', + target: 'environments/test.yml'] + ) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'prod.yml.template', + target: 'environments/prod.yml'] + ) + + odsQuickstarterStageRenderJenkinsfile( + context, + [source: 'testing.yml.template', + target: 'environments/testing.yml'] + ) +} \ No newline at end of file diff --git a/e2e-python/Jenkinsfile.template b/e2e-python/Jenkinsfile.template new file mode 100644 
index 000000000..1303f3d29 --- /dev/null +++ b/e2e-python/Jenkinsfile.template @@ -0,0 +1,185 @@ +/* generated jenkins file used for building and deploying AWS-infrastructure in projects */ + +@Library('ods-jenkins-shared-library@@shared_library_ref@') _ + +node { + aws_region = env.AWS_REGION + dockerRegistry = env.DOCKER_REGISTRY +} + +odsComponentPipeline( + podContainers: [ + containerTemplate( + name: 'jnlp', + image: "${dockerRegistry}/ods/jenkins-agent-terraform-2306:@shared_library_ref@", + envVars: [ + envVar(key: 'AWS_REGION', value: aws_region) + ], + alwaysPullImage: true, + args: '${computer.jnlpmac} ${computer.name}' + ) + ], + branchToEnvironmentMapping: [ + '*': 'dev', + // 'release/': 'test' + ] +) { context -> + getEnvironment(context) + addVars2envJsonFile(context) + odsComponentStageInfrastructure(context, [cloudProvider: 'AWS']) + + withEnv(["AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}", + "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}" + ]) + { + stage ("AWS Testing Preparation"){ + generateTerraformOutputsFile() + } + + def outputNames = stageGetNamesFromOutputs() + def aws_pipelineName = outputNames.aws_codepipeline_name + def bitbuckets3_name = outputNames.bitbuckets3_name + def results3_name = outputNames.results3_name + + stage ("Publish Bitbucket Code To AWS"){ + publishBitbucketCodeToAWS(context, bitbuckets3_name) + } + + stage ("Run Tests"){ + awsCodePipelineTrigger(context, aws_pipelineName) + awsCodePipelineWaitForExecution(context, aws_pipelineName) + } + + stage ("Test Results"){ + retrieveReportsFromAWS(context, results3_name) + archiveArtifacts artifacts: "build/test-results/test/**", allowEmptyArchive: true + junit(testResults:'build/test-results/test/*.xml', allowEmptyResults: true) + stash(name: "acceptance-test-reports-junit-xml-${context.componentId}-${context.buildNumber}", includes: "build/test-results/test/acceptance*junit.xml", allowEmpty: true) + stash(name: 
"installation-test-reports-junit-xml-${context.componentId}-${context.buildNumber}", includes: "build/test-results/test/installation*junit.xml", allowEmpty: true) + stash(name: "integration-test-reports-junit-xml-${context.componentId}-${context.buildNumber}", includes: "build/test-results/test/integration*junit.xml", allowEmpty: true) + } + } + +} + +def getEnvironment(def context){ + sh "echo Get Environment Variables" + AWS_ACCESS_KEY_ID = sh(returnStdout: true, script:"oc get secret aws-access-key-id-${context.environment} --namespace ${context.cdProject} --output jsonpath='{.data.secrettext}' | base64 -d") + AWS_SECRET_ACCESS_KEY = sh(returnStdout: true, script:"oc get secret aws-secret-access-key-${context.environment} --namespace ${context.cdProject} --output jsonpath='{.data.secrettext}' | base64 -d") + +} + + +def generateTerraformOutputsFile() { + sh 'terraform output -json > terraform_outputs.json' + sh 'cat terraform_outputs.json' +} + +def stageGetNamesFromOutputs() { + def outputNames = [:] + def terraformOutputJson = readJSON file: 'terraform_outputs.json' + //def environmentVarsJson = readJSON file: env.auto.tfvars.json + + outputNames.aws_codepipeline_name = terraformOutputJson.codepipeline_name.value + outputNames.bitbuckets3_name = terraformOutputJson.bitbucket_s3bucket_name.value + outputNames.results3_name = terraformOutputJson.e2e_results_bucket_name.value + + return outputNames +} + +def awsCodePipelineTrigger(def context, pipelineName) { + sh "aws codepipeline start-pipeline-execution --name ${pipelineName}" +} + + +def awsCodePipelineWaitForExecution(def context, pipelineName) { + def pipelineExecutionStatus = '' + + while (true) { + pipelineExecutionStatus = '' + sleep(time: 40, unit: 'SECONDS') + def pipelineState = sh( + script: "aws codepipeline get-pipeline-state --name ${pipelineName} --query 'stageStates[*]' --output json", + returnStdout: true + ).trim() + + def pipelineStages = readJSON(text: pipelineState) + + pipelineStages.each 
{ stage -> + def stageName = stage.stageName + def stageStatus = stage.latestExecution.status + echo "Stage: ${stageName}, Status: ${stageStatus}" + + if (stageStatus == 'InProgress') { + pipelineExecutionStatus = 'InProgress' + return + } else if (stageStatus == 'Failed') { + pipelineExecutionStatus = 'Failed' + echo "Pipeline execution failed at stage ${stageName}" + error("Pipeline execution failed at stage ${stageName}") + return + } + } + + if (pipelineExecutionStatus == 'InProgress') { + continue + } else if (pipelineExecutionStatus == 'Failed') { + echo "Pipeline execution failed at stage ${stageName}" + break + } else { + echo 'Pipeline execution completed successfully.' + break + } + } +} + + + +def publishBitbucketCodeToAWS(def context, bitbuckets3_name) { + def branch = context.gitBranch + def repository = context.componentId + zip zipFile: "${repository}-${branch}.zip", archive: false, dir: '.' + sh " aws s3 cp ${repository}-${branch}.zip s3://${bitbuckets3_name}/${repository}-${branch}.zip" +} + +def retrieveReportsFromAWS(def context, results3_name) { + sh "aws s3 cp s3://${results3_name}/junit/acceptance_GX_junit.xml ./build/test-results/test/acceptance_GX_junit.xml" + sh "aws s3 cp s3://${results3_name}/junit/acceptance_pytest_junit.xml ./build/test-results/test/acceptance_pytest_junit.xml" + sh "aws s3 cp s3://${results3_name}/junit/installation_pytest_junit.xml ./build/test-results/test/installation_pytest_junit.xml" + sh "aws s3 cp s3://${results3_name}/junit/integration_pytest_junit.xml ./build/test-results/test/integration_pytest_junit.xml" + + sh "aws s3 cp s3://${results3_name}/GX_test_results ./build/test-results/test/artifacts/acceptance/acceptance_GX_report --recursive" + sh "aws s3 cp s3://${results3_name}/GX_jsons ./build/test-results/test/artifacts/acceptance/GX_jsons --recursive" + sh "aws s3 cp s3://${results3_name}/pytest_results/acceptance/acceptance_allure_report_complete.html 
./build/test-results/test/artifacts/acceptance/acceptance_pytest_report.html" + sh "aws s3 cp s3://${results3_name}/pytest_results/installation/installation_allure_report_complete.html ./build/test-results/test/artifacts/installation/installation_pytest_report.html" + sh "aws s3 cp s3://${results3_name}/pytest_results/integration/integration_allure_report_complete.html ./build/test-results/test/artifacts/integration/integration_pytest_report.html" + + sh "ls build/test-results/test" + sh "rm build/test-results/test/default.xml" +} + +def addVars2envJsonFile(def context) { + echo "Starting addVars2envJsonFile" + def environment = context.environment + def projectId = context.projectId + def branch_name = context.gitBranch + def repository = context.componentId + def filePath = "./environments/${environment}.json" + + def existingJson = readFile file: filePath + def existingData = readJSON text: existingJson + + existingData.environment = environment + existingData.projectId = projectId + existingData.aws_region = aws_region + existingData.repository = repository + existingData.branch_name = branch_name + + echo "Environment: ${existingData}" + + def updatedJson = groovy.json.JsonOutput.toJson(existingData) + writeFile file: filePath, text: updatedJson + + echo "Finishing addVars2envJsonFile" +} + diff --git a/e2e-python/README.md b/e2e-python/README.md new file mode 100644 index 000000000..2a6df7d86 --- /dev/null +++ b/e2e-python/README.md @@ -0,0 +1,104 @@ +# Python end-to-end tests + +This end-to-end testing project was generated from the *e2e-python* ODS quickstarter. + +## Stages: installation / integration / acceptance + +With the introduction of the release manager concept in OpenDevStack 3, e2e test quickstarters are expected to run tests in three different stages (installation, integration & acceptance) and generate a JUnit XML result file for each of these stages. 
+ +Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. + +## Running end-to-end tests + +To execute all end-to-end tests: + +1. Set up AWS account credentials in environment folder's yml files. +2. Customize json files with the desired identification namings for the AWS resources that will be created with the quickstarters execution. +3. Modify the great_expectations and pytest folders to execute your tests located in the 'tests/acceptance/' directory. + +# Pipeline execution options: +- By a commit with a change in the code the pipeline in jenkins will be automatically executed +- From jenkins manually +- Automatic from a test (create a function to automate the trigger of the pipeline) + +## How to use this Stack? + +The behavior of a stack is determined by its purpose and the set of input parameters. Here is an overview of the *inputs* and *outputs* available for this stack. 
+ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 1.0 | +| [aws](#requirement\_aws) | 4.67.0 | +| [random](#requirement\_random) | 3.5.1 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 4.67.0 | +| [random](#provider\_random) | 3.5.1 | + +## Modules + +| Name | Description | +|-----------------------------------------------------------------------------------------------------------------|-------------| +| [modules\codebuild]() | resource | +| [modules\codepipeline]() | resource | +| [modules\iam_roles]() | resource | +| [modules\s3-bucket]() | resource | +| [modules\s3-bucket-policy](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Resources + +| Name | Type | +|--------------------------------------------------------------------------------------------------------------------------------------------|------| +| [aws_codebuild_project.build_project](https://registry.terraform.io/providers/hashicorp/...) 
| resource | +| [aws_codepipeline.codepipeline]() | resource | +| [aws_iam_role.codepipeline_role]() | resource | +| [aws_iam_role.codebuild_role]() | resource | +| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_iam_role_policy.codebuild_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_policy.allow_access_from_another_account](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-cp](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.e2e_results_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-artfcs](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.source_bitbucket_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [random_id.id](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [local_file.terraform-data](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [time_static.deployment](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | 
+|------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------|-----------------------|:--------:| +| [codebuild\_project\_name](#input\_codebuild\_project\_name) | The name of the AWS codebuild project. | `string` | `"codebuild-project"` | no | +| [codepipeline\_name](#input\_codepipeline\_name) | The name of the AWS codepipeline. | `string` | `"test-codepipeline"` | no | +| [codepipeline\_bucket\_name](#input\_codepipeline\_bucket\_name) | The name of the codepipeline artifacts S3 bucket. | `string` | `"cpplartifacts"` | no | +| [bitbucket\_source\_bucket\_name](#input\_bitbucket\_source\_bucket\_name) | The name of the source S3 bucket. | `string` | `"src-bitbucket"` | no | +| [e2e\_results\_bucket\_name](#input\_e2e\_results\_bucket\_name) | The name of the results S3 bucket. | `string` | `"test-results"` | no | +| [pipeline\_role\_name](#input\_pipeline\_role\_name) | The name of the codepipeline role. | `string` | `"test-codePipelineRole"` | no | +| [codebuild\_role\_name](#input\_codebuild\_role\_name) | The name of the codebuild role. | `string` | `"test-codeBuildRole"` | no | +| [codepipeline\_policy\_name](#input\_codepipeline\_policy\_name) | The name of the codepipeline policy. | `string` | `"codepipeline_policy"` | no | +| [codebuild\_policy\_name](#input\_codebuild\_policy\_name) | The name of the codebuild policy. | `string` | `"codebuild_policy"` | no | +| [meta\_environment](#input\_meta\_environment) | The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION. | `string` | `"DEVELOPMENT"` | no | +| [name](#input\_name) | The name of the stack. | `string` | `"stack-aws-quickstarter"` | no | + +## Outputs + +The output generated by terraform are used for internal quickestarter's purposes. 
+ + +## Environments +The pipeline supports multiple environments (Testing/DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. +The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. +The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). + +## Problems? Questions? Suggestions? + +In case of problems, questions or suggestions, feel free to file an issue with the respective project's repository. Thanks! + diff --git a/e2e-python/dev.yml.template b/e2e-python/dev.yml.template new file mode 100644 index 000000000..9307c56ec --- /dev/null +++ b/e2e-python/dev.yml.template @@ -0,0 +1,7 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-dev + secret: @project_id@-cd-aws-secret-access-key-dev + +account: "" diff --git a/e2e-python/files/.devcontainer/devcontainer.json.template b/e2e-python/files/.devcontainer/devcontainer.json.template new file mode 100644 index 000000000..f0810403b --- /dev/null +++ b/e2e-python/files/.devcontainer/devcontainer.json.template @@ -0,0 +1,3 @@ +{ + "image": "ghcr.io/nichtraunzer/terrarium:latest" +} diff --git a/e2e-python/files/.editorconfig b/e2e-python/files/.editorconfig new file mode 100644 index 000000000..147abfb08 --- /dev/null +++ b/e2e-python/files/.editorconfig @@ -0,0 +1,19 @@ +# EditorConfig is awesome: http://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +trim_trailing_whitespace = false ; trimming trailing whitespace may break Markdown + +[Makefile] +tab_width = 2 +indent_style = tab diff --git a/e2e-python/files/.gitignore b/e2e-python/files/.gitignore new file mode 100644 index 
000000000..df45f2c18 --- /dev/null +++ b/e2e-python/files/.gitignore @@ -0,0 +1,20 @@ +.bundle +.kitchen +.terraform +.terraform.lock.hcl +.terraform-data.json +.vscode +.devcontainer/devcontainer.json +*.auto.tfvars* +inspec.lock +outputs.json +terraform.tfvars* +terraform.tfstate* +tfplan +vendor +test/integration/*/files/*.json +test/integration/*/files/*.yml +reports/install/* +!reports/install/.gitkeep +Pipfile.lock +.venv diff --git a/e2e-python/files/.pre-commit-config.yaml b/e2e-python/files/.pre-commit-config.yaml new file mode 100644 index 000000000..aee89823b --- /dev/null +++ b/e2e-python/files/.pre-commit-config.yaml @@ -0,0 +1,83 @@ +exclude: '.terraform' +fail_fast: true + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks.git + rev: v4.4.0 + hooks: + - id: check-added-large-files + args: ['--maxkb=5000'] + - id: check-byte-order-marker + - id: check-case-conflict + - id: check-json + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + args: [--allow-multiple-documents, --unsafe] + - id: detect-aws-credentials + args: [--allow-missing-credentials] + - id: detect-private-key + - id: mixed-line-ending + args: [--fix=lf] + - id: pretty-format-json + exclude: ^.devcontainer/.*$ + args: [--autofix, --indent=2, --no-ensure-ascii] + - id: trailing-whitespace + +- repo: https://github.com/psf/black.git + rev: 23.3.0 + hooks: + - id: black + args: [--line-length=90] + files: (\.py)$ + +- repo: https://github.com/PyCQA/flake8.git + rev: 6.0.0 + hooks: + - id: flake8 + args: [--max-line-length=90] + files: (\.py)$ + +- repo: https://github.com/awslabs/cfn-python-lint + rev: v0.77.5 + hooks: + - id: cfn-python-lint + files: cfn-templates/.*\.(json|yml|yaml)$ + +- repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.79.1 + hooks: + - id: terraform_fmt + +- repo: https://github.com/nichtraunzer/ods-pre-commit-hooks + rev: v0.4.2 + hooks: + - id: terraformcreatei2o + files: (\.tf)$ + - id: terraformstackmoduleoutputs 
+ files: (\.tf)$ + +- repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.79.1 + hooks: + - id: terraform_fmt + - id: terraform_docs + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: rubocop + args: + - --fix-layout + +- repo: local + hooks: + - id: test + name: make test + description: Run integration tests. + entry: make test + language: system + files: (\.tf|\.rb)$ + pass_filenames: false + verbose: true + diff --git a/e2e-python/files/.rubocop.yml b/e2e-python/files/.rubocop.yml new file mode 100644 index 000000000..c09f7c5c1 --- /dev/null +++ b/e2e-python/files/.rubocop.yml @@ -0,0 +1,4 @@ +--- +Layout/LineLength: + Max: 150 + diff --git a/e2e-python/files/.ruby-version b/e2e-python/files/.ruby-version new file mode 100644 index 000000000..be94e6f53 --- /dev/null +++ b/e2e-python/files/.ruby-version @@ -0,0 +1 @@ +3.2.2 diff --git a/e2e-python/files/.terraform-version b/e2e-python/files/.terraform-version new file mode 100644 index 000000000..c514bd85c --- /dev/null +++ b/e2e-python/files/.terraform-version @@ -0,0 +1 @@ +1.4.6 diff --git a/e2e-python/files/Gemfile b/e2e-python/files/Gemfile new file mode 100644 index 000000000..a809dff57 --- /dev/null +++ b/e2e-python/files/Gemfile @@ -0,0 +1,19 @@ +source 'https://rubygems.org' do + gem 'activesupport' + gem 'irb' + gem 'kitchen-terraform', '~> 7.0' + gem 'test-kitchen' + gem 'rspec-retry' + gem 'aws-sdk', '~> 3' +end + +source 'https://packagecloud.io/cinc-project/stable' do + gem 'inspec-core' + gem 'inspec', '~> 5.21' + gem 'cinc-auditor-bin' + gem 'unf_ext' + gem 'chef-config' + gem 'chef-utils' + gem 'mixlib-install' + gem 'mixlib-versioning' +end diff --git a/e2e-python/files/Gemfile.lock b/e2e-python/files/Gemfile.lock new file mode 100644 index 000000000..2c90c4b6a --- /dev/null +++ b/e2e-python/files/Gemfile.lock @@ -0,0 +1,1883 @@ +GEM + remote: https://packagecloud.io/cinc-project/stable/ + specs: + chef-config (18.2.7) + addressable + 
chef-utils (= 18.2.7) + fuzzyurl + mixlib-config (>= 2.2.12, < 4.0) + mixlib-shellout (>= 2.0, < 4.0) + tomlrb (~> 1.2) + chef-utils (18.2.7) + concurrent-ruby + cinc-auditor-bin (5.21.29) + inspec (= 5.21.29) + inspec (5.21.29) + cookstyle + faraday_middleware (>= 0.12.2, < 1.1) + inspec-core (= 5.21.29) + mongo (= 2.13.2) + progress_bar (~> 1.3.3) + rake + train (~> 3.10) + train-aws (~> 0.2) + train-habitat (~> 0.1) + train-winrm (~> 0.2) + inspec-core (5.21.29) + addressable (~> 2.4) + chef-telemetry (~> 1.0, >= 1.0.8) + faraday (>= 1, < 3) + faraday-follow_redirects (~> 0.3) + hashie (>= 3.4, < 5.0) + license-acceptance (>= 0.2.13, < 3.0) + method_source (>= 0.8, < 2.0) + mixlib-log (~> 3.0) + multipart-post (~> 2.0) + parallel (~> 1.9) + parslet (>= 1.5, < 2.0) + pry (~> 0.13) + rspec (>= 3.9, <= 3.11) + rspec-its (~> 1.2) + rubyzip (>= 1.2.2, < 3.0) + semverse (~> 3.0) + sslshake (~> 1.2) + thor (>= 0.20, < 2.0) + tomlrb (>= 1.2, < 2.1) + train-core (~> 3.10) + tty-prompt (~> 0.17) + tty-table (~> 0.10) + mixlib-install (3.12.27) + mixlib-shellout + mixlib-versioning + thor + mixlib-versioning (1.2.12) + unf_ext (0.0.7.2) + +GEM + remote: https://rubygems.org/ + specs: + activesupport (7.0.4.3) + concurrent-ruby (~> 1.0, >= 1.0.2) + i18n (>= 1.6, < 2) + minitest (>= 5.1) + tzinfo (~> 2.0) + addressable (2.8.4) + public_suffix (>= 2.0.2, < 6.0) + ast (2.4.2) + aws-eventstream (1.2.0) + aws-partitions (1.763.0) + aws-sdk (3.1.0) + aws-sdk-resources (~> 3) + aws-sdk-accessanalyzer (1.34.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-account (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-acm (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-acmpca (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-alexaforbusiness (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-amplify (1.32.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-amplifybackend (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-amplifyuibuilder (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apigateway (1.81.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apigatewaymanagementapi (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apigatewayv2 (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appconfig (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appconfigdata (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appflow (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appintegrationsservice (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationautoscaling (1.51.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationcostprofiler (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationdiscoveryservice (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-applicationinsights (1.33.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appmesh (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appregistry (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-apprunner (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appstream (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-appsync (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-arczonalshift (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-athena (1.65.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-auditmanager (1.31.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-augmentedairuntime (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-autoscaling (1.63.0) + 
aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-autoscalingplans (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-backup (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-backupgateway (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-backupstorage (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-batch (1.47.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-billingconductor (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-braket (1.21.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-budgets (1.52.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chime (1.71.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkidentity (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkmediapipelines (1.5.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkmeetings (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkmessaging (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-chimesdkvoice (1.5.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cleanrooms (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloud9 (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudcontrolapi (1.10.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-clouddirectory (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudformation (1.77.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudfront (1.76.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudhsm (1.41.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudhsmv2 (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudsearch (1.42.0) + 
aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudsearchdomain (1.34.1) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudtrail (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudtraildata (1.0.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatch (1.73.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchevents (1.46.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchevidently (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchlogs (1.62.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cloudwatchrum (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codeartifact (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codebuild (1.90.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codecatalyst (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sdk-codecommit (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codedeploy (1.52.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codeguruprofiler (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codegurureviewer (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codepipeline (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codestar (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codestarconnections (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-codestarnotifications (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-cognitoidentity (1.31.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-cognitoidentityprovider (1.53.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-cognitosync (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-comprehend (1.68.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-comprehendmedical (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-computeoptimizer (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-configservice (1.90.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connect (1.106.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectcampaignservice (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectcases (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectcontactlens (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectparticipant (1.29.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-connectwisdomservice (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-controltower (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-core (3.172.0) + aws-eventstream (~> 1, >= 1.0.2) + aws-partitions (~> 1, >= 1.651.0) + aws-sigv4 (~> 1.5) + jmespath (~> 1, >= 1.6.1) + aws-sdk-costandusagereportservice (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-costexplorer (1.83.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-customerprofiles (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-databasemigrationservice (1.53.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-dataexchange (1.33.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-datapipeline (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-datasync (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dax (1.41.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-detective (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-devicefarm (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + 
aws-sigv4 (~> 1.1) + aws-sdk-devopsguru (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-directconnect (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-directoryservice (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dlm (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-docdb (1.47.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-docdbelastic (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-drs (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dynamodb (1.84.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-dynamodbstreams (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ebs (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ec2 (1.379.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ec2instanceconnect (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ecr (1.58.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ecrpublic (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ecs (1.118.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-efs (1.60.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-eks (1.83.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticache (1.85.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticbeanstalk (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticinference (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticloadbalancing (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticloadbalancingv2 (1.84.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-elasticsearchservice (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-elastictranscoder (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-emr (1.53.0) + aws-sdk-core (~> 3, >= 3.121.2) + aws-sigv4 (~> 1.1) + aws-sdk-emrcontainers (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-emrserverless (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-eventbridge (1.24.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-finspace (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-finspacedata (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-firehose (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-fis (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-fms (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-forecastqueryservice (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-forecastservice (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-frauddetector (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-fsx (1.65.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-gamelift (1.62.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-gamesparks (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-glacier (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-globalaccelerator (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-glue (1.88.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-gluedatabrew (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-greengrass (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-greengrassv2 (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-groundstation (1.33.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-guardduty (1.70.0) + aws-sdk-core 
(~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-health (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-healthlake (1.15.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-honeycode (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iam (1.77.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-identitystore (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-imagebuilder (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-importexport (1.36.1) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv2 (~> 1.0) + aws-sdk-inspector (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-inspector2 (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-internetmonitor (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iot (1.104.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iot1clickdevicesservice (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iot1clickprojects (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotanalytics (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotdataplane (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotdeviceadvisor (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotevents (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ioteventsdata (1.29.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotfleethub (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotfleetwise (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotjobsdataplane (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotroborunner (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotsecuretunneling (1.23.0) + aws-sdk-core 
(~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotsitewise (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotthingsgraph (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iottwinmaker (1.11.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-iotwireless (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ivs (1.29.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ivschat (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ivsrealtime (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kafka (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kafkaconnect (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kendra (1.65.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kendraranking (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-keyspaces (1.6.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesis (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisanalytics (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisanalyticsv2 (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideo (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideoarchivedmedia (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideomedia (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideosignalingchannels (1.21.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kinesisvideowebrtcstorage (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-kms (1.64.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lakeformation (1.34.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lambda (1.96.0) + 
aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lambdapreview (1.36.1) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lex (1.47.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lexmodelbuildingservice (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lexmodelsv2 (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lexruntimev2 (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-licensemanager (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-licensemanagerlinuxsubscriptions (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-licensemanagerusersubscriptions (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lightsail (1.75.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-locationservice (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lookoutequipment (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lookoutforvision (1.19.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-lookoutmetrics (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-machinelearning (1.39.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-macie (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-macie2 (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mainframemodernization (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-managedblockchain (1.38.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-managedgrafana (1.14.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplacecatalog (1.27.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplacecommerceanalytics (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplaceentitlementservice 
(1.37.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-marketplacemetering (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediaconnect (1.48.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediaconvert (1.105.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-medialive (1.98.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediapackage (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediapackagevod (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediastore (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediastoredata (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mediatailor (1.61.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-memorydb (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mgn (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhub (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhubconfig (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhuborchestrator (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhubrefactorspaces (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-migrationhubstrategyrecommendations (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mobile (1.37.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mq (1.40.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-mturk (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-mwaa (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-neptune (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-networkfirewall (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-networkmanager (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-nimblestudio (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-oam (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-omics (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opensearchserverless (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opensearchservice (1.20.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opsworks (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-opsworkscm (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-organizations (1.59.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-osis (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-outposts (1.42.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-panorama (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-personalize (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-personalizeevents (1.30.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-personalizeruntime (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pi (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpoint (1.71.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpointemail (1.37.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpointsmsvoice (1.34.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pinpointsmsvoicev2 (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pipes (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-polly (1.66.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-pricing (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-privatenetworks (1.4.0) + 
aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-prometheusservice (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-proton (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-qldb (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-qldbsession (1.24.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-quicksight (1.79.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ram (1.26.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-rds (1.178.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-rdsdataservice (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-recyclebin (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-redshift (1.91.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-redshiftdataapiservice (1.26.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-redshiftserverless (1.7.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-rekognition (1.78.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resiliencehub (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resourceexplorer2 (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resourcegroups (1.48.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resourcegroupstaggingapi (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-resources (3.163.0) + aws-sdk-accessanalyzer (~> 1) + aws-sdk-account (~> 1) + aws-sdk-acm (~> 1) + aws-sdk-acmpca (~> 1) + aws-sdk-alexaforbusiness (~> 1) + aws-sdk-amplify (~> 1) + aws-sdk-amplifybackend (~> 1) + aws-sdk-amplifyuibuilder (~> 1) + aws-sdk-apigateway (~> 1) + aws-sdk-apigatewaymanagementapi (~> 1) + aws-sdk-apigatewayv2 (~> 1) + aws-sdk-appconfig (~> 1) + aws-sdk-appconfigdata (~> 1) + aws-sdk-appflow (~> 1) + 
aws-sdk-appintegrationsservice (~> 1) + aws-sdk-applicationautoscaling (~> 1) + aws-sdk-applicationcostprofiler (~> 1) + aws-sdk-applicationdiscoveryservice (~> 1) + aws-sdk-applicationinsights (~> 1) + aws-sdk-appmesh (~> 1) + aws-sdk-appregistry (~> 1) + aws-sdk-apprunner (~> 1) + aws-sdk-appstream (~> 1) + aws-sdk-appsync (~> 1) + aws-sdk-arczonalshift (~> 1) + aws-sdk-athena (~> 1) + aws-sdk-auditmanager (~> 1) + aws-sdk-augmentedairuntime (~> 1) + aws-sdk-autoscaling (~> 1) + aws-sdk-autoscalingplans (~> 1) + aws-sdk-backup (~> 1) + aws-sdk-backupgateway (~> 1) + aws-sdk-backupstorage (~> 1) + aws-sdk-batch (~> 1) + aws-sdk-billingconductor (~> 1) + aws-sdk-braket (~> 1) + aws-sdk-budgets (~> 1) + aws-sdk-chime (~> 1) + aws-sdk-chimesdkidentity (~> 1) + aws-sdk-chimesdkmediapipelines (~> 1) + aws-sdk-chimesdkmeetings (~> 1) + aws-sdk-chimesdkmessaging (~> 1) + aws-sdk-chimesdkvoice (~> 1) + aws-sdk-cleanrooms (~> 1) + aws-sdk-cloud9 (~> 1) + aws-sdk-cloudcontrolapi (~> 1) + aws-sdk-clouddirectory (~> 1) + aws-sdk-cloudformation (~> 1) + aws-sdk-cloudfront (~> 1) + aws-sdk-cloudhsm (~> 1) + aws-sdk-cloudhsmv2 (~> 1) + aws-sdk-cloudsearch (~> 1) + aws-sdk-cloudsearchdomain (~> 1) + aws-sdk-cloudtrail (~> 1) + aws-sdk-cloudtraildata (~> 1) + aws-sdk-cloudwatch (~> 1) + aws-sdk-cloudwatchevents (~> 1) + aws-sdk-cloudwatchevidently (~> 1) + aws-sdk-cloudwatchlogs (~> 1) + aws-sdk-cloudwatchrum (~> 1) + aws-sdk-codeartifact (~> 1) + aws-sdk-codebuild (~> 1) + aws-sdk-codecatalyst (~> 1) + aws-sdk-codecommit (~> 1) + aws-sdk-codedeploy (~> 1) + aws-sdk-codeguruprofiler (~> 1) + aws-sdk-codegurureviewer (~> 1) + aws-sdk-codepipeline (~> 1) + aws-sdk-codestar (~> 1) + aws-sdk-codestarconnections (~> 1) + aws-sdk-codestarnotifications (~> 1) + aws-sdk-cognitoidentity (~> 1) + aws-sdk-cognitoidentityprovider (~> 1) + aws-sdk-cognitosync (~> 1) + aws-sdk-comprehend (~> 1) + aws-sdk-comprehendmedical (~> 1) + aws-sdk-computeoptimizer (~> 1) + aws-sdk-configservice (~> 1) + 
aws-sdk-connect (~> 1) + aws-sdk-connectcampaignservice (~> 1) + aws-sdk-connectcases (~> 1) + aws-sdk-connectcontactlens (~> 1) + aws-sdk-connectparticipant (~> 1) + aws-sdk-connectwisdomservice (~> 1) + aws-sdk-controltower (~> 1) + aws-sdk-costandusagereportservice (~> 1) + aws-sdk-costexplorer (~> 1) + aws-sdk-customerprofiles (~> 1) + aws-sdk-databasemigrationservice (~> 1) + aws-sdk-dataexchange (~> 1) + aws-sdk-datapipeline (~> 1) + aws-sdk-datasync (~> 1) + aws-sdk-dax (~> 1) + aws-sdk-detective (~> 1) + aws-sdk-devicefarm (~> 1) + aws-sdk-devopsguru (~> 1) + aws-sdk-directconnect (~> 1) + aws-sdk-directoryservice (~> 1) + aws-sdk-dlm (~> 1) + aws-sdk-docdb (~> 1) + aws-sdk-docdbelastic (~> 1) + aws-sdk-drs (~> 1) + aws-sdk-dynamodb (~> 1) + aws-sdk-dynamodbstreams (~> 1) + aws-sdk-ebs (~> 1) + aws-sdk-ec2 (~> 1) + aws-sdk-ec2instanceconnect (~> 1) + aws-sdk-ecr (~> 1) + aws-sdk-ecrpublic (~> 1) + aws-sdk-ecs (~> 1) + aws-sdk-efs (~> 1) + aws-sdk-eks (~> 1) + aws-sdk-elasticache (~> 1) + aws-sdk-elasticbeanstalk (~> 1) + aws-sdk-elasticinference (~> 1) + aws-sdk-elasticloadbalancing (~> 1) + aws-sdk-elasticloadbalancingv2 (~> 1) + aws-sdk-elasticsearchservice (~> 1) + aws-sdk-elastictranscoder (~> 1) + aws-sdk-emr (~> 1) + aws-sdk-emrcontainers (~> 1) + aws-sdk-emrserverless (~> 1) + aws-sdk-eventbridge (~> 1) + aws-sdk-finspace (~> 1) + aws-sdk-finspacedata (~> 1) + aws-sdk-firehose (~> 1) + aws-sdk-fis (~> 1) + aws-sdk-fms (~> 1) + aws-sdk-forecastqueryservice (~> 1) + aws-sdk-forecastservice (~> 1) + aws-sdk-frauddetector (~> 1) + aws-sdk-fsx (~> 1) + aws-sdk-gamelift (~> 1) + aws-sdk-gamesparks (~> 1) + aws-sdk-glacier (~> 1) + aws-sdk-globalaccelerator (~> 1) + aws-sdk-glue (~> 1) + aws-sdk-gluedatabrew (~> 1) + aws-sdk-greengrass (~> 1) + aws-sdk-greengrassv2 (~> 1) + aws-sdk-groundstation (~> 1) + aws-sdk-guardduty (~> 1) + aws-sdk-health (~> 1) + aws-sdk-healthlake (~> 1) + aws-sdk-honeycode (~> 1) + aws-sdk-iam (~> 1) + aws-sdk-identitystore (~> 1) 
+ aws-sdk-imagebuilder (~> 1) + aws-sdk-importexport (~> 1) + aws-sdk-inspector (~> 1) + aws-sdk-inspector2 (~> 1) + aws-sdk-internetmonitor (~> 1) + aws-sdk-iot (~> 1) + aws-sdk-iot1clickdevicesservice (~> 1) + aws-sdk-iot1clickprojects (~> 1) + aws-sdk-iotanalytics (~> 1) + aws-sdk-iotdataplane (~> 1) + aws-sdk-iotdeviceadvisor (~> 1) + aws-sdk-iotevents (~> 1) + aws-sdk-ioteventsdata (~> 1) + aws-sdk-iotfleethub (~> 1) + aws-sdk-iotfleetwise (~> 1) + aws-sdk-iotjobsdataplane (~> 1) + aws-sdk-iotroborunner (~> 1) + aws-sdk-iotsecuretunneling (~> 1) + aws-sdk-iotsitewise (~> 1) + aws-sdk-iotthingsgraph (~> 1) + aws-sdk-iottwinmaker (~> 1) + aws-sdk-iotwireless (~> 1) + aws-sdk-ivs (~> 1) + aws-sdk-ivschat (~> 1) + aws-sdk-ivsrealtime (~> 1) + aws-sdk-kafka (~> 1) + aws-sdk-kafkaconnect (~> 1) + aws-sdk-kendra (~> 1) + aws-sdk-kendraranking (~> 1) + aws-sdk-keyspaces (~> 1) + aws-sdk-kinesis (~> 1) + aws-sdk-kinesisanalytics (~> 1) + aws-sdk-kinesisanalyticsv2 (~> 1) + aws-sdk-kinesisvideo (~> 1) + aws-sdk-kinesisvideoarchivedmedia (~> 1) + aws-sdk-kinesisvideomedia (~> 1) + aws-sdk-kinesisvideosignalingchannels (~> 1) + aws-sdk-kinesisvideowebrtcstorage (~> 1) + aws-sdk-kms (~> 1) + aws-sdk-lakeformation (~> 1) + aws-sdk-lambda (~> 1) + aws-sdk-lambdapreview (~> 1) + aws-sdk-lex (~> 1) + aws-sdk-lexmodelbuildingservice (~> 1) + aws-sdk-lexmodelsv2 (~> 1) + aws-sdk-lexruntimev2 (~> 1) + aws-sdk-licensemanager (~> 1) + aws-sdk-licensemanagerlinuxsubscriptions (~> 1) + aws-sdk-licensemanagerusersubscriptions (~> 1) + aws-sdk-lightsail (~> 1) + aws-sdk-locationservice (~> 1) + aws-sdk-lookoutequipment (~> 1) + aws-sdk-lookoutforvision (~> 1) + aws-sdk-lookoutmetrics (~> 1) + aws-sdk-machinelearning (~> 1) + aws-sdk-macie (~> 1) + aws-sdk-macie2 (~> 1) + aws-sdk-mainframemodernization (~> 1) + aws-sdk-managedblockchain (~> 1) + aws-sdk-managedgrafana (~> 1) + aws-sdk-marketplacecatalog (~> 1) + aws-sdk-marketplacecommerceanalytics (~> 1) + 
aws-sdk-marketplaceentitlementservice (~> 1) + aws-sdk-marketplacemetering (~> 1) + aws-sdk-mediaconnect (~> 1) + aws-sdk-mediaconvert (~> 1) + aws-sdk-medialive (~> 1) + aws-sdk-mediapackage (~> 1) + aws-sdk-mediapackagevod (~> 1) + aws-sdk-mediastore (~> 1) + aws-sdk-mediastoredata (~> 1) + aws-sdk-mediatailor (~> 1) + aws-sdk-memorydb (~> 1) + aws-sdk-mgn (~> 1) + aws-sdk-migrationhub (~> 1) + aws-sdk-migrationhubconfig (~> 1) + aws-sdk-migrationhuborchestrator (~> 1) + aws-sdk-migrationhubrefactorspaces (~> 1) + aws-sdk-migrationhubstrategyrecommendations (~> 1) + aws-sdk-mobile (~> 1) + aws-sdk-mq (~> 1) + aws-sdk-mturk (~> 1) + aws-sdk-mwaa (~> 1) + aws-sdk-neptune (~> 1) + aws-sdk-networkfirewall (~> 1) + aws-sdk-networkmanager (~> 1) + aws-sdk-nimblestudio (~> 1) + aws-sdk-oam (~> 1) + aws-sdk-omics (~> 1) + aws-sdk-opensearchserverless (~> 1) + aws-sdk-opensearchservice (~> 1) + aws-sdk-opsworks (~> 1) + aws-sdk-opsworkscm (~> 1) + aws-sdk-organizations (~> 1) + aws-sdk-osis (~> 1) + aws-sdk-outposts (~> 1) + aws-sdk-panorama (~> 1) + aws-sdk-personalize (~> 1) + aws-sdk-personalizeevents (~> 1) + aws-sdk-personalizeruntime (~> 1) + aws-sdk-pi (~> 1) + aws-sdk-pinpoint (~> 1) + aws-sdk-pinpointemail (~> 1) + aws-sdk-pinpointsmsvoice (~> 1) + aws-sdk-pinpointsmsvoicev2 (~> 1) + aws-sdk-pipes (~> 1) + aws-sdk-polly (~> 1) + aws-sdk-pricing (~> 1) + aws-sdk-privatenetworks (~> 1) + aws-sdk-prometheusservice (~> 1) + aws-sdk-proton (~> 1) + aws-sdk-qldb (~> 1) + aws-sdk-qldbsession (~> 1) + aws-sdk-quicksight (~> 1) + aws-sdk-ram (~> 1) + aws-sdk-rds (~> 1) + aws-sdk-rdsdataservice (~> 1) + aws-sdk-recyclebin (~> 1) + aws-sdk-redshift (~> 1) + aws-sdk-redshiftdataapiservice (~> 1) + aws-sdk-redshiftserverless (~> 1) + aws-sdk-rekognition (~> 1) + aws-sdk-resiliencehub (~> 1) + aws-sdk-resourceexplorer2 (~> 1) + aws-sdk-resourcegroups (~> 1) + aws-sdk-resourcegroupstaggingapi (~> 1) + aws-sdk-robomaker (~> 1) + aws-sdk-rolesanywhere (~> 1) + aws-sdk-route53 (~> 
1) + aws-sdk-route53domains (~> 1) + aws-sdk-route53recoverycluster (~> 1) + aws-sdk-route53recoverycontrolconfig (~> 1) + aws-sdk-route53recoveryreadiness (~> 1) + aws-sdk-route53resolver (~> 1) + aws-sdk-s3 (~> 1) + aws-sdk-s3control (~> 1) + aws-sdk-s3outposts (~> 1) + aws-sdk-sagemaker (~> 1) + aws-sdk-sagemakeredgemanager (~> 1) + aws-sdk-sagemakerfeaturestoreruntime (~> 1) + aws-sdk-sagemakergeospatial (~> 1) + aws-sdk-sagemakermetrics (~> 1) + aws-sdk-sagemakerruntime (~> 1) + aws-sdk-savingsplans (~> 1) + aws-sdk-scheduler (~> 1) + aws-sdk-schemas (~> 1) + aws-sdk-secretsmanager (~> 1) + aws-sdk-securityhub (~> 1) + aws-sdk-securitylake (~> 1) + aws-sdk-serverlessapplicationrepository (~> 1) + aws-sdk-servicecatalog (~> 1) + aws-sdk-servicediscovery (~> 1) + aws-sdk-servicequotas (~> 1) + aws-sdk-ses (~> 1) + aws-sdk-sesv2 (~> 1) + aws-sdk-shield (~> 1) + aws-sdk-signer (~> 1) + aws-sdk-simpledb (~> 1) + aws-sdk-simspaceweaver (~> 1) + aws-sdk-sms (~> 1) + aws-sdk-snowball (~> 1) + aws-sdk-snowdevicemanagement (~> 1) + aws-sdk-sns (~> 1) + aws-sdk-sqs (~> 1) + aws-sdk-ssm (~> 1) + aws-sdk-ssmcontacts (~> 1) + aws-sdk-ssmincidents (~> 1) + aws-sdk-ssmsap (~> 1) + aws-sdk-ssoadmin (~> 1) + aws-sdk-states (~> 1) + aws-sdk-storagegateway (~> 1) + aws-sdk-support (~> 1) + aws-sdk-supportapp (~> 1) + aws-sdk-swf (~> 1) + aws-sdk-synthetics (~> 1) + aws-sdk-textract (~> 1) + aws-sdk-timestreamquery (~> 1) + aws-sdk-timestreamwrite (~> 1) + aws-sdk-tnb (~> 1) + aws-sdk-transcribeservice (~> 1) + aws-sdk-transcribestreamingservice (~> 1) + aws-sdk-transfer (~> 1) + aws-sdk-translate (~> 1) + aws-sdk-voiceid (~> 1) + aws-sdk-vpclattice (~> 1) + aws-sdk-waf (~> 1) + aws-sdk-wafregional (~> 1) + aws-sdk-wafv2 (~> 1) + aws-sdk-wellarchitected (~> 1) + aws-sdk-workdocs (~> 1) + aws-sdk-worklink (~> 1) + aws-sdk-workmail (~> 1) + aws-sdk-workmailmessageflow (~> 1) + aws-sdk-workspaces (~> 1) + aws-sdk-workspacesweb (~> 1) + aws-sdk-xray (~> 1) + aws-sdk-robomaker (1.53.0) 
+ aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-rolesanywhere (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53 (1.71.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53domains (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53recoverycluster (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53recoverycontrolconfig (1.13.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53recoveryreadiness (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-route53resolver (1.41.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-s3 (1.122.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sdk-kms (~> 1) + aws-sigv4 (~> 1.4) + aws-sdk-s3control (1.43.0) + aws-sdk-core (~> 3, >= 3.122.0) + aws-sigv4 (~> 1.1) + aws-sdk-s3outposts (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemaker (1.178.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakeredgemanager (1.14.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakerfeaturestoreruntime (1.16.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakergeospatial (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakermetrics (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sagemakerruntime (1.49.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-savingsplans (1.28.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-scheduler (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-schemas (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-secretsmanager (1.46.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-securityhub (1.81.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-securitylake (1.3.0) + aws-sdk-core 
(~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-serverlessapplicationrepository (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-servicecatalog (1.60.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-servicediscovery (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-servicequotas (1.25.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ses (1.41.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-sesv2 (1.32.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-shield (1.51.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-signer (1.32.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv4 (~> 1.1) + aws-sdk-simpledb (1.29.0) + aws-sdk-core (~> 3, >= 3.120.0) + aws-sigv2 (~> 1.0) + aws-sdk-simspaceweaver (1.2.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sms (1.43.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-snowball (1.54.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-snowdevicemanagement (1.9.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sns (1.60.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-sqs (1.55.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssm (1.150.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssmcontacts (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssmincidents (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssmsap (1.3.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-ssoadmin (1.23.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-states (1.39.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-storagegateway (1.70.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-support (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + 
aws-sdk-supportapp (1.4.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-swf (1.40.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-synthetics (1.19.0) + aws-sdk-core (~> 3, >= 3.121.2) + aws-sigv4 (~> 1.1) + aws-sdk-textract (1.45.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-timestreamquery (1.18.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-timestreamwrite (1.17.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-tnb (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-transcribeservice (1.82.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-transcribestreamingservice (1.46.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-transfer (1.34.0) + aws-sdk-core (~> 3, >= 3.112.0) + aws-sigv4 (~> 1.1) + aws-sdk-translate (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-voiceid (1.12.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-vpclattice (1.1.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-waf (1.43.0) + aws-sdk-core (~> 3, >= 3.122.0) + aws-sigv4 (~> 1.1) + aws-sdk-wafregional (1.50.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-wafv2 (1.56.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-wellarchitected (1.22.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workdocs (1.44.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-worklink (1.35.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workmail (1.53.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workmailmessageflow (1.23.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workspaces (1.80.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-workspacesweb (1.8.0) + aws-sdk-core (~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sdk-xray (1.52.0) + aws-sdk-core 
(~> 3, >= 3.165.0) + aws-sigv4 (~> 1.1) + aws-sigv2 (1.1.0) + aws-sigv4 (1.5.2) + aws-eventstream (~> 1, >= 1.0.2) + azure_graph_rbac (0.17.2) + ms_rest_azure (~> 0.12.0) + azure_mgmt_key_vault (0.17.7) + ms_rest_azure (~> 0.12.0) + azure_mgmt_resources (0.18.2) + ms_rest_azure (~> 0.12.0) + azure_mgmt_security (0.19.0) + ms_rest_azure (~> 0.12.0) + azure_mgmt_storage (0.23.0) + ms_rest_azure (~> 0.12.0) + bcrypt_pbkdf (1.1.0) + bson (4.15.0) + builder (3.2.4) + chef-telemetry (1.1.1) + chef-config + concurrent-ruby (~> 1.0) + coderay (1.1.3) + concurrent-ruby (1.2.2) + cookstyle (7.32.2) + rubocop (= 1.25.1) + declarative (0.0.20) + delegate (0.3.0) + diff-lcs (1.5.0) + docker-api (2.2.0) + excon (>= 0.47.0) + multi_json + domain_name (0.5.20190701) + unf (>= 0.0.5, < 1.0.0) + dry-configurable (1.0.1) + dry-core (~> 1.0, < 2) + zeitwerk (~> 2.6) + dry-core (1.0.0) + concurrent-ruby (~> 1.0) + zeitwerk (~> 2.6) + dry-inflector (1.0.0) + dry-initializer (3.1.1) + dry-logic (1.5.0) + concurrent-ruby (~> 1.0) + dry-core (~> 1.0, < 2) + zeitwerk (~> 2.6) + dry-schema (1.13.1) + concurrent-ruby (~> 1.0) + dry-configurable (~> 1.0, >= 1.0.1) + dry-core (~> 1.0, < 2) + dry-initializer (~> 3.0) + dry-logic (>= 1.4, < 2) + dry-types (>= 1.7, < 2) + zeitwerk (~> 2.6) + dry-types (1.7.1) + concurrent-ruby (~> 1.0) + dry-core (~> 1.0) + dry-inflector (~> 1.0) + dry-logic (~> 1.4) + zeitwerk (~> 2.6) + dry-validation (1.10.0) + concurrent-ruby (~> 1.0) + dry-core (~> 1.0, < 2) + dry-initializer (~> 3.0) + dry-schema (>= 1.12, < 2) + zeitwerk (~> 2.6) + ed25519 (1.3.0) + erubi (1.12.0) + excon (0.99.0) + faraday (1.10.3) + faraday-em_http (~> 1.0) + faraday-em_synchrony (~> 1.0) + faraday-excon (~> 1.1) + faraday-httpclient (~> 1.0) + faraday-multipart (~> 1.0) + faraday-net_http (~> 1.0) + faraday-net_http_persistent (~> 1.0) + faraday-patron (~> 1.0) + faraday-rack (~> 1.0) + faraday-retry (~> 1.0) + ruby2_keywords (>= 0.0.4) + faraday-cookie_jar (0.0.7) + faraday (>= 0.8.0) + 
http-cookie (~> 1.0.0) + faraday-em_http (1.0.0) + faraday-em_synchrony (1.0.0) + faraday-excon (1.1.0) + faraday-follow_redirects (0.3.0) + faraday (>= 1, < 3) + faraday-httpclient (1.0.1) + faraday-multipart (1.0.4) + multipart-post (~> 2) + faraday-net_http (1.0.1) + faraday-net_http_persistent (1.2.0) + faraday-patron (1.0.0) + faraday-rack (1.0.0) + faraday-retry (1.0.3) + faraday_middleware (1.0.0) + faraday (~> 1.0) + ffi (1.15.5) + fuzzyurl (0.9.0) + google-api-client (0.52.0) + addressable (~> 2.5, >= 2.5.1) + googleauth (~> 0.9) + httpclient (>= 2.8.1, < 3.0) + mini_mime (~> 1.0) + representable (~> 3.0) + retriable (>= 2.0, < 4.0) + rexml + signet (~> 0.12) + googleauth (0.14.0) + faraday (>= 0.17.3, < 2.0) + jwt (>= 1.4, < 3.0) + memoist (~> 0.16) + multi_json (~> 1.11) + os (>= 0.9, < 2.0) + signet (~> 0.14) + gssapi (1.3.1) + ffi (>= 1.0.1) + gyoku (1.4.0) + builder (>= 2.1.2) + rexml (~> 3.0) + hashie (4.1.0) + highline (2.1.0) + http-cookie (1.0.5) + domain_name (~> 0.5) + httpclient (2.8.3) + i18n (1.13.0) + concurrent-ruby (~> 1.0) + inifile (3.0.0) + io-console (0.6.0) + irb (1.6.4) + reline (>= 0.3.0) + jmespath (1.6.2) + json (2.6.3) + jwt (2.7.0) + kitchen-terraform (7.0.2) + delegate (~> 0.3.0) + dry-validation (~> 1.6) + inspec (~> 5.21, >= 5.21.29) + json (~> 2.3) + test-kitchen (>= 2.1, < 4.0) + tty-which (~> 0.5.0) + license-acceptance (2.1.13) + pastel (~> 0.7) + tomlrb (>= 1.2, < 3.0) + tty-box (~> 0.6) + tty-prompt (~> 0.20) + little-plugger (1.1.4) + logging (2.3.1) + little-plugger (~> 1.1) + multi_json (~> 1.14) + memoist (0.16.2) + method_source (1.0.0) + mini_mime (1.1.2) + minitest (5.18.0) + mixlib-config (3.0.27) + tomlrb + mixlib-log (3.0.9) + mixlib-shellout (3.2.7) + chef-utils + mongo (2.13.2) + bson (>= 4.8.2, < 5.0.0) + ms_rest (0.7.6) + concurrent-ruby (~> 1.0) + faraday (>= 0.9, < 2.0.0) + timeliness (~> 0.3.10) + ms_rest_azure (0.12.0) + concurrent-ruby (~> 1.0) + faraday (>= 0.9, < 2.0.0) + faraday-cookie_jar (~> 
0.0.6) + ms_rest (~> 0.7.6) + multi_json (1.15.0) + multipart-post (2.3.0) + net-scp (4.0.0) + net-ssh (>= 2.6.5, < 8.0.0) + net-ssh (7.1.0) + net-ssh-gateway (2.0.0) + net-ssh (>= 4.0.0) + nori (2.6.0) + options (2.3.2) + os (1.1.4) + parallel (1.23.0) + parser (3.2.2.1) + ast (~> 2.4.1) + parslet (1.8.2) + pastel (0.8.0) + tty-color (~> 0.5) + progress_bar (1.3.3) + highline (>= 1.6, < 3) + options (~> 2.3.0) + pry (0.14.2) + coderay (~> 1.1) + method_source (~> 1.0) + public_suffix (5.0.1) + rainbow (3.1.1) + rake (13.0.6) + regexp_parser (2.8.0) + reline (0.3.3) + io-console (~> 0.5) + representable (3.2.0) + declarative (< 0.1.0) + trailblazer-option (>= 0.1.1, < 0.2.0) + uber (< 0.2.0) + retriable (3.1.2) + rexml (3.2.5) + rspec (3.11.0) + rspec-core (~> 3.11.0) + rspec-expectations (~> 3.11.0) + rspec-mocks (~> 3.11.0) + rspec-core (3.11.0) + rspec-support (~> 3.11.0) + rspec-expectations (3.11.1) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.11.0) + rspec-its (1.3.0) + rspec-core (>= 3.0.0) + rspec-expectations (>= 3.0.0) + rspec-mocks (3.11.2) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.11.0) + rspec-retry (0.6.2) + rspec-core (> 3.3) + rspec-support (3.11.1) + rubocop (1.25.1) + parallel (~> 1.10) + parser (>= 3.1.0.0) + rainbow (>= 2.2.2, < 4.0) + regexp_parser (>= 1.8, < 3.0) + rexml + rubocop-ast (>= 1.15.1, < 2.0) + ruby-progressbar (~> 1.7) + unicode-display_width (>= 1.4.0, < 3.0) + rubocop-ast (1.28.1) + parser (>= 3.2.1.0) + ruby-progressbar (1.13.0) + ruby2_keywords (0.0.5) + rubyntlm (0.6.3) + rubyzip (2.3.2) + semverse (3.0.2) + signet (0.17.0) + addressable (~> 2.8) + faraday (>= 0.17.5, < 3.a) + jwt (>= 1.5, < 3.0) + multi_json (~> 1.10) + sslshake (1.3.1) + strings (0.2.1) + strings-ansi (~> 0.2) + unicode-display_width (>= 1.5, < 3.0) + unicode_utils (~> 1.4) + strings-ansi (0.2.0) + test-kitchen (3.5.0) + bcrypt_pbkdf (~> 1.0) + chef-utils (>= 16.4.35) + ed25519 (~> 1.2) + license-acceptance (>= 1.0.11, < 3.0) + mixlib-install 
(~> 3.6) + mixlib-shellout (>= 1.2, < 4.0) + net-scp (>= 1.1, < 5.0) + net-ssh (>= 2.9, < 8.0) + net-ssh-gateway (>= 1.2, < 3.0) + thor (>= 0.19, < 2.0) + winrm (~> 2.0) + winrm-elevated (~> 1.0) + winrm-fs (~> 1.1) + thor (1.2.2) + timeliness (0.3.10) + tomlrb (1.3.0) + trailblazer-option (0.1.2) + train (3.10.7) + activesupport (>= 6.0.3.1) + azure_graph_rbac (~> 0.16) + azure_mgmt_key_vault (~> 0.17) + azure_mgmt_resources (~> 0.15) + azure_mgmt_security (~> 0.18) + azure_mgmt_storage (~> 0.18) + docker-api (>= 1.26, < 3.0) + google-api-client (>= 0.23.9, <= 0.52.0) + googleauth (>= 0.6.6, <= 0.14.0) + inifile (~> 3.0) + train-core (= 3.10.7) + train-winrm (~> 0.2) + train-aws (0.2.24) + aws-sdk-alexaforbusiness (~> 1.0) + aws-sdk-amplify (~> 1.32.0) + aws-sdk-apigateway (~> 1.0) + aws-sdk-apigatewayv2 (~> 1.0) + aws-sdk-applicationautoscaling (>= 1.46, < 1.52) + aws-sdk-athena (~> 1.0) + aws-sdk-autoscaling (>= 1.22, < 1.64) + aws-sdk-batch (>= 1.36, < 1.48) + aws-sdk-budgets (~> 1.0) + aws-sdk-cloudformation (~> 1.0) + aws-sdk-cloudfront (~> 1.0) + aws-sdk-cloudhsm (~> 1.0) + aws-sdk-cloudhsmv2 (~> 1.0) + aws-sdk-cloudtrail (~> 1.8) + aws-sdk-cloudwatch (~> 1.13) + aws-sdk-cloudwatchevents (>= 1.36, < 1.47) + aws-sdk-cloudwatchlogs (~> 1.13) + aws-sdk-codecommit (~> 1.0) + aws-sdk-codedeploy (~> 1.0) + aws-sdk-codepipeline (~> 1.0) + aws-sdk-cognitoidentity (>= 1.26, < 1.32) + aws-sdk-cognitoidentityprovider (>= 1.46, < 1.54) + aws-sdk-configservice (~> 1.21) + aws-sdk-core (~> 3.0) + aws-sdk-costandusagereportservice (~> 1.6) + aws-sdk-databasemigrationservice (>= 1.42, < 1.54) + aws-sdk-dynamodb (~> 1.31) + aws-sdk-ec2 (~> 1.70) + aws-sdk-ecr (~> 1.18) + aws-sdk-ecrpublic (~> 1.3) + aws-sdk-ecs (~> 1.30) + aws-sdk-efs (~> 1.0) + aws-sdk-eks (~> 1.9) + aws-sdk-elasticache (~> 1.0) + aws-sdk-elasticbeanstalk (~> 1.0) + aws-sdk-elasticloadbalancing (~> 1.8) + aws-sdk-elasticloadbalancingv2 (~> 1.0) + aws-sdk-elasticsearchservice (~> 1.0) + aws-sdk-emr (~> 
1.53.0) + aws-sdk-eventbridge (~> 1.24.0) + aws-sdk-firehose (~> 1.0) + aws-sdk-glue (>= 1.71, < 1.89) + aws-sdk-guardduty (~> 1.31) + aws-sdk-iam (~> 1.13) + aws-sdk-kafka (~> 1.0) + aws-sdk-kinesis (~> 1.0) + aws-sdk-kms (~> 1.13) + aws-sdk-lambda (~> 1.0) + aws-sdk-mq (~> 1.40.0) + aws-sdk-networkfirewall (>= 1.6.0) + aws-sdk-networkmanager (>= 1.13.0) + aws-sdk-organizations (>= 1.17, < 1.60) + aws-sdk-ram (>= 1.21, < 1.27) + aws-sdk-rds (~> 1.43) + aws-sdk-redshift (~> 1.0) + aws-sdk-route53 (~> 1.0) + aws-sdk-route53domains (~> 1.0) + aws-sdk-route53resolver (~> 1.0) + aws-sdk-s3 (~> 1.30) + aws-sdk-s3control (~> 1.43.0) + aws-sdk-secretsmanager (>= 1.42, < 1.47) + aws-sdk-securityhub (~> 1.0) + aws-sdk-servicecatalog (>= 1.48, < 1.61) + aws-sdk-ses (~> 1.41.0) + aws-sdk-shield (~> 1.30) + aws-sdk-signer (~> 1.32.0) + aws-sdk-simpledb (~> 1.29.0) + aws-sdk-sms (~> 1.0) + aws-sdk-sns (~> 1.9) + aws-sdk-sqs (~> 1.10) + aws-sdk-ssm (~> 1.0) + aws-sdk-states (>= 1.35, < 1.40) + aws-sdk-synthetics (~> 1.19.0) + aws-sdk-transfer (>= 1.26, < 1.35) + aws-sdk-waf (~> 1.43.0) + train-core (3.10.7) + addressable (~> 2.5) + ffi (!= 1.13.0) + json (>= 1.8, < 3.0) + mixlib-shellout (>= 2.0, < 4.0) + net-scp (>= 1.2, < 5.0) + net-ssh (>= 2.9, < 8.0) + train-habitat (0.2.22) + train-winrm (0.2.13) + winrm (>= 2.3.6, < 3.0) + winrm-elevated (~> 1.2.2) + winrm-fs (~> 1.0) + tty-box (0.7.0) + pastel (~> 0.8) + strings (~> 0.2.0) + tty-cursor (~> 0.7) + tty-color (0.6.0) + tty-cursor (0.7.1) + tty-prompt (0.23.1) + pastel (~> 0.8) + tty-reader (~> 0.8) + tty-reader (0.9.0) + tty-cursor (~> 0.7) + tty-screen (~> 0.8) + wisper (~> 2.0) + tty-screen (0.8.1) + tty-table (0.12.0) + pastel (~> 0.8) + strings (~> 0.2.0) + tty-screen (~> 0.8) + tty-which (0.5.0) + tzinfo (2.0.6) + concurrent-ruby (~> 1.0) + uber (0.1.0) + unf (0.1.4) + unf_ext + unicode-display_width (2.4.2) + unicode_utils (1.4.0) + winrm (2.3.6) + builder (>= 2.1.2) + erubi (~> 1.8) + gssapi (~> 1.2) + gyoku (~> 1.0) 
+ httpclient (~> 2.2, >= 2.2.0.2) + logging (>= 1.6.1, < 3.0) + nori (~> 2.0) + rubyntlm (~> 0.6.0, >= 0.6.3) + winrm-elevated (1.2.3) + erubi (~> 1.8) + winrm (~> 2.0) + winrm-fs (~> 1.0) + winrm-fs (1.3.5) + erubi (~> 1.8) + logging (>= 1.6.1, < 3.0) + rubyzip (~> 2.0) + winrm (~> 2.0) + wisper (2.0.1) + zeitwerk (2.6.8) + +PLATFORMS + x86_64-linux + +DEPENDENCIES + activesupport! + aws-sdk (~> 3)! + chef-config! + chef-utils! + cinc-auditor-bin! + inspec (~> 5.21)! + inspec-core! + irb! + kitchen-terraform (~> 7.0)! + mixlib-install! + mixlib-versioning! + rspec-retry! + test-kitchen! + unf_ext! + +BUNDLED WITH + 2.4.13 diff --git a/e2e-python/files/Makefile b/e2e-python/files/Makefile new file mode 100644 index 000000000..f829aae54 --- /dev/null +++ b/e2e-python/files/Makefile @@ -0,0 +1,184 @@ +NAME := AWS Quickstarter +DESCRIPTION := The '$(NAME)' is a is a prototype for an ODS quickstarter + +PWD := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) +GEMS_HOME ?= $(PWD)/vendor/bundle +INSTALL_REPORT_HOME := ./reports/install +SHELL := /usr/bin/env bash +.SHELLFLAGS := -eu -o pipefail -c +.DELETE_ON_ERROR: +MAKEFLAGS += --warn-undefined-variables +MAKEFLAGS += --no-builtin-rules + +TF_WORKSPACE = default + +# tfenv hack +DEBUG := 0 + +# Statefile Parameters +ACCOUNT_ID := $(shell aws sts get-caller-identity --query 'Account' --output text) +TF_BACKEND_S3KEY_MOD := $(shell echo "$(TF_BACKEND_S3KEY)" | sed "s/\//-/g") +TF_BACKEND_S3KEY_MOD := $(shell echo "$(TF_BACKEND_S3KEY_MOD)" | sed "s/-/\//") + +TFSTATE_BUCKET := $(ACCOUNT_ID)-terraform-state-bucket +TFSTATE_KEY := $(TF_BACKEND_S3KEY_MOD)-terraform-state +TFSTATE_TABLE := $(ACCOUNT_ID)-terraform-state-lock-table + + +.PHONY: default +default: test + +.PHONY: all +all: test plan deploy deployment-test describe + +.PHONY: init +# Initialize project. +init: install-dev-deps install-test-deps + +.PHONY: create-tfvars +# create terraform.tfvars.json +create-tfvars: + terraform-docs json . 
| jq '.inputs | map({ (.name): .default }) | add' > terraform.tfvars.json + +.PHONY: prep-test +prep-test: + pre-commit run terraformcreatei2o -a + pre-commit run terraformstackmoduleoutputs -a + +.PHONY: test +# Run (pre-deployment) tests. +test: install-test-deps + @$(call check_aws_credentials) + + # output aws account and user id for testing + aws sts get-caller-identity --output text | tee $(INSTALL_REPORT_HOME)/aws_testing_account.log + + # Remove any previously created Terraform test artefacts. + for dir in .terraform terraform.tfstate.d; do \ + find test/fixtures -name $$dir -print0 | xargs -0 rm -rf; \ + done \ + + inspec_profiles=$$(ls -1 ./test/integration); \ + for fdir in $$inspec_profiles; do \ + mkdir -p test/integration/$$fdir/files ; \ + ./.venv/bin/python3 ./.venv/bin/hcl2tojson test/fixtures/$$fdir/main.tf test/integration/$$fdir/files/main.json; \ + done \ + + # See https://github.com/test-kitchen/test-kitchen/issues/1436 for why a simple `bundle exec kitchen test` is not an option. + for suite in $$(bundle exec kitchen list --bare); do \ + bundle exec kitchen verify $$suite || { bundle exec kitchen destroy $$suite; exit 1; }; \ + bundle exec kitchen destroy $$suite; \ + done + +.PHONY: plan +# Plan infrastructure deployment. +plan: init-terraform + @$(call check_aws_credentials) + + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform plan -input=false -out=tfplan + +.PHONY: deploy +# Deploy infrastructure. 
+deploy: init-terraform plan + @$(call check_aws_credentials) + + # output aws account and user id for testing + aws sts get-caller-identity --output text | tee $(INSTALL_REPORT_HOME)/aws_deploy_account.log + + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform apply -auto-approve -input=false -no-color tfplan | tee "$(INSTALL_REPORT_HOME)/tf_apply.log" + @TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform show -no-color -json | tee "$(INSTALL_REPORT_HOME)/tf_show.log" 1>/dev/null + +.PHONY: deployment-test +# Run (post-deployment) tests. +deployment-test: install-test-deps + @$(call check_aws_credentials) + + sh ./lib/scripts/createstackoutputs2yml.sh + inspec_profiles=$$(ls -1 ./test/integration); \ + for profile in $$inspec_profiles; do \ + JSON_VARS_FILE="$(PWD)/terraform.tfvars.json" bundle exec cinc-auditor exec ./test/integration/$$profile --no-create-lockfile --no-distinct-exit --input-file ./test/integration/$$profile/files/inputs-from-tfo-stack.yml --reporter=cli junit2:build/test-results/test/$$profile.xml json:reports/install/data/inspec/post-install/$$profile.json --target aws://; \ + done + +.PHONY: install-report +install-report: + awk '/Creation complete/ && !/terraform-data/ {print}' "$(INSTALL_REPORT_HOME)/tf_apply.log" > $(INSTALL_REPORT_HOME)/tf_created.log + +.PHONY: describe +# Describe infrastructure. +describe: init-terraform + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform output -json | tee outputs.json + +.PHONY: destroy +# Destroy infrastructure. +destroy: init-terraform + @$(call check_aws_credentials) + + TF_IN_AUTOMATION=1 TF_WORKSPACE="$(TF_WORKSPACE)" terraform destroy -auto-approve + +.PHONY: install-dev-deps +# Install development dependencies. +install-dev-deps: install-git-pre-commit-hooks + +.PHONY: install-git-pre-commit-hooks +# Install Git pre-commit hooks. +install-git-pre-commit-hooks: + pre-commit install --overwrite + +.PHONY: install-ruby-gems +# Install Ruby gems specified in Gemfile. 
+install-ruby-gems: + BUNDLE_SILENCE_ROOT_WARNING=true bundle config --local path $(GEMS_HOME) + # see https://github.com/rubygems/rubygems/issues/4466 to get rid of error messages in Jenkins + BUNDLE_SILENCE_ROOT_WARNING=true TMPDIR=./vendor/tmp bundle install --jobs=8 + +.PHONY: install-python-env +# Install python virtual environment based on Pipfile +install-python-env: + CI=true PIPENV_VENV_IN_PROJECT=true pipenv install + +.PHONY: init-terraform +# Install Terraform workspace. +init-terraform: + + @echo "Bucket: ${TFSTATE_BUCKET}" + @echo "Key : ${TFSTATE_KEY}" + @echo "Table : ${TFSTATE_TABLE}" + + echo 1 | terraform init -backend-config="bucket=$(TFSTATE_BUCKET)" -backend-config="key=$(TFSTATE_KEY)" -backend-config="dynamodb_table=$(TFSTATE_TABLE)" -force-copy -input=false + +.PHONY: install-test-deps +# Install testing dependencies. +install-test-deps: install-ruby-gems install-python-env + +.PHONY: cinc-auditor-test +# run cinc-auditor without use of kitchen-terraform and create yaml for mapping terraform outputs to inspec inputs. 
+cinc-auditor-test: + sh ./lib/scripts/createstackfixtureoutputs2yml.sh + bundle exec cinc-auditor exec test/integration/default --no-create-lockfile --no-distinct-exit --input-file ./test/integration/default/files/inputs-from-tfo-stack.yml --target aws:// + +.PHONY: clean +# Reset Working directory (take care if something has deployed upfront) +clean: + @rm -rf .kitchen/ + @rm -rf test/fixtures/default/terraform.tfstate.d/ + @rm -rf test/fixtures/default/.terraform/ + @rm -f test/fixtures/default/.terraform.lock.hcl + +.PHONY: check-config +# Do some basic verification of configuration files and accounts +check-config: + @sh ./lib/scripts/aws/check_conf.sh + +# Checks AWS account +check_aws_credentials = \ + exitStatus=0; \ + if [ -v AWS_ACCESS_KEY_ID ] && [ -v AWS_SECRET_ACCESS_KEY ]; then \ + echo "Info: using AWS environment variables AWS_ACCESS_KEY_ID & AWS_SECRET_ACCESS_KEY ..."; \ + else \ + aws sts get-caller-identity &> /dev/null || exitStatus=$$?; \ + if [ $$exitStatus = 0 ]; then \ + echo "Info: using alternate credentials (e.g. 
AWS SSO) ..."; \ + else \ + echo "Error: No AWS credentials specified ..."; exit 1; \ + fi \ + fi diff --git a/e2e-python/files/Pipfile b/e2e-python/files/Pipfile new file mode 100644 index 000000000..b679ceaa2 --- /dev/null +++ b/e2e-python/files/Pipfile @@ -0,0 +1,14 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] + +[packages] +python-hcl2 = "~=2.0" +boto3 = "~=1.26" +yq = ">2" + +[requires] +python_version = "3" diff --git a/e2e-python/files/backend.tf b/e2e-python/files/backend.tf new file mode 100644 index 000000000..824d6152d --- /dev/null +++ b/e2e-python/files/backend.tf @@ -0,0 +1,5 @@ +terraform { + backend "s3" { + region = "eu-west-1" + } +} diff --git a/e2e-python/files/cfn-templates/.gitkeep b/e2e-python/files/cfn-templates/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/e2e-python/files/cfn-templates/cfs3.json b/e2e-python/files/cfn-templates/cfs3.json new file mode 100644 index 000000000..374d38831 --- /dev/null +++ b/e2e-python/files/cfn-templates/cfs3.json @@ -0,0 +1,29 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "CloudFormation template of an S3 bucket for the AWS Quickstarter.", + "Outputs": { + "S3BucketName": { + "Description": "Bucket Created using this template.", + "Value": { + "Ref": "S3Bucket" + } + } + }, + "Resources": { + "S3Bucket": { + "Properties": { + "AccessControl": "Private", + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "ServerSideEncryptionByDefault": { + "SSEAlgorithm": "AES256" + } + } + ] + } + }, + "Type": "AWS::S3::Bucket" + } + } +} diff --git a/e2e-python/files/common-tags.tf b/e2e-python/files/common-tags.tf new file mode 100644 index 000000000..56a0f42df --- /dev/null +++ b/e2e-python/files/common-tags.tf @@ -0,0 +1,5 @@ +locals { + common_tags = { + Environment = upper(var.meta_environment) + } +} diff --git a/e2e-python/files/environments/dev.json b/e2e-python/files/environments/dev.json new 
file mode 100644 index 000000000..e9a0fd4b0 --- /dev/null +++ b/e2e-python/files/environments/dev.json @@ -0,0 +1,13 @@ +{ + "meta_environment" : "DEVELOPMENT", + + "codebuild_project_name" : "codebuild-project", + "codepipeline_name" : "test-codepipeline", + "codepipeline_bucket_name" : "cpplartifacts", + "bitbucket_source_bucket_name" : "src-bitbucket", + "e2e_results_bucket_name" : "test-results", + "pipeline_role_name" : "test-codePipelineRole", + "codebuild_role_name" : "test-codeBuildRole", + "codepipeline_policy_name" : "codepipeline_policy", + "codebuild_policy_name" : "codebuild_policy" +} diff --git a/e2e-python/files/environments/prod.json b/e2e-python/files/environments/prod.json new file mode 100644 index 000000000..1952c66d4 --- /dev/null +++ b/e2e-python/files/environments/prod.json @@ -0,0 +1,13 @@ +{ + "meta_environment" : "PRODUCTIVE", + + "codebuild_project_name" : "codebuild-project", + "codepipeline_name" : "test-codepipeline", + "codepipeline_bucket_name" : "cpplartifacts", + "bitbucket_source_bucket_name" : "src-bitbucket", + "e2e_results_bucket_name" : "test-results", + "pipeline_role_name" : "test-codePipelineRole", + "codebuild_role_name" : "test-codeBuildRole", + "codepipeline_policy_name" : "codepipeline_policy", + "codebuild_policy_name" : "codebuild_policy" +} diff --git a/e2e-python/files/environments/test.json b/e2e-python/files/environments/test.json new file mode 100644 index 000000000..db57fb13f --- /dev/null +++ b/e2e-python/files/environments/test.json @@ -0,0 +1,13 @@ +{ + "meta_environment" : "QUALITYASSURANCE", + + "codebuild_project_name" : "codebuild-project", + "codepipeline_name" : "test-codepipeline", + "codepipeline_bucket_name" : "cpplartifacts", + "bitbucket_source_bucket_name" : "src-bitbucket", + "e2e_results_bucket_name" : "test-results", + "pipeline_role_name" : "test-codePipelineRole", + "codebuild_role_name" : "test-codeBuildRole", + "codepipeline_policy_name" : "codepipeline_policy", + "codebuild_policy_name" : 
"codebuild_policy" +} diff --git a/e2e-python/files/inputs2outputs.tf b/e2e-python/files/inputs2outputs.tf new file mode 100644 index 000000000..13ae9729f --- /dev/null +++ b/e2e-python/files/inputs2outputs.tf @@ -0,0 +1,11 @@ +# This file has been created automatically. +# terraform variables are passed to outputs. +# Following variable names are skipped: '.*[password|secret].*'. + +output "inputs2outputs" { + description = "all inputs passed to outputs" + value = [{ + meta_environment = var.meta_environment + name = var.name + }] +} diff --git a/e2e-python/files/kitchen.yml b/e2e-python/files/kitchen.yml new file mode 100644 index 000000000..bfc77c464 --- /dev/null +++ b/e2e-python/files/kitchen.yml @@ -0,0 +1,29 @@ +driver: + name: terraform + command_timeout: 5400 + +provisioner: + name: terraform + +platforms: +- name: aws + +verifier: + name: terraform + +lifecycle: + pre_verify: + - local: mkdir -p test/integration/${KITCHEN_SUITE_NAME}/files + - local: ./.venv/bin/python3 ./.venv/bin/hcl2tojson test/fixtures/${KITCHEN_SUITE_NAME}/main.tf test/integration/${KITCHEN_SUITE_NAME}/files/main.json + +suites: +- name: default + driver: + root_module_directory: test/fixtures/default + verifier: + systems: + - name: aws + backend: aws + reporter: + - cli + - json:reports/install/data/inspec/pre-install/default.json diff --git a/e2e-python/files/lib/scripts/aws/check_conf.sh b/e2e-python/files/lib/scripts/aws/check_conf.sh new file mode 100644 index 000000000..84ab3cc54 --- /dev/null +++ b/e2e-python/files/lib/scripts/aws/check_conf.sh @@ -0,0 +1,122 @@ +#!/usr/bin/env bash +# +# Author: Erhard Wais +# erhard.wais@boehringer-ingelheim.com +# +# This script does some basic checks on the AWS QS and reports potential issues. 
+# It is triggered via "make check-config" + +# TODO: +# - Return error in case of missconfig + +set -e +set -o pipefail + +#CONST + +DEFAULTBUCKET="" +DEFAULTACCOUNT="" +DOTS="........................................................................." + +BUCKET= +ACCOUNT= +MESSAGE= +HASAWSCONFIGURED=0 + +# functions +function format_message() { + MESSAGE=$1 + local offset=${#MESSAGE} + MESSAGE="$MESSAGE${DOTS:offset:((${#DOTS} - offset))}" +} + +function ok() { + format_message "$1" + echo -e "$MESSAGE\033[42mPassed\033[0m" +} +function nok() { + format_message "$1" + echo -e "$MESSAGE\033[41mFailed\033[0m" +} +function warn() { + format_message "$1" + echo -e "$MESSAGE\033[44m Warn \033[0m" +} +function note() { + format_message "$1" + echo -e "$MESSAGE" +} + +function check_backend() { + BUCKET="$ACCOUNT-terraform-state-bucket" + if [ -n "$BUCKET" ]; then + if [ "$BUCKET" = "$DEFAULTBUCKET" ]; then + nok "TF Backend is not configured. Check your backend.tf file" + else + ok "TF Backend is set to \"$BUCKET\"" + fi + else + nok "TF Backend is not specified. Update your backend.tf file" + fi +} + +function check_env() { + local envaccount=$(grep "account" environments/"$1".yml | awk -F ':' '{print $2}'|tr -d '"'|xargs) + if [ "$envaccount" = "$DEFAULTACCOUNT" ]; then + warn "There is no account configured for the \"$1\" environment" + else + ok "Account \"$envaccount\" is configured for the \"$1\" environment" + fi +} + +function check_aws_credentials() { + local exitStatus=0 + local arn + local user + + if [ -v AWS_ACCESS_KEY_ID ] && [ -v AWS_SECRET_ACCESS_KEY ]; then + ok "AWS account specified using environment variables" + HASAWSCONFIGURED=1 + else + aws sts get-caller-identity &> /dev/null || exitStatus=$? 
+ if [ $exitStatus = 0 ]; then + ok "AWS account configured using SSO" + HASAWSCONFIGURED=1 + else + nok "No AWS account information specified for local development" + fi + fi + + # Check IAM user, Group and Policy + if [[ $HASAWSCONFIGURED = 1 ]]; then + arn=$(aws sts get-caller-identity --query "Arn" --output text) + arn=${arn:13} + ACCOUNT=${arn%:*} + user=${arn##*/} + + ok "Using \"$ACCOUNT:$user\"" + fi +} + +function check_backend_access() { + local exitStatus=0 + + if [ -n "$BUCKET" ] && [ "$BUCKET" != "$DEFAULTBUCKET" ]; then + if [[ "$HASAWSCONFIGURED" = 1 ]]; then + echo touch | aws s3 cp - s3://"$1"/"$2"/testaccess &> /dev/null || exitStatus=$? + if [ $exitStatus = 0 ]; then + ok "Configured AWS credentials have write access to TF Bucket" + else + warn "AWS credentials have no write access to TF Bucket" + fi + fi + fi +} + +# Rund different tests +check_env dev +check_env test +check_env prod +check_aws_credentials +check_backend +check_backend_access "$BUCKET" "$ACCOUNT" diff --git a/e2e-python/files/lib/scripts/createstackfixtureoutputs2yml.sh b/e2e-python/files/lib/scripts/createstackfixtureoutputs2yml.sh new file mode 100644 index 000000000..fced6fcd0 --- /dev/null +++ b/e2e-python/files/lib/scripts/createstackfixtureoutputs2yml.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# +# Author: Josef Hartmann +# josef.hartmann@boehringer-ingelheim.com +# +# This script creates terraform json output, converts it to yaml. +# This yaml file is used for loading terraform outputs as cinc-auditor/inspec inputs using option --input-file= +# +# + +set -e +set -o pipefail + +if [ "x${KITCHEN_SUITE_NAME}" == "x" ]; then + echo "Not running within kitchen." + KITCHEN_SUITE_NAME="default" +fi + +CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." && pwd -P)" +TOJFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/tf-stack-output.json +TOYFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/inputs-from-tfo-stack.yml + +pushd . 
+ +# +# A TF_WORKSPACE might be applied by the environment. +# +cd ${CWD}/test/fixtures/default +terraform output -json > "${TOJFILE}" + +# Convert terraform json outputs to yaml. +# Do not use symbolize_names for keys ("id" -> :id). +# Symbolize_names is no longer required, as kitchen-terraform outputs are created as inspec inputs using this type for keys. +jq 'with_entries(.value |= .value)|with_entries(.key = "output_" + .key)' "${TOJFILE}" | \ + ruby -ryaml -rjson -e 'puts YAML.dump(JSON.parse(STDIN.read, :symbolize_names => false))' > "${TOYFILE}" + +popd diff --git a/e2e-python/files/lib/scripts/createstackoutputs2yml.sh b/e2e-python/files/lib/scripts/createstackoutputs2yml.sh new file mode 100644 index 000000000..88c30d319 --- /dev/null +++ b/e2e-python/files/lib/scripts/createstackoutputs2yml.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash +# +# Author: Josef Hartmann +# josef.hartmann@boehringer-ingelheim.com +# +# This script creates terraform json output, converts it to yaml. +# This yaml file is used for loading terraform outputs as cinc-auditor/inspec inputs using option --input-file= +# +# + +set -e +set -o pipefail + +if [ "x${KITCHEN_SUITE_NAME}" == "x" ]; then + echo "Not running within kitchen." + KITCHEN_SUITE_NAME="default" +fi + +CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." && pwd -P)" +TOJFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/tf-stack-output.json +TOYFILE=${CWD}/test/integration/${KITCHEN_SUITE_NAME}/files/inputs-from-tfo-stack.yml + +pushd . + +# +# A TF_WORKSPACE might be applied by the environment. +# +terraform output -json > "${TOJFILE}" + +# Convert terraform json outputs to yaml. +# Do not use symbolize_names for keys ("id" -> :id). +# Symbolize_names is no longer required, as kitchen-terraform outputs are created as inspec inputs using this type for keys. 
+jq 'with_entries(.value |= .value)|with_entries(.key = "output_" + .key)' "${TOJFILE}" | \ + ruby -ryaml -rjson -e 'puts YAML.dump(JSON.parse(STDIN.read, :symbolize_names => false))' > "${TOYFILE}" + +popd diff --git a/e2e-python/files/main.tf b/e2e-python/files/main.tf new file mode 100644 index 000000000..a758e80dc --- /dev/null +++ b/e2e-python/files/main.tf @@ -0,0 +1,74 @@ +locals { + account_id = data.aws_caller_identity.current.account_id + unique_name = var.name + + tags = merge(local.common_tags, { + DeploymentDate = formatdate("YYYYMMDD", timestamp()) + InitialDeploymentDate = time_static.deployment.rfc3339 + }) +} + +resource "time_static" "deployment" {} + +data "aws_region" "current" {} +data "aws_caller_identity" "current" {} + +module "codebuild_terraform" { + depends_on = [ module.iam_roles ] + source = "./modules/codebuild" + +# build_project_name = var.build_project_name +# environment_type = var.environment_type +# environment_image = var.environment_image +# image_pull_credentials_type = var.image_pull_credentials_type +# testing_project_name = var.testing_project_name + + codebuild_role_arn = module.iam_roles.codebuild_role_arn + codepipeline_bucket_name = module.s3_artifacts_bucket.cp_bucket_name + e2e_results_bucket_name = module.s3_artifacts_bucket.e2e_results_bucket_name + local_id = local.id + projectId = var.projectId + environment = var.environment +} + +module "codepipeline_terraform" { + + source = "./modules/codepipeline" + +# codepipeline_name = var.codepipeline_name + + codepipeline_bucket_name = module.s3_artifacts_bucket.cp_bucket_name + codepipeline_role_arn = module.iam_roles.codepipeline_role_arn + bitbucket_source_bucket_name = module.s3_artifacts_bucket.bitbucket_s3bucket_name + codebuild_project_name = module.codebuild_terraform.codebuild_project_name + + local_id = local.id + projectId = var.projectId + repository = var.repository + branch_name = var.branch_name +} + +module "iam_roles" { + source = "./modules/iam_roles" + 
+# pipeline_role_name = var.pipeline_role_name +# codebuild_role_name = var.codebuild_role_name +# codepipeline_policy_name = var.codepipeline_policy_name +# codebuild_policy_name = var.codebuild_policy_name + + local_id = local.id + projectId = var.projectId +} + +module "s3_artifacts_bucket" { + source = "./modules/s3-bucket" + +# codepipeline_bucket_name = var.codepipeline_bucket_name +# bitbucket_source_bucket_name = var.bitbucket_source_bucket_name +# e2e_results_bucket_name = var.codepipeline_bucket_name + + local_id = local.id + projectId = var.projectId +} + + diff --git a/e2e-python/files/metadata.yml b/e2e-python/files/metadata.yml new file mode 100644 index 000000000..cca8805ed --- /dev/null +++ b/e2e-python/files/metadata.yml @@ -0,0 +1,7 @@ +--- +name: e2e-python +# yamllint disable-line rule:line-length +description: "This end-to-end testing project was generated from the e2e-python ODS quickstarter." +supplier: https://es.python.org/ +version: 1.0 +type: ods-test diff --git a/e2e-python/files/modules/codebuild/main.tf b/e2e-python/files/modules/codebuild/main.tf new file mode 100644 index 000000000..dbfa2a1c6 --- /dev/null +++ b/e2e-python/files/modules/codebuild/main.tf @@ -0,0 +1,96 @@ + +resource "aws_codebuild_project" "build_project" { + name = "${var.projectId}-e2e-cb-${var.aws_region}-${var.codebuild_project_name}-${var.local_id}" //"CodeBuild-project-test" + service_role = var.codebuild_role_arn + build_timeout = var.build_timeout + + artifacts { + type = var.artifacts_type + } + + environment { + compute_type = var.environment_compute_type + image = var.environment_image + type = var.environment_type + image_pull_credentials_type = var.image_pull_credentials_type + + environment_variable { + name = "ENVIRONMENT" + value = var.environment + } + } + + + source { + type = var.source_type + report_build_status = var.report_build_status + buildspec = <<-EOT + version: 0.2 + + phases: + install: + runtime-versions: + python: ${var.env_version} + + 
pre_build: + commands: + - pip install -r requirements.txt + - npm install -g allure-commandline --save-dev + + build: + commands: + - python tests/acceptance/great_expectations/test_preparation/pre_requisites.py + - python utils/checkpoints_executions.py + - python tests/acceptance/great_expectations/test_preparation/post_requisites.py + - python -m pytest --alluredir=pytest/test_results/acceptance --junitxml=pytest/test_results/junit/acceptance_pytest_junit.xml tests/acceptance/pytest + - python -m pytest --alluredir=pytest/test_results/installation --junitxml=pytest/test_results/junit/installation_pytest_junit.xml tests/installation + - python -m pytest --alluredir=pytest/test_results/integration --junitxml=pytest/test_results/junit/integration_pytest_junit.xml tests/integration + + post_build: + commands: + - (cd tests/acceptance && great_expectations -y docs build) + - aws s3 cp tests/acceptance/great_expectations/uncommitted/data_docs/local_site s3://${var.e2e_results_bucket_name}/GX_test_results --recursive + - aws s3 cp tests/acceptance/great_expectations/uncommitted/validations s3://${var.e2e_results_bucket_name}/GX_jsons --recursive + - python utils/json2JUnit.py + + - aws s3 cp s3://${var.e2e_results_bucket_name}/pytest_results/acceptance/history pytest/test_results/acceptance/history --recursive + - aws s3 cp s3://${var.e2e_results_bucket_name}/pytest_results/installation/history pytest/test_results/installation/history --recursive + - aws s3 cp s3://${var.e2e_results_bucket_name}/pytest_results/integration/history pytest/test_results/integration/history --recursive + + - allure generate pytest/test_results/acceptance -o pytest/acceptance_allure_report --clean + - allure generate pytest/test_results/installation -o pytest/installation_allure_report --clean + - allure generate pytest/test_results/integration -o pytest/integration_allure_report --clean + + + - allure-combine pytest/acceptance_allure_report + - allure-combine 
pytest/installation_allure_report + - allure-combine pytest/integration_allure_report + + - aws s3 cp pytest/acceptance_allure_report/history s3://${var.e2e_results_bucket_name}/pytest_results/acceptance/history --recursive + - aws s3 cp pytest/installation_allure_report/history s3://${var.e2e_results_bucket_name}/pytest_results/installation/history --recursive + - aws s3 cp pytest/integration_allure_report/history s3://${var.e2e_results_bucket_name}/pytest_results/integration/history --recursive + + - aws s3 cp pytest/acceptance_allure_report/complete.html s3://${var.e2e_results_bucket_name}/pytest_results/acceptance/acceptance_allure_report_complete.html + - aws s3 cp pytest/installation_allure_report/complete.html s3://${var.e2e_results_bucket_name}/pytest_results/installation/installation_allure_report_complete.html + - aws s3 cp pytest/integration_allure_report/complete.html s3://${var.e2e_results_bucket_name}/pytest_results/integration/integration_allure_report_complete.html + + - aws s3 cp tests/acceptance/great_expectations/uncommitted/validations/junit.xml s3://${var.e2e_results_bucket_name}/junit/acceptance_GX_junit.xml + - aws s3 cp pytest/test_results/junit/acceptance_pytest_junit.xml s3://${var.e2e_results_bucket_name}/junit/acceptance_pytest_junit.xml + - aws s3 cp pytest/test_results/junit/integration_pytest_junit.xml s3://${var.e2e_results_bucket_name}/junit/integration_pytest_junit.xml + - aws s3 cp pytest/test_results/junit/installation_pytest_junit.xml s3://${var.e2e_results_bucket_name}/junit/installation_pytest_junit.xml + + reports: + GX_reports: + files: + - junit.xml + base-directory: tests/acceptance/great_expectations/uncommitted/validations/ + file-format: JUNITXML + Allure_report: + files: + - acceptance_pytest_junit.xml + base-directory: pytest/test_results/junit/ + file-format: JUNITXML + + EOT + } +} diff --git a/e2e-python/files/modules/codebuild/output.tf b/e2e-python/files/modules/codebuild/output.tf new file mode 100644 index 
000000000..d2d239345 --- /dev/null +++ b/e2e-python/files/modules/codebuild/output.tf @@ -0,0 +1,12 @@ +output "codebuild_project_name" { + value = aws_codebuild_project.build_project.name + description = "Name of the CodeBuild project" +} +output "codebuild_project_arn" { + value = aws_codebuild_project.build_project.arn + description = "ARN of the CodeBuild project" +} +output "codebuild_project_id" { + value = aws_codebuild_project.build_project.id + description = "ID of the CodeBuild project" +} diff --git a/e2e-python/files/modules/codebuild/variables.tf b/e2e-python/files/modules/codebuild/variables.tf new file mode 100644 index 000000000..d8e75c53f --- /dev/null +++ b/e2e-python/files/modules/codebuild/variables.tf @@ -0,0 +1,117 @@ +variable "codebuild_project_name" { + description = "codebuild project name" + type = string + default = "codebuild-project" +} + +variable "codebuild_role_arn" { + description = "Codebuild IAM role arn. " + type = string +} + +variable "build_timeout" { + description = "Build Timeout" + type = number + default = 60 +} + +variable "artifacts_type" { + description = "type to store Artifacts" + type = string + default = "CODEPIPELINE" +} + +variable "environment_compute_type" { + description = "environment_compute_type" + type = string + default = "BUILD_GENERAL1_SMALL" +} + +variable "environment_image" { + description = "environment_image" + type = string + default = "aws/codebuild/standard:5.0" +} + +variable "environment_type" { + description = "environment_type" + type = string + default = "LINUX_CONTAINER" +} + +variable "image_pull_credentials_type" { + description = "image_pull_credentials_type" + type = string + default = "CODEBUILD" +} + +variable "source_type" { + description = "Artifacts_source_type" + type = string + default = "CODEPIPELINE" +} + +variable "env_version" { + type = string + default = "3.9" +} + +variable "report_build_status" { + description = "report_build_status" + type = bool + default = false +} + 
+variable "GXtest_project_name" { + description = "codebuild Great Expectation project name" + type = string + default = "GXtest-project" +} +variable "GX_reporting_project_name" { + description = "Great Expectations reporting project name" + type = string + default = "GX_reporting-project" +} +variable "Pytest_project_name" { + description = "Pytest testing project name" + type = string + default = "Pytest-project" +} +variable "Pytest_reporting_project_name" { + description = "Pytest reporting project name" + type = string + default = "Pytest_reporting-project" +} + +variable "codepipeline_bucket_name" { + description = "s3_bucket_name" + type = string +} + +variable "e2e_results_bucket_name" { + description = "s3_bucket_for_results_artifacts" + type = string +} + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string + default = "testpg" +} + +variable "aws_region" { + description = "AWS infrastructure regio" + type = string + default = "eu-west-1" +} + +variable "environment" { + description = "The project execution environment." 
+ type = string + default = "dev" +} diff --git a/e2e-python/files/modules/codepipeline/main.tf b/e2e-python/files/modules/codepipeline/main.tf new file mode 100644 index 000000000..f16ac6378 --- /dev/null +++ b/e2e-python/files/modules/codepipeline/main.tf @@ -0,0 +1,53 @@ + +provider "aws" { + region = var.aws_region +} + +resource "aws_codepipeline" "codepipeline" { + name = "${var.projectId}-e2e-cppl-${var.aws_region}-${var.codepipeline_name}-${var.local_id}" + role_arn = var.codepipeline_role_arn + + artifact_store { + type = var.artifacts_store_type + location = var.codepipeline_bucket_name + } + + stage { + name = "Source" + + action { + name = "Source" + category = "Source" + owner = "AWS" + provider = var.source_provider + version = "1" + output_artifacts = ["source_output"] + + configuration = { + S3Bucket = var.bitbucket_source_bucket_name + S3ObjectKey = "${var.repository}-${var.branch_name}.zip" + PollForSourceChanges = false + } + } + } + + stage { + name = "Test" + + action { + name = "Test" + category = "Build" + provider = "CodeBuild" + owner = "AWS" + input_artifacts = ["source_output"] + output_artifacts = ["install_output"] + version = "1" + configuration = { + ProjectName = var.codebuild_project_name + } + } + } +} + + + diff --git a/e2e-python/files/modules/codepipeline/output.tf b/e2e-python/files/modules/codepipeline/output.tf new file mode 100644 index 000000000..bda8f4ea9 --- /dev/null +++ b/e2e-python/files/modules/codepipeline/output.tf @@ -0,0 +1,14 @@ +output "aws_codepipeline_arn" { + value = aws_codepipeline.codepipeline.arn + description = "The ARN of the CodePipeline" +} + +output "aws_codepipeline_id" { + value = aws_codepipeline.codepipeline.id + description = "The id of the CodePipeline" +} + +output "aws_codepipeline_name" { + value = aws_codepipeline.codepipeline.name + description = "The name of the CodePipeline" +} diff --git a/e2e-python/files/modules/codepipeline/variables.tf 
b/e2e-python/files/modules/codepipeline/variables.tf new file mode 100644 index 000000000..1da275b58 --- /dev/null +++ b/e2e-python/files/modules/codepipeline/variables.tf @@ -0,0 +1,67 @@ +variable "codepipeline_name" { + description = "the codepipeline name" + type = string + default = "test-codepipeline" +} + +variable "codepipeline_bucket_name" { + description = "s3_bucket_name" + type = string +} + +variable "codepipeline_role_arn" { + description = "ARN of the codepipeline IAM role" + type = string +} + +variable "bitbucket_source_bucket_name" { + description = "s3_source_bucket" + type = string +} + +variable "artifacts_store_type" { + description = "Artifacts store type" + type = string + default = "S3" +} + +variable "source_provider" { + description = "source_provider" + type = string + default = "S3" +} + +variable "branch_name" { + description = "branch_name" + type = string + default = "master" +} + +variable "codebuild_project_name" { + description = "codebuild project name" + type = string +} + + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string + default = "testpg" +} + +variable "aws_region" { + description = "AWS infrastructure region" + type = string + default = "eu-west-1" +} + +variable "repository" { + description = "QS bitbucket repository" + type = string + default = "e2e-python" +} diff --git a/e2e-python/files/modules/iam_roles/main.tf b/e2e-python/files/modules/iam_roles/main.tf new file mode 100644 index 000000000..e1d6a9144 --- /dev/null +++ b/e2e-python/files/modules/iam_roles/main.tf @@ -0,0 +1,79 @@ +resource "aws_iam_role" "codepipeline_role" { + name = "${var.projectId}-e2e-IAMrole-${var.aws_region}-${var.pipeline_role_name}-${var.local_id}" + assume_role_policy = data.aws_iam_policy_document.codepipeline_assume_role.json +} + +resource "aws_iam_role" "codebuild_role" { + name = 
"${var.projectId}-e2e-IAMrole-${var.aws_region}-${var.codebuild_role_name}-${var.local_id}" + assume_role_policy = data.aws_iam_policy_document.codebuild_assume_role.json +} + +resource "aws_iam_role_policy" "codepipeline_policy" { + name = "${var.projectId}-e2e-policy-${var.aws_region}-${var.codepipeline_policy_name}-${var.local_id}" + role = aws_iam_role.codepipeline_role.id + policy = data.aws_iam_policy_document.codepipeline_policy.json +} + +resource "aws_iam_role_policy" "codebuild_policy" { + name = "${var.projectId}-e2e-policy-${var.aws_region}-${var.codebuild_policy_name}-${var.local_id}" + role = aws_iam_role.codebuild_role.id + policy = data.aws_iam_policy_document.codebuild_policy.json +} + +data "aws_iam_policy_document" "codepipeline_assume_role" { + statement { + effect = "Allow" + + principals { + type = "Service" + identifiers = ["codepipeline.amazonaws.com"] + } + + actions = ["sts:AssumeRole"] + } +} + +data "aws_iam_policy_document" "codebuild_assume_role" { + statement { + effect = "Allow" + + principals { + type = "Service" + identifiers = ["codebuild.amazonaws.com"] + } + + actions = ["sts:AssumeRole"] + } +} + +data "aws_iam_policy_document" "codepipeline_policy" { + statement { + sid = "" + actions = [ + "cloudwatch:*", + "s3:*", + "codebuild:*" + ] + resources = ["*"] + effect = "Allow" + } +} + +data "aws_iam_policy_document" "codebuild_policy" { + statement { + sid = "" + actions = [ + "cloudwatch:*", + "logs:*", + "s3:*", + "codebuild:*", + "secretsmanager:*", + "iam:*", + "athena:*", + "glue:*", + "codepipeline:*" + ] + resources = ["*"] + effect = "Allow" + } +} diff --git a/e2e-python/files/modules/iam_roles/outputs.tf b/e2e-python/files/modules/iam_roles/outputs.tf new file mode 100644 index 000000000..188da5286 --- /dev/null +++ b/e2e-python/files/modules/iam_roles/outputs.tf @@ -0,0 +1,11 @@ +output "codepipeline_role_arn" { + value = try(aws_iam_role.codepipeline_role.arn, "") + description = "role arn" +} + +output 
"codebuild_role_arn" { + value = try(aws_iam_role.codebuild_role.arn, "") + description = "role arn" +} + + diff --git a/e2e-python/files/modules/iam_roles/variables.tf b/e2e-python/files/modules/iam_roles/variables.tf new file mode 100644 index 000000000..78d9706a3 --- /dev/null +++ b/e2e-python/files/modules/iam_roles/variables.tf @@ -0,0 +1,38 @@ +variable "pipeline_role_name" { + description = "role_name" + type = string + default = "test-codePipelineRole" +} +variable "codebuild_role_name" { + description = "role_name" + type = string + default = "test-codeBuildRole" +} + +variable "codepipeline_policy_name" { + description = "Codepipeline_policy_name" + type = string + default = "codepipeline_policy" +} +variable "codebuild_policy_name" { + description = "Codebuild_policy_name" + type = string + default = "codebuild_policy" +} + + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string +} + +variable "aws_region" { + description = "AWS infrastructure region" + type = string + default = "eu-west-1" +} diff --git a/e2e-python/files/modules/s3-bucket/main.tf b/e2e-python/files/modules/s3-bucket/main.tf new file mode 100644 index 000000000..49603f50e --- /dev/null +++ b/e2e-python/files/modules/s3-bucket/main.tf @@ -0,0 +1,33 @@ +resource "aws_s3_bucket" "codepipeline_bucket" { + bucket = "${var.projectId}-e2e-s3-${var.aws_region}-${var.codepipeline_bucket_name}-${var.local_id}" +} +resource "aws_s3_bucket_versioning" "s3versioning-cp" { + bucket = aws_s3_bucket.codepipeline_bucket.id + + versioning_configuration { + status = var.s3_versioning_cp + } +} + +resource "aws_s3_bucket" "e2e_results_bucket" { + bucket = "${var.projectId}-e2e-s3-${var.aws_region}-${var.e2e_results_bucket_name}-${var.local_id}" +} +resource "aws_s3_bucket_versioning" "s3versioning-artfcs" { + bucket = aws_s3_bucket.e2e_results_bucket.id + + versioning_configuration { + status = 
var.s3_versioning_results + } +} + +resource "aws_s3_bucket" "source_bitbucket_bucket" { + bucket = "${var.projectId}-e2e-s3-${var.aws_region}-${var.bitbucket_source_bucket_name}-${var.local_id}" +} +resource "aws_s3_bucket_versioning" "s3versioning-bucket" { + bucket = aws_s3_bucket.source_bitbucket_bucket.id + + versioning_configuration { + status = var.s3_versioning_bitbuckets3 + } +} + diff --git a/e2e-python/files/modules/s3-bucket/outputs.tf b/e2e-python/files/modules/s3-bucket/outputs.tf new file mode 100644 index 000000000..696d46579 --- /dev/null +++ b/e2e-python/files/modules/s3-bucket/outputs.tf @@ -0,0 +1,38 @@ +output "cp_bucket_arn" { + value = aws_s3_bucket.codepipeline_bucket.arn + description = "The ARN of the S3 Bucket" +} +output "cp_bucket_name" { + value = aws_s3_bucket.codepipeline_bucket.bucket + description = "The Name of the S3 Bucket" +} +output "cp_bucket_id" { + value = aws_s3_bucket.codepipeline_bucket.id + description = "The ID of the S3 Bucket" +} + +output "e2e_results_bucket_arn" { + value = aws_s3_bucket.e2e_results_bucket.arn + description = "The ARN of the results artifacts S3 Bucket" +} +output "e2e_results_bucket_name" { + value = aws_s3_bucket.e2e_results_bucket.bucket + description = "The Name of the results artifacts S3 Bucket" +} +output "e2e_results_bucket_id" { + value = aws_s3_bucket.e2e_results_bucket.id + description = "The ID of the results artifacts S3 Bucket" +} + +output "bitbucket_s3bucket_arn" { + value = aws_s3_bucket.source_bitbucket_bucket.arn + description = "The ARN of the bitbucket S3 Bucket" +} +output "bitbucket_s3bucket_name" { + value = aws_s3_bucket.source_bitbucket_bucket.bucket + description = "The Name of the bitbucket S3 Bucket" +} +output "bitbucket_s3bucket_id" { + value = aws_s3_bucket.source_bitbucket_bucket.id + description = "The ID of the bitbucket S3 Bucket" +} diff --git a/e2e-python/files/modules/s3-bucket/variables.tf b/e2e-python/files/modules/s3-bucket/variables.tf new file mode 100644 
index 000000000..5f37563c8 --- /dev/null +++ b/e2e-python/files/modules/s3-bucket/variables.tf @@ -0,0 +1,51 @@ +variable "codepipeline_bucket_name" { + type = string + default = "cpplartifacts" +} + +variable "bitbucket_source_bucket_name" { + description = "Source bitbucket s3 bucket name" + type = string + default = "src-bitbucket" +} + +variable "e2e_results_bucket_name" { + description = "s3_bucket_for_results_artifacts" + type = string + default = "test-results" +} + +variable "s3_versioning_cp" { + description = "s3 versioning for codepipeline bucket" + type = string + default = "Enabled" +} + +variable "s3_versioning_bitbuckets3" { + description = "s3 versioning for source bucket" + type = string + default = "Enabled" +} + +variable "s3_versioning_results" { + description = "s3 versioning for results bucket" + type = string + default = "Enabled" +} + + +variable "local_id" { + description = "id for unique s3buckets " + type = string +} + +variable "projectId" { + description = "EDP project name" + type = string +} + +variable "aws_region" { + description = "AWS infrastructure region" + type = string + default = "eu-west-1" +} diff --git a/e2e-python/files/outputs.tf b/e2e-python/files/outputs.tf new file mode 100644 index 000000000..4c9d98a4b --- /dev/null +++ b/e2e-python/files/outputs.tf @@ -0,0 +1,61 @@ +# ----------------------------------------------------------------------------- +# OUTPUTS +# This stack supports the following output values. +# Documentation: https://www.terraform.io/docs/configuration/outputs.html +# ----------------------------------------------------------------------------- + +output "name" { + description = "The name of the stack." + value = var.name +} + +output "meta_environment" { + description = "The type of the environment." + value = var.meta_environment +} + +output "aws_region" { + description = "The current region." 
+ value = data.aws_region.current.name +} + + +output "codebuild_name" { + value = module.codebuild_terraform.codebuild_project_name + description = "The Name of the Codebuild Project" +} +output "codebuild_arn" { + value = module.codebuild_terraform.codebuild_project_arn + description = "The ARN of the Codebuild Project" +} + +output "codepipeline_name" { + value = module.codepipeline_terraform.aws_codepipeline_name + description = "The Name of the CodePipeline" +} +output "codepipeline_arn" { + value = module.codepipeline_terraform.aws_codepipeline_arn + description = "The ARN of the CodePipeline" +} + +output "cp_iam_arn" { + value = module.iam_roles.codepipeline_role_arn + description = "The ARN of the IAM Role used by the CodePipeline" +} +output "cb_iam_arn" { + value = module.iam_roles.codebuild_role_arn + description = "The ARN of the IAM Role used by the CodePipeline" +} + +output "bitbucket_s3bucket_name" { + value = module.s3_artifacts_bucket.bitbucket_s3bucket_name + description = "The Name of the bitbucket S3 Bucket" +} +output "cp_bucket_name" { + value = module.s3_artifacts_bucket.cp_bucket_name + description = "The Name of the S3 Bucket" +} +output "e2e_results_bucket_name" { + value = module.s3_artifacts_bucket.e2e_results_bucket_name + description = "The Name of the results artifacts S3 Bucket" +} diff --git a/e2e-python/files/pytest.ini b/e2e-python/files/pytest.ini new file mode 100644 index 000000000..79d7a8825 --- /dev/null +++ b/e2e-python/files/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +python_functions = *_test diff --git a/e2e-python/files/random.tf b/e2e-python/files/random.tf new file mode 100644 index 000000000..b0e6c90df --- /dev/null +++ b/e2e-python/files/random.tf @@ -0,0 +1,13 @@ +resource "random_id" "id" { + keepers = { + # Create a new random ID iff the workspace name changes. 
+ lifecycle = terraform.workspace + } + + byte_length = 4 +} + +locals { + id = random_id.id.hex +} + diff --git a/e2e-python/files/release-manager.yml b/e2e-python/files/release-manager.yml new file mode 100644 index 000000000..23d65c7ef --- /dev/null +++ b/e2e-python/files/release-manager.yml @@ -0,0 +1,2 @@ +--- +dependencies: [] diff --git a/e2e-python/files/reports/install/.gitkeep b/e2e-python/files/reports/install/.gitkeep new file mode 100644 index 000000000..9074a39d4 --- /dev/null +++ b/e2e-python/files/reports/install/.gitkeep @@ -0,0 +1 @@ +/report.* diff --git a/e2e-python/files/requirements.txt b/e2e-python/files/requirements.txt new file mode 100644 index 000000000..fd149701f --- /dev/null +++ b/e2e-python/files/requirements.txt @@ -0,0 +1,19 @@ +great_expectations == 0.18.3 +sqlalchemy == 2.0.23 +pyathena[SQLAlchemy] == 3.0.10 +pytest == 7.4.3 +allure-pytest == 2.13.2 +allure-combine == 1.0.11 +boto3 == 1.29.6 +pandas == 2.1.3 +numpy == 1.26.2 +pytest-timeout == 2.2.0 +pytest-ordering == 0.6 +pytest-repeat == 0.9.3 +pyspark == 3.5.0 +pytz == 2023.3.post1 +snowflake-connector-python == 3.6.0 +cryptography == 41.0.7 +psycopg2-binary == 2.9.1 +snowflake-snowpark-python == 1.11.1 + diff --git a/e2e-python/files/stackmodulesoutputs.tf b/e2e-python/files/stackmodulesoutputs.tf new file mode 100644 index 000000000..5c5d93d1d --- /dev/null +++ b/e2e-python/files/stackmodulesoutputs.tf @@ -0,0 +1,2 @@ +# This file has been created automatically. 
+ diff --git a/e2e-python/files/terraform-data.tf b/e2e-python/files/terraform-data.tf new file mode 100644 index 000000000..63265d73c --- /dev/null +++ b/e2e-python/files/terraform-data.tf @@ -0,0 +1,14 @@ +locals { + terraform-data = { + id = local.id + name = var.name + tags = local.tags + current_region = data.aws_region.current.name + } +} + +resource "local_file" "terraform-data" { + filename = "${path.module}/.terraform-data.json" + content = jsonencode(local.terraform-data) +} + diff --git a/e2e-python/files/test/fixtures/default/backend.tf b/e2e-python/files/test/fixtures/default/backend.tf new file mode 100644 index 000000000..2314bd81b --- /dev/null +++ b/e2e-python/files/test/fixtures/default/backend.tf @@ -0,0 +1,5 @@ +terraform { + backend "local" { + } +} + diff --git a/e2e-python/files/test/fixtures/default/main.tf b/e2e-python/files/test/fixtures/default/main.tf new file mode 100644 index 000000000..deb46277d --- /dev/null +++ b/e2e-python/files/test/fixtures/default/main.tf @@ -0,0 +1,16 @@ +locals { + name = "stack-aws-quickstarter-test" + tags = { + Name = local.name + } +} + +data "aws_region" "current" {} + +module "stack-aws-quickstarter-test" { + # module name and value of name parameter have to be equal + source = "../../.." + + name = local.name + meta_environment = "DEVELOPMENT" +} diff --git a/e2e-python/files/test/fixtures/default/moduleoutputs.tf b/e2e-python/files/test/fixtures/default/moduleoutputs.tf new file mode 100644 index 000000000..b04eb0d8e --- /dev/null +++ b/e2e-python/files/test/fixtures/default/moduleoutputs.tf @@ -0,0 +1,5 @@ +# This file has been created automatically. 
+ +output "module_ods_quickstarters" { + value = module.stack-aws-quickstarter-test.* +} diff --git a/e2e-python/files/test/fixtures/default/random.tf b/e2e-python/files/test/fixtures/default/random.tf new file mode 100644 index 000000000..0cc81ef09 --- /dev/null +++ b/e2e-python/files/test/fixtures/default/random.tf @@ -0,0 +1,9 @@ +provider "random" {} + +resource "random_id" "id" { + byte_length = 4 +} + +locals { + id = random_id.id.hex +} diff --git a/e2e-python/files/test/integration/default/controls/blueprints.rb b/e2e-python/files/test/integration/default/controls/blueprints.rb new file mode 100644 index 000000000..04b6ef760 --- /dev/null +++ b/e2e-python/files/test/integration/default/controls/blueprints.rb @@ -0,0 +1 @@ +# This file has been created automatically. diff --git a/e2e-python/files/test/integration/default/controls/default.rb b/e2e-python/files/test/integration/default/controls/default.rb new file mode 100644 index 000000000..cad5d7f69 --- /dev/null +++ b/e2e-python/files/test/integration/default/controls/default.rb @@ -0,0 +1,25 @@ +require_relative '../libraries/terraform_data.rb' +require_relative '../libraries/fixture_data.rb' +require_relative '../libraries/aws.rb' + +t = SpecHelper::TerraformData.new +id = t['id'] +name = t['name'] +tags = { :Name => name + '-' + id } + +f = SpecHelper::FixtureData.new.for_module(name) + +control 'stack' do + impact 1.0 + title "Test Suite: 'Stack'" + desc "This test suite asserts the correct functionality of the stack under test." 
+ tag name + + describe aws_region(region_name: t['current_region']) do + its('endpoint') { should be_in ['ec2.eu-west-1.amazonaws.com','ec2.us-east-1.amazonaws.com'] } + end + + describe "Stack Testing" do + it { expect(true).to be_truthy } + end +end diff --git a/e2e-python/files/test/integration/default/files/.gitkeep b/e2e-python/files/test/integration/default/files/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/e2e-python/files/test/integration/default/inspec.yml b/e2e-python/files/test/integration/default/inspec.yml new file mode 100644 index 000000000..4ae1a130d --- /dev/null +++ b/e2e-python/files/test/integration/default/inspec.yml @@ -0,0 +1,10 @@ +# This file has been created automatically. + +--- +name: stack +supports: + - platform: aws +depends: + - name: inspec-aws + git: https://github.com/inspec/inspec-aws + tag: v1.83.60 diff --git a/e2e-python/files/test/integration/default/inspec.yml.tmpl b/e2e-python/files/test/integration/default/inspec.yml.tmpl new file mode 100644 index 000000000..c4935f920 --- /dev/null +++ b/e2e-python/files/test/integration/default/inspec.yml.tmpl @@ -0,0 +1,8 @@ +--- +name: stack +supports: + - platform: aws +depends: + - name: inspec-aws + git: https://github.com/inspec/inspec-aws + tag: v1.83.60 diff --git a/e2e-python/files/test/integration/default/libraries/aws.rb b/e2e-python/files/test/integration/default/libraries/aws.rb new file mode 100644 index 000000000..d78efd511 --- /dev/null +++ b/e2e-python/files/test/integration/default/libraries/aws.rb @@ -0,0 +1,161 @@ +require 'aws-sdk' +require 'ipaddr' +require 'singleton' + +module SpecHelper + class AWS + # See https://docs.aws.amazon.com/sdkforruby/api/Aws.html + class SDK + include Singleton + + def client(clazz, region = ENV['AWS_DEFAULT_REGION']) + client_clazz = Module.const_get(clazz.to_s + '::Client') + client_clazz.new(region: region) + end + + def resource(clazz, region = ENV['AWS_DEFAULT_REGION']) + client = client(clazz, region) + 
+ resource_clazz = Module.const_get(clazz.to_s + '::Resource') + resource_clazz.new(client: client) + end + end + + def self.sdk + return SDK.instance + end + + def self.convert_aws_tags_to_hash(tags) + results = {} + + tags.each do |tag| + results[tag.key] = tag.value + end + + results + end + + def self.convert_tags_hash_to_array(tags) + tags.to_a.map do |tag| + { key: tag.first.to_s, value: tag.last } + end + end + + def self.convert_tags_hash_to_aws_filters(tags) + tags.to_a.map do |tag| + { name: "tag:#{tag.first}", values: [tag.last] } + end + end + + def self.filter_resources(resource, type, filters) + matches = resource.send(type, { filters: filters }).map(&:id) + + if matches.count == 1 + matches[0] + elsif matches.count == 0 + STDERR.puts "Error: could not find any resources of type '#{type}' with tag:Name = '#{name}'" + [] + else + STDERR.puts "Error: there is more than one resource of type '#{type}' with tag:Name = '#{name}'" + matches + end + end + + def self.get_asg_name_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) + client = self.sdk.client(Aws::AutoScaling, region) + + # Convert the incoming tags into an array + tags = convert_tags_hash_to_array(tags) + + names = client.describe_auto_scaling_groups().data['auto_scaling_groups'].find_all { |group| + # Convert the auto scaling group's tags into an array + group_tags = group.tags.map do |tag| + { key: tag.key, value: tag.value } + end + + # Check if all incoming tags are present in the auto scaling group + (tags - group_tags).empty? 
+ }.map(&:auto_scaling_group_name) + + if names.count == 1 + names[0] + elsif names.count == 0 + STDERR.puts "Error: could not find any auto scaling group with tags = '#{tags}'" + [] + else + STDERR.puts "Error: there is more than one auto scaling group with tags = '#{tags}'" + names + end + end + + def self.get_ec2_instance_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) + filters = convert_tags_hash_to_aws_filters(tags) + filters << { name: 'instance-state-name', values: ['pending', 'running'] } + + # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#instances-instance_method. + filter_resources(self.sdk.resource(Aws::EC2, region), 'instances', filters) + end + + def self.get_rds_instance_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) + client = self.sdk.client(Aws::RDS, region) + + # Convert the incoming tags into an array + tags = convert_tags_hash_to_array(tags) + + ids = client.describe_db_instances().db_instances.find_all { |instance| + resp = client.list_tags_for_resource({ :resource_name => instance.db_instance_arn }) + if resp.nil? or resp.tag_list.empty? + STDERR.puts "Error: could not find any RDS database instance with tags = '#{tags}'" + return [] + end + + # Check if all incoming tags are present in the RDS database instance + instance_tags = convert_tags_hash_to_array(convert_aws_tags_to_hash(resp.tag_list)) + (tags - instance_tags).empty? + }.map(&:db_instance_identifier) + + if ids.count == 1 + ids[0] + elsif ids.count == 0 + STDERR.puts "Error: could not find any RDS database instance with tags = '#{tags}'" + [] + else + STDERR.puts "Error: there is more than one RDS database instance with tags = '#{tags}'" + ids + end + end + + def self.get_security_group_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) + filters = convert_tags_hash_to_aws_filters(tags) + + # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#security_groups-instance_method. 
+ filter_resources(self.sdk.resource(Aws::EC2, region), 'security_groups', filters) + end + + def self.get_vpc_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) + filters = convert_tags_hash_to_aws_filters(tags) + + # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#vpcs-instance_method. + filter_resources(self.sdk.resource(Aws::EC2, region), 'vpcs', filters) + end + + def self.get_subnet_ids_by_vpc_id(id, region = ENV['AWS_DEFAULT_REGION']) + # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#vpc-instance_method. + vpc = self.sdk.resource(Aws::EC2, region).vpc(id) + + unless vpc.nil? + # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Vpc.html#subnets-instance_method. + vpc.subnets().sort_by { |subnet| + IPAddr.new(subnet.cidr_block) + }.map(&:id) + else + STDERR.puts "Error: could not find a VPC with ID = '#{id}'" + [] + end + end + + private_constant :SDK + private_class_method :convert_tags_hash_to_aws_filters + private_class_method :filter_resources + end +end diff --git a/e2e-python/files/test/integration/default/libraries/fixture_data.rb b/e2e-python/files/test/integration/default/libraries/fixture_data.rb new file mode 100644 index 000000000..2f9d7ec03 --- /dev/null +++ b/e2e-python/files/test/integration/default/libraries/fixture_data.rb @@ -0,0 +1,49 @@ +require 'json' + +module SpecHelper + class FixtureData + @data + + def json_vars?() + ENV.has_key?('JSON_VARS_FILE') and ENV['JSON_VARS_FILE'] != '' + end + + def initialize(suite = 'default') + if json_vars? then + @data = JSON.parse(File.read(ENV['JSON_VARS_FILE'])) + else + @data = JSON.parse(File.read('test/integration/' + suite + '/files/main.json')) + extract_first_element_of_array(@data) + end + end + + def locals + json_vars? ? @data : extract_first_element_of_array(@data['locals']) + end + + def for_module(name = nil) + json_vars? ? 
@data : extract_first_element_of_array(@data['module'].select { |x| x[name] }.first[name]) + end + + def for_resource(type = nil, name = nil) + tdata = @data['resource'].select { |x| x[type] } # array having all resources of given type + tdata = tdata.select { |x| x[type][name] }.first # select the item matching resource name + extract_first_element_of_array(tdata[type][name]) # trim given structure + json_vars? ? @data : tdata[type][name] + end + + private :json_vars? + + private + + def extract_first_element_of_array(myhash = nil) + myhash.each do |k, v| + if !(['module', 'resource', 'data'].include? k.to_s) + if v.kind_of?(Array) + myhash[k] = v[0] + end + end + end + end + end +end diff --git a/e2e-python/files/test/integration/default/libraries/terraform_data.rb b/e2e-python/files/test/integration/default/libraries/terraform_data.rb new file mode 100644 index 000000000..ca996c30b --- /dev/null +++ b/e2e-python/files/test/integration/default/libraries/terraform_data.rb @@ -0,0 +1,15 @@ +require 'json' + +module SpecHelper + class TerraformData + @data + + def initialize(path = '.terraform-data.json') + @data = JSON.parse(File.read(path)) + end + + def [](key) + @data[key] + end + end +end diff --git a/e2e-python/files/tests/acceptance/great_expectations/.gitignore b/e2e-python/files/tests/acceptance/great_expectations/.gitignore new file mode 100644 index 000000000..40e0c4641 --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/.gitignore @@ -0,0 +1,2 @@ +uncommitted/ +expectaions/.ge_store_backend_id diff --git a/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml b/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml new file mode 100644 index 000000000..3bcdcf4d4 --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml @@ -0,0 +1,32 @@ +name: athena_checkpoint +config_version: 1.0 +template_name: 
+module_name: great_expectations.checkpoint +class_name: Checkpoint +run_name_template: '%Y%m%d-%H%M%S-verification-no-failures' +expectation_suite_name: +batch_request: {} +action_list: + - name: store_validation_result + action: + class_name: StoreValidationResultAction + - name: store_evaluation_params + action: + class_name: StoreEvaluationParametersAction + - name: update_data_docs + action: + class_name: UpdateDataDocsAction + site_names: [] +evaluation_parameters: {} +runtime_configuration: {} +validations: + - batch_request: + datasource_name: AWS-Athena-datasource + data_connector_name: default_configured_data_connector_name + data_asset_name: address + data_connector_query: + index: -1 + expectation_suite_name: athena_validation_suite +profilers: [] +ge_cloud_id: +expectation_suite_ge_cloud_id: diff --git a/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml b/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml new file mode 100644 index 000000000..aac60db9d --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml @@ -0,0 +1,32 @@ +name: person_checkpoint +config_version: 1.0 +template_name: +module_name: great_expectations.checkpoint +class_name: Checkpoint +run_name_template: '%Y%m%d-%H%M%S-verification-no-failures' +expectation_suite_name: +batch_request: {} +action_list: + - name: store_validation_result + action: + class_name: StoreValidationResultAction + - name: store_evaluation_params + action: + class_name: StoreEvaluationParametersAction + - name: update_data_docs + action: + class_name: UpdateDataDocsAction + site_names: [] +evaluation_parameters: {} +runtime_configuration: {} +validations: + - batch_request: + datasource_name: AWS-Athena-datasource + data_connector_name: default_configured_data_connector_name + data_asset_name: person + data_connector_query: + index: -1 + expectation_suite_name: 
person_validation_suite +profilers: [] +ge_cloud_id: +expectation_suite_ge_cloud_id: diff --git a/e2e-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json b/e2e-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json new file mode 100644 index 000000000..949ba1234 --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json @@ -0,0 +1,68 @@ +{ + "data_asset_type": null, + "expectation_suite_name": "athena_validation_suite", + "expectations": [ + { + "expectation_type": "expect_table_columns_to_match_set", + "kwargs": { + "column_set": [ + "_hoodie_commit_time", + "_hoodie_commit_seqno", + "_hoodie_record_key", + "_hoodie_partition_path", + "_hoodie_file_name", + "address_id", + "address_line_1", + "address_line_2", + "address_line_3", + "address_line_4", + "address_owner_key_1", + "address_owner_key_2", + "address_owner_key_3", + "address_owner_key_4", + "address_owner_key_5", + "address_type_code", + "country_code", + "last_updated_date", + "owner", + "post_zip_code", + "primary_address_flag", + "province_county", + "province_county_code", + "table_short_name", + "town_city", + "updated_during_mon_visit_by", + "update_count", + "aud_action_flag", + "aud_date_changed", + "aud_personnel_no", + "time_zone_offset", + "transaction_no", + "ingestion_timestamp", + "pr_tab_hist_hkey", + "pr_tab_hkey", + "audit_id", + "audit_task_id", + "int_tec_from_dt", + "int_tec_to_dt", + "curr_flg", + "del_flg", + "modulekey" + ] + }, + "meta": {} + }, + { + "expectation_type": "expect_table_row_count_to_equal", + "kwargs": { + "value": 0 + }, + "meta": {} + } + ], + + "ge_cloud_id": null, + "meta": { + "great_expectations_version": "0.17.9" + } +} diff --git a/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json b/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json new file 
mode 100644 index 000000000..65b5a46f5 --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json @@ -0,0 +1,47 @@ +{ + "data_asset_type": null, + "expectation_suite_name": "person_validation_suite", + "expectations": [ + { + "expectation_type": "expect_table_columns_to_match_set", + "kwargs": { + "column_set": [ + "name", + "surname", + "age", + "location" ] + }, + "meta": {} + }, + { + "expectation_type": "expect_table_row_count_to_equal", + "kwargs": { + "value": 3 + }, + "meta": {} + }, + { + "expectation_type": "expect_column_value_lengths_to_be_between", + "kwargs": { + "column": "name", + "min_value": 3, + "max_value": 10 + + }, + "meta": {} + }, + { + "expectation_type": "expect_column_values_to_not_be_null", + "kwargs": { + "column": "name", + "mostly": 0.8 + }, + "meta": {} + } + ], + + "ge_cloud_id": null, + "meta": { + "great_expectations_version": "0.17.9" + } +} diff --git a/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml b/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml new file mode 100644 index 000000000..ed499dbdf --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml @@ -0,0 +1,124 @@ + +# Welcome to Great Expectations! Always know what to expect from your data. +# +# Here you can define datasources, batch kwargs generators, integrations and +# more. This file is intended to be committed to your repo. For help with +# configuration please: +# - Read our docs: https://docs.greatexpectations.io/docs/guides/connecting_to_your_data/connect_to_data_overview/#2-configure-your-datasource +# - Join our slack channel: http://greatexpectations.io/slack + +# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility +# It is auto-generated and usually does not need to be changed. 
+config_version: 3 + +# Datasources tell Great Expectations where your data lives and how to get it. +# You can use the CLI command `great_expectations datasource new` to help you +# add a new datasource. Read more at https://docs.greatexpectations.io/docs/guides/connecting_to_your_data/connect_to_data_overview +datasources: + AWS-Athena-datasource: + module_name: great_expectations.datasource + execution_engine: + class_name: SqlAlchemyExecutionEngine + module_name: great_expectations.execution_engine + connection_string: awsathena+rest://:@athena.eu-west-1.amazonaws.com:443/greatexpectationsdb?s3_staging_dir=s3://greatexpectationss3/ + class_name: Datasource + data_connectors: + default_configured_data_connector_name: + module_name: great_expectations.datasource.data_connector + class_name: ConfiguredAssetSqlDataConnector + assets: + address: + module_name: great_expectations.datasource.data_connector.asset + class_name: Asset + schema_name: greatexpectationsdb + person: + module_name: great_expectations.datasource.data_connector.asset + class_name: Asset + schema_name: greatexpectationsdb + name: default_configured_data_connector_name + +# This config file supports variable substitution which enables: 1) keeping +# secrets out of source control & 2) environment-based configuration changes +# such as staging vs prod. +# +# When GX encounters substitution syntax (like `my_key: ${my_value}` or +# `my_key: $my_value`) in the great_expectations.yml file, it will attempt +# to replace the value of `my_key` with the value from an environment +# variable `my_value` or a corresponding key read from this config file, +# which is defined through the `config_variables_file_path`. +# Environment variables take precedence over variables defined here. +# +# Substitution values defined here can be a simple (non-nested) value, +# nested value such as a dictionary, or an environment variable (i.e. 
${ENV_VAR}) +# +# +# https://docs.greatexpectations.io/docs/guides/setup/configuring_data_contexts/how_to_configure_credentials + + +config_variables_file_path: uncommitted/config_variables.yml + +# The plugins_directory will be added to your python path for custom modules +# used to override and extend Great Expectations. +plugins_directory: plugins/ + +stores: +# Stores are configurable places to store things like Expectations, Validations +# Data Docs, and more. These are for advanced users only - most users can simply +# leave this section alone. +# +# Three stores are required: expectations, validations, and +# evaluation_parameters, and must exist with a valid store entry. Additional +# stores can be configured for uses such as data_docs, etc. + expectations_store: + class_name: ExpectationsStore + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: expectations/ + + validations_store: + class_name: ValidationsStore + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/validations/ + + evaluation_parameter_store: + # Evaluation Parameters enable dynamic expectations. Read more here: + # https://docs.greatexpectations.io/docs/reference/evaluation_parameters/ + class_name: EvaluationParameterStore + + checkpoint_store: + class_name: CheckpointStore + store_backend: + class_name: TupleFilesystemStoreBackend + suppress_store_backend_id: true + base_directory: checkpoints/ + + profiler_store: + class_name: ProfilerStore + store_backend: + class_name: TupleFilesystemStoreBackend + suppress_store_backend_id: true + base_directory: profilers/ + +expectations_store_name: expectations_store +validations_store_name: validations_store +evaluation_parameter_store_name: evaluation_parameter_store +checkpoint_store_name: checkpoint_store + +data_docs_sites: + # Data Docs make it simple to visualize data quality in your project. These + # include Expectations, Validations & Profiles. 
The are built for all + # Datasources from JSON artifacts in the local repo including validations & + # profiles from the uncommitted directory. Read more at https://docs.greatexpectations.io/docs/terms/data_docs + local_site: + class_name: SiteBuilder + # set to false to hide how-to buttons in Data Docs + show_how_to_buttons: true + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/data_docs/local_site/ + site_index_builder: + class_name: DefaultSiteIndexBuilder + +anonymous_usage_statistics: + enabled: True diff --git a/e2e-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css b/e2e-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css new file mode 100644 index 000000000..8bf5a1521 --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css @@ -0,0 +1,22 @@ +/*index page*/ +.ge-index-page-site-name-title {} +.ge-index-page-table-container {} +.ge-index-page-table {} +.ge-index-page-table-profiling-links-header {} +.ge-index-page-table-expectations-links-header {} +.ge-index-page-table-validations-links-header {} +.ge-index-page-table-profiling-links-list {} +.ge-index-page-table-profiling-links-item {} +.ge-index-page-table-expectation-suite-link {} +.ge-index-page-table-validation-links-list {} +.ge-index-page-table-validation-links-item {} + +/*breadcrumbs*/ +.ge-breadcrumbs {} +.ge-breadcrumbs-item {} + +/*navigation sidebar*/ +.ge-navigation-sidebar-container {} +.ge-navigation-sidebar-content {} +.ge-navigation-sidebar-title {} +.ge-navigation-sidebar-link {} diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py new file mode 100644 index 000000000..b36c0e2a2 --- /dev/null +++ 
b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py @@ -0,0 +1,39 @@ +import boto3 +import json + +def get_terraform_outputs(): + with open('terraform_outputs.json') as file: + output_json = json.load(file) + + return output_json + +def delete_test_database(): + tf_outputs = get_terraform_outputs() + aws_region = tf_outputs["aws_region"]["value"] + client = boto3.client('athena', region_name=aws_region) + q_delete_address_table = "DROP TABLE IF EXISTS address" + q_delete_person_table = "DROP TABLE IF EXISTS person" + q_delete_db = "DROP DATABASE IF EXISTS greatexpectationsdbtest" + execute_query(client, q_delete_address_table) + execute_query(client, q_delete_person_table) + execute_query(client, q_delete_db) + +def execute_query(client, query): + response = client.start_query_execution( + QueryString=query, + QueryExecutionContext={ + 'Database': 'greatexpectationsdbtest' + }, + ResultConfiguration={ + 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', + } + ) + print("Query execution ID: ", response['QueryExecutionId']) + + +def main(): + delete_test_database() + + +if __name__ == "__main__": + main() diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py new file mode 100644 index 000000000..517e312f2 --- /dev/null +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py @@ -0,0 +1,143 @@ +import os +import boto3 +import json + +def get_terraform_outputs(): + with open('terraform_outputs.json') as file: + output_json = json.load(file) + + return output_json + +def setup_test_database(): + tf_outputs = get_terraform_outputs() + aws_region = tf_outputs["aws_region"]["value"] + client = boto3.client('athena', region_name=aws_region) + created = create_database(client) + if created: + address_table_creation_query(client) + 
person_table_creation_query(client) + + +def create_database(client): + + query = "CREATE DATABASE greatexpectationsdbtest" + response = client.start_query_execution( + QueryString=query, + ResultConfiguration={ + 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', + } + ) + print('Database created.') + return 1 + +def address_table_creation_query(client): + query = """ + CREATE EXTERNAL TABLE IF NOT EXISTS `address`( + `_hoodie_commit_time` string COMMENT '', + `_hoodie_commit_seqno` string COMMENT '', + `_hoodie_record_key` string COMMENT '', + `_hoodie_partition_path` string COMMENT '', + `_hoodie_file_name` string COMMENT '', + `address_id` decimal(10,0) COMMENT '', + `address_line_1` string COMMENT '', + `address_line_2` string COMMENT '', + `address_line_3` string COMMENT '', + `address_line_4` string COMMENT '', + `address_owner_key_1` string COMMENT '', + `address_owner_key_2` string COMMENT '', + `address_owner_key_3` string COMMENT '', + `address_owner_key_4` string COMMENT '', + `address_owner_key_5` string COMMENT '', + `address_type_code` decimal(6,0) COMMENT '', + `country_code` string COMMENT '', + `last_updated_date` timestamp COMMENT '', + `owner` string COMMENT '', + `post_zip_code` string COMMENT '', + `primary_address_flag` string COMMENT '', + `province_county` string COMMENT '', + `province_county_code` string COMMENT '', + `table_short_name` string COMMENT '', + `town_city` string COMMENT '', + `updated_during_mon_visit_by` decimal(6,0) COMMENT '', + `update_count` decimal(8,0) COMMENT '', + `aud_action_flag` string COMMENT '', + `aud_date_changed` timestamp COMMENT '', + `aud_personnel_no` decimal(6,0) COMMENT '', + `time_zone_offset` decimal(4,2) COMMENT '', + `transaction_no` decimal(38,0) COMMENT '', + `ingestion_timestamp` bigint COMMENT '', + `pr_tab_hist_hkey` string COMMENT '', + `pr_tab_hkey` string COMMENT '', + `audit_id` string COMMENT '', + `audit_task_id` string COMMENT '', + `int_tec_from_dt` date COMMENT '', + 
`int_tec_to_dt` date COMMENT '', + `curr_flg` int COMMENT '', + `del_flg` int COMMENT '') + PARTITIONED BY ( + `modulekey` bigint COMMENT '') + ROW FORMAT SERDE + 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' + WITH SERDEPROPERTIES ( + 'hoodie.query.as.ro.table'='false', + 'path'='s3://gxdbtests3/clean/address') + STORED AS INPUTFORMAT + 'org.apache.hudi.hadoop.HoodieParquetInputFormat' + OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' + LOCATION + 's3://gxdbtests3/clean/address' + TBLPROPERTIES ( + 'last_commit_time_sync'='20230725080610371', + 'spark.sql.create.version'='3.3.0-amzn-1', + 'spark.sql.sources.provider'='hudi', + 'spark.sql.sources.schema.numPartCols'='1', + 'spark.sql.sources.schema.numParts'='1', + 'spark.sql.sources.schema.part.0'='{\"type\":\"struct\",\"fields\":[{\"name\":\"_hoodie_commit_time\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_commit_seqno\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_record_key\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_partition_path\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_file_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_id\",\"type\":\"decimal(10,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_own
er_key_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_5\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_type_code\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"country_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"last_updated_date\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"owner\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"post_zip_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"primary_address_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"province_county\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"province_county_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"table_short_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"town_city\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"updated_during_mon_visit_by\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"update_count\",\"type\":\"decimal(8,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_action_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_date_changed\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_personnel_no\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"time_zone_offset\",\"type\":\"decimal(4,2)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"transaction_no\",\"type\":\"decimal(38,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"ingestion_timestamp\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hist_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_ta
sk_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_from_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_to_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"curr_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"del_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"moduleKey\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}}]}', + 'spark.sql.sources.schema.partCol.0'='moduleKey', + 'transient_lastDdlTime'='1690272434') + """ + execute_query(client, query) + +def person_table_creation_query(client): + query = """ + CREATE EXTERNAL TABLE IF NOT EXISTS person ( + name VARCHAR(50), + surname VARCHAR(50), + age INT, + location VARCHAR(100) + ) + ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' + LOCATION 's3://gxdbtests3/clean/person'; + """ + execute_query(client, query) + + query_insert_data = """ + INSERT INTO person (name, surname, age, location) + VALUES + ('John', 'Doe', 25, 'New York'), + ('Jane', 'Smith', 32, 'London'), + ('Michael', 'Johnson', 45, 'Berlin'); + """ + execute_query(client, query_insert_data) + + +def execute_query(client, query): + response = client.start_query_execution( + QueryString=query, + QueryExecutionContext={ + 'Database': 'greatexpectationsdbtest' + }, + ResultConfiguration={ + 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', + } + ) + print("Query execution ID: ", response['QueryExecutionId']) + + +def main(): + setup_test_database() + +if __name__ == "__main__": + main() diff --git a/e2e-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py b/e2e-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py new file mode 100644 index 000000000..2059ba3de --- /dev/null +++ b/e2e-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py @@ -0,0 +1,66 @@ +import boto3 +import pytest +import allure +import json +import os +import datetime +import pytz + + +def get_terraform_outputs(): 
+ with open('terraform_outputs.json') as file: + output_json = json.load(file) + + return output_json + + +def get_env_vars(): + environment = os.environ['ENVIRONMENT'] + env_vars_path = f"environments/{environment}.json" + with open(env_vars_path, 'r') as file: + data = json.load(file) + + return data + +'''Remember to rename the test with this format __test e.g: EDPTP457_s3_file_present_test''' +def Demo_s3_file_present_test(record_property): + outputs_tf = get_terraform_outputs() + bucket_name = outputs_tf["bitbucket_s3bucket_name"]["value"] + env_vars = get_env_vars() + file_key = env_vars['repository'] + '-' + env_vars['branch_name'] + '.zip' + + record_property( + "test_evidence_1", + f"Name of the bucket search: {bucket_name}, file to search in the bucket: {file_key}" + ) + + s3_client = boto3.client('s3') + with allure.step("Check if file exists in S3 bucket"): + response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=file_key) + file_present = 'Contents' in response + record_property( + "test_evidence_2", + f"Response form the call to the S3 bucket: {file_key}" + ) + assert file_present, f"File '{file_key}' not found in S3 bucket '{bucket_name}'" + +'''Remember to rename the test with this format __test e.g: EDPTP456_s3_file_present_test''' +def Demo_test_pipeline_execution_time_test(record_property): + outputs_tf = get_terraform_outputs() + codepipeline_name = outputs_tf['codepipeline_name']['value'] + client = boto3.client('codepipeline') + + record_property( + "test_evidence_1", + f"Name of the pipeline: {codepipeline_name}" + ) + + with allure.step("Check aws pipeline last execution"): + response = client.get_pipeline_state(name=codepipeline_name) + last_execution = response['stageStates'][0]['actionStates'][0]['latestExecution']['lastStatusChange'] + record_property( + "test_evidence_2", + f"Response from the Pipeline, last execution was on date: {last_execution}" + ) + now = datetime.datetime.now(pytz.UTC) + assert last_execution > now - 
datetime.timedelta(hours=24), f"Pipeline has not been executed in the last 24 hours" diff --git a/e2e-python/files/tests/installation/installation_test.py b/e2e-python/files/tests/installation/installation_test.py new file mode 100644 index 000000000..54154591b --- /dev/null +++ b/e2e-python/files/tests/installation/installation_test.py @@ -0,0 +1,4 @@ +#In this folder you can place your installation test cases if needed + +def dummy_test(): + assert 1==1 diff --git a/e2e-python/files/tests/integration/integration_test.py b/e2e-python/files/tests/integration/integration_test.py new file mode 100644 index 000000000..45516ba1f --- /dev/null +++ b/e2e-python/files/tests/integration/integration_test.py @@ -0,0 +1,4 @@ +#In this folder you can place your integration test cases if needed + +def dummy_test(): + assert 1==1 diff --git a/e2e-python/files/utils/checkpoints_executions.py b/e2e-python/files/utils/checkpoints_executions.py new file mode 100644 index 000000000..0fb945482 --- /dev/null +++ b/e2e-python/files/utils/checkpoints_executions.py @@ -0,0 +1,17 @@ +import os +from great_expectations import DataContext + + +folder_name = "tests/acceptance/great_expectations" + +context = DataContext(folder_name) + +checkpoints_dir = os.path.join(folder_name, "checkpoints") + +for filename in os.listdir(checkpoints_dir): + + if filename.endswith(".yml"): + checkpoint_name = os.path.splitext(filename)[0] + checkpoint_path = os.path.join(checkpoints_dir, filename) + + context.run_checkpoint(checkpoint_name=checkpoint_name) diff --git a/e2e-python/files/utils/json2JUnit.py b/e2e-python/files/utils/json2JUnit.py new file mode 100644 index 000000000..7b80a04ae --- /dev/null +++ b/e2e-python/files/utils/json2JUnit.py @@ -0,0 +1,75 @@ +import json +import glob +import xml.etree.ElementTree as ET +import xml.dom.minidom as minidom +import datetime +import os + + +folder_name = "tests/acceptance/great_expectations" +output_path = folder_name + "/uncommitted/validations/junit.xml" 
+jsons_location_path = folder_name + "/uncommitted/validations/**/*.json" + +json_files = glob.glob(jsons_location_path, recursive=True) + +root = ET.Element("testsuites", name="GreatExpectations") + +total_tests = 0 +total_failures = 0 + +for json_file_path in json_files: + + with open(json_file_path, "r") as json_file: + data = json.load(json_file) + + validation_time = datetime.datetime.strptime(data["meta"]["validation_time"], "%Y%m%dT%H%M%S.%fZ") + ge_load_time = datetime.datetime.strptime(data["meta"]["batch_markers"]["ge_load_time"], "%Y%m%dT%H%M%S.%fZ") + execution_time = validation_time - ge_load_time + + failures_checkpoint = 0 + if data['statistics']['unsuccessful_expectations'] > 0: + failures_checkpoint = 1 + + testsuite = ET.SubElement( + root, "testsuite", + # id=data["meta"]["run_id"]["run_name"], --Not necessary for now + name=data["meta"]["checkpoint_name"], + tests="1", + failures=str(failures_checkpoint), + time=str(execution_time.total_seconds()) + ) + + total_tests += 1 + if data["statistics"]["unsuccessful_expectations"] > 0: + total_failures += 1 + + testcase = ET.SubElement( + testsuite, + "testcase", + name=data["meta"]["checkpoint_name"], + evaluated_expectations=str(data['statistics']['evaluated_expectations']), + successful_expectations=str(data['statistics']['successful_expectations']), + unsuccessful_expectations=str(data['statistics']['unsuccessful_expectations']), + log=data["results"] + ) + + for idx, result in enumerate(data["results"], start=1): + + if not result["success"]: + exception_message = str(result["exception_info"]["exception_message"] if result["exception_info"][ + "raised_exception"] else None) + expectation_config = str(result["expectation_config"]) + observed_vaue = str(result["result"]) + failure = ET.SubElement( + testcase, + "failure", + message=exception_message + expectation_config + observed_vaue + ) + failure.text = exception_message + expectation_config + observed_vaue + +root.set("tests", str(total_tests)) 
+root.set("failures", str(total_failures)) +tree = ET.ElementTree(root) + +with open(output_path, 'wb') as f: + f.write(minidom.parseString(ET.tostring(root)).toprettyxml(encoding="utf-8")) diff --git a/e2e-python/files/variables.tf b/e2e-python/files/variables.tf new file mode 100644 index 000000000..09f01e0f3 --- /dev/null +++ b/e2e-python/files/variables.tf @@ -0,0 +1,76 @@ +# ----------------------------------------------------------------------------- +# ENVIRONMENT VARIABLES +# This stack supports the following secrets as environment variables. +# ----------------------------------------------------------------------------- + +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_DEFAULT_REGION + +# ----------------------------------------------------------------------------- +# REQUIRED PARAMETERS +# The following parameters require a value. +# Documentation: https://www.terraform.io/docs/configuration/variables.html +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# OPTIONAL PARAMETERS +# The following parameters are optional with sensible defaults. +# Documentation: https://www.terraform.io/docs/configuration/variables.html +# ----------------------------------------------------------------------------- + +variable "name" { + description = "The name of the stack." + type = string + default = "stack-aws-quickstarter" +} + +variable "meta_environment" { + description = "The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION." 
+ type = string + default = "DEVELOPMENT" +} + + +/* +# CodeBuild +variable "build_project_name" {} + +# CodePipeline +variable "codepipeline_name" {} + +# iam_roles +variable "pipeline_role_name" {} +variable "codebuild_role_name" {} +variable "codepipeline_policy_name" {} +variable "codebuild_policy_name" {} + +# s3 +variable "codepipeline_bucket_name" {} +variable "e2e_results_bucket_name" {} +variable "bitbucket_source_bucket_name" {} +*/ + +variable "projectId" { + description = "EDP project name" + type = string + default = "testpg" +} + +variable "environment" { + description = "The project execution environment." + type = string + default = "dev" +} + +variable "repository" { + description = "QS bitbucket repository" + type = string + default = "e2e-python" +} + +variable "branch_name" { + description = "repository branch_name" + type = string + default = "master" +} diff --git a/e2e-python/files/versions.tf b/e2e-python/files/versions.tf new file mode 100644 index 000000000..88efbc759 --- /dev/null +++ b/e2e-python/files/versions.tf @@ -0,0 +1,13 @@ +terraform { + required_version = ">= 1.0" + required_providers { + aws = { + source = "hashicorp/aws" + version = "4.67.0" + } + random = { + source = "hashicorp/random" + version = "3.5.1" + } + } +} diff --git a/e2e-python/prod.yml.template b/e2e-python/prod.yml.template new file mode 100644 index 000000000..b9d192b92 --- /dev/null +++ b/e2e-python/prod.yml.template @@ -0,0 +1,7 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-prod + secret: @project_id@-cd-aws-secret-access-key-prod + +account: "" diff --git a/e2e-python/test.yml.template b/e2e-python/test.yml.template new file mode 100644 index 000000000..0f6413d32 --- /dev/null +++ b/e2e-python/test.yml.template @@ -0,0 +1,8 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-test + secret: @project_id@-cd-aws-secret-access-key-test + +account: "" + diff --git 
a/e2e-python/testdata/golden/jenkins-build-stages.json b/e2e-python/testdata/golden/jenkins-build-stages.json new file mode 100644 index 000000000..32f609fbe --- /dev/null +++ b/e2e-python/testdata/golden/jenkins-build-stages.json @@ -0,0 +1,18 @@ +[ + { + "stage": "odsPipeline start", + "status": "SUCCESS" + }, + { + "stage": "Integration Test", + "status": "SUCCESS" + }, + { + "stage": "SonarQube Analysis", + "status": "SUCCESS" + }, + { + "stage": "odsPipeline finished", + "status": "SUCCESS" + } +] \ No newline at end of file diff --git a/e2e-python/testdata/golden/jenkins-provision-stages.json b/e2e-python/testdata/golden/jenkins-provision-stages.json new file mode 100644 index 000000000..9e9ae3cb8 --- /dev/null +++ b/e2e-python/testdata/golden/jenkins-provision-stages.json @@ -0,0 +1,26 @@ +[ + { + "stage": "Checkout quickstarter", + "status": "SUCCESS" + }, + { + "stage": "Initialize output directory", + "status": "SUCCESS" + }, + { + "stage": "Copy files from quickstarter", + "status": "SUCCESS" + }, + { + "stage": "Create Jenkinsfile", + "status": "SUCCESS" + }, + { + "stage": "Create sonar-project.properties", + "status": "SUCCESS" + }, + { + "stage": "Push to remote", + "status": "SUCCESS" + } +] diff --git a/e2e-python/testdata/golden/sonar-scan.json b/e2e-python/testdata/golden/sonar-scan.json new file mode 100644 index 000000000..22413e7c9 --- /dev/null +++ b/e2e-python/testdata/golden/sonar-scan.json @@ -0,0 +1,30 @@ +{ + "key": "{{.ProjectID}}-{{.ComponentID}}", + "name": "{{.ProjectID}}-{{.ComponentID}}", + "isFavorite": false, + "visibility": "public", + "extensions": [], + "qualityProfiles": [ + { + "name": "{{.SonarQualityProfile}}", + "language": "js", + "deleted": false + }, + { + "name": "{{.SonarQualityProfile}}", + "language": "ts", + "deleted": false + } + ], + "qualityGate": { + "name": "Sonar way", + "isDefault": true + }, + "breadcrumbs": [ + { + "key": "{{.ProjectID}}-{{.ComponentID}}", + "name": "{{.ProjectID}}-{{.ComponentID}}", + 
"qualifier": "TRK" + } + ] +} \ No newline at end of file diff --git a/e2e-python/testdata/steps.yml b/e2e-python/testdata/steps.yml new file mode 100644 index 000000000..0b5433255 --- /dev/null +++ b/e2e-python/testdata/steps.yml @@ -0,0 +1,15 @@ +componentID: e2e-python +steps: +- type: provision + provisionParams: + verify: + jenkinsStages: golden/jenkins-provision-stages.json +- type: build + buildParams: + verify: + jenkinsStages: golden/jenkins-build-stages.json + sonarScan: golden/sonar-scan.json + runAttachments: + - SCRR-{{.ProjectID}}-{{.ComponentID}}.docx + - SCRR-{{.ProjectID}}-{{.ComponentID}}.md + testResults: 2 diff --git a/e2e-python/testing.yml.template b/e2e-python/testing.yml.template new file mode 100644 index 000000000..8c93f3dcc --- /dev/null +++ b/e2e-python/testing.yml.template @@ -0,0 +1,5 @@ +region: eu-west-1 + +credentials: + key: @project_id@-cd-aws-access-key-id-testing + secret: @project_id@-cd-aws-secret-access-key-testing From cc4059be103f4e916423551d73e6634576c52a10 Mon Sep 17 00:00:00 2001 From: perezpec Date: Wed, 17 Jan 2024 15:54:36 +0100 Subject: [PATCH 08/32] use functional pre/post_requistes.py in demo tests --- .../great_expectations/test_preparation/post_requisites.py | 4 ++-- .../great_expectations/test_preparation/pre_requisites.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py index b36c0e2a2..06bad2882 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py @@ -13,7 +13,7 @@ def delete_test_database(): client = boto3.client('athena', region_name=aws_region) q_delete_address_table = "DROP TABLE IF EXISTS address" q_delete_person_table = "DROP TABLE IF EXISTS person" - q_delete_db = 
"DROP DATABASE IF EXISTS greatexpectationsdbtest" + q_delete_db = "DROP DATABASE IF EXISTS greatexpectationsdb" execute_query(client, q_delete_address_table) execute_query(client, q_delete_person_table) execute_query(client, q_delete_db) @@ -22,7 +22,7 @@ def execute_query(client, query): response = client.start_query_execution( QueryString=query, QueryExecutionContext={ - 'Database': 'greatexpectationsdbtest' + 'Database': 'greatexpectationsdb' }, ResultConfiguration={ 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py index 517e312f2..25a4c83d9 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py @@ -20,7 +20,7 @@ def setup_test_database(): def create_database(client): - query = "CREATE DATABASE greatexpectationsdbtest" + query = "CREATE DATABASE greatexpectationsdb" response = client.start_query_execution( QueryString=query, ResultConfiguration={ @@ -127,7 +127,7 @@ def execute_query(client, query): response = client.start_query_execution( QueryString=query, QueryExecutionContext={ - 'Database': 'greatexpectationsdbtest' + 'Database': 'greatexpectationsdb' }, ResultConfiguration={ 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', From bf4a0e689c91d130b3b29856e8205d0e2585fb57 Mon Sep 17 00:00:00 2001 From: perezpec Date: Thu, 18 Jan 2024 09:47:17 +0100 Subject: [PATCH 09/32] remove person expectation expect_table_row_count_to_equal --- .../expectations/person_validation_suite.json | 7 ------- 1 file changed, 7 deletions(-) diff --git a/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json b/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json index 
65b5a46f5..db8ea030a 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json +++ b/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json @@ -13,13 +13,6 @@ }, "meta": {} }, - { - "expectation_type": "expect_table_row_count_to_equal", - "kwargs": { - "value": 3 - }, - "meta": {} - }, { "expectation_type": "expect_column_value_lengths_to_be_between", "kwargs": { From 8f30f5d2e011efcfe212b375b3ba5d38ea8354d0 Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 23 Jan 2024 15:15:33 +0100 Subject: [PATCH 10/32] 1. Sample tests working 2. json2JUnit.py escaping characters --- .../great_expectations/great_expectations.yml | 52 +++---- .../test_preparation/post_requisites.py | 25 ++- .../test_preparation/pre_requisites.py | 144 +++++++++++------- e2e-python/files/utils/json2JUnit.py | 26 ++-- 4 files changed, 148 insertions(+), 99 deletions(-) diff --git a/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml b/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml index ed499dbdf..642b55d73 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml +++ b/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml @@ -1,4 +1,3 @@ - # Welcome to Great Expectations! Always know what to expect from your data. # # Here you can define datasources, batch kwargs generators, integrations and @@ -9,52 +8,33 @@ # config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility # It is auto-generated and usually does not need to be changed. -config_version: 3 +config_version: 3.0 # Datasources tell Great Expectations where your data lives and how to get it. # You can use the CLI command `great_expectations datasource new` to help you # add a new datasource. 
Read more at https://docs.greatexpectations.io/docs/guides/connecting_to_your_data/connect_to_data_overview datasources: AWS-Athena-datasource: + class_name: Datasource module_name: great_expectations.datasource execution_engine: class_name: SqlAlchemyExecutionEngine module_name: great_expectations.execution_engine - connection_string: awsathena+rest://:@athena.eu-west-1.amazonaws.com:443/greatexpectationsdb?s3_staging_dir=s3://greatexpectationss3/ - class_name: Datasource + connection_string: ${connection_string} data_connectors: default_configured_data_connector_name: - module_name: great_expectations.datasource.data_connector + name: default_configured_data_connector_name class_name: ConfiguredAssetSqlDataConnector + module_name: great_expectations.datasource.data_connector assets: address: - module_name: great_expectations.datasource.data_connector.asset class_name: Asset + module_name: great_expectations.datasource.data_connector.asset schema_name: greatexpectationsdb person: - module_name: great_expectations.datasource.data_connector.asset class_name: Asset + module_name: great_expectations.datasource.data_connector.asset schema_name: greatexpectationsdb - name: default_configured_data_connector_name - -# This config file supports variable substitution which enables: 1) keeping -# secrets out of source control & 2) environment-based configuration changes -# such as staging vs prod. -# -# When GX encounters substitution syntax (like `my_key: ${my_value}` or -# `my_key: $my_value`) in the great_expectations.yml file, it will attempt -# to replace the value of `my_key` with the value from an environment -# variable `my_value` or a corresponding key read from this config file, -# which is defined through the `config_variables_file_path`. -# Environment variables take precedence over variables defined here. -# -# Substitution values defined here can be a simple (non-nested) value, -# nested value such as a dictionary, or an environment variable (i.e. 
${ENV_VAR}) -# -# -# https://docs.greatexpectations.io/docs/guides/setup/configuring_data_contexts/how_to_configure_credentials - - config_variables_file_path: uncommitted/config_variables.yml # The plugins_directory will be added to your python path for custom modules @@ -82,10 +62,7 @@ stores: base_directory: uncommitted/validations/ evaluation_parameter_store: - # Evaluation Parameters enable dynamic expectations. Read more here: - # https://docs.greatexpectations.io/docs/reference/evaluation_parameters/ class_name: EvaluationParameterStore - checkpoint_store: class_name: CheckpointStore store_backend: @@ -112,13 +89,18 @@ data_docs_sites: # profiles from the uncommitted directory. Read more at https://docs.greatexpectations.io/docs/terms/data_docs local_site: class_name: SiteBuilder - # set to false to hide how-to buttons in Data Docs show_how_to_buttons: true store_backend: - class_name: TupleFilesystemStoreBackend - base_directory: uncommitted/data_docs/local_site/ + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/data_docs/local_site/ site_index_builder: - class_name: DefaultSiteIndexBuilder + class_name: DefaultSiteIndexBuilder anonymous_usage_statistics: - enabled: True + data_context_id: caaa9cd1-4ad3-47ad-b01c-614ea6ad8e89 + enabled: true +notebooks: +include_rendered_content: + globally: false + expectation_suite: false + expectation_validation_result: false diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py index 06bad2882..9e9404474 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py @@ -1,6 +1,14 @@ import boto3 import json +""" + +This is an example of what you could do as a post-requisite. 
In the pre_requistie.py we deployed the athena dabase that would be tested. +Now, in this post_requisite.py we delete the database and the tables inside of it. + +""" + + def get_terraform_outputs(): with open('terraform_outputs.json') as file: output_json = json.load(file) @@ -19,20 +27,35 @@ def delete_test_database(): execute_query(client, q_delete_db) def execute_query(client, query): + tf_outputs = get_terraform_outputs() + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] response = client.start_query_execution( QueryString=query, QueryExecutionContext={ 'Database': 'greatexpectationsdb' }, ResultConfiguration={ - 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', + 'OutputLocation': f's3://{bucket_name}/db_test_outputs/', } ) print("Query execution ID: ", response['QueryExecutionId']) +def remove_unnecesarry_objects_s3src(): + tf_outputs = get_terraform_outputs() + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + s3 = boto3.resource('s3') + bucket = s3.Bucket(bucket_name) + for obj in bucket.objects.all(): + if not obj.key.endswith('.zip'): + obj.delete() + + + + def main(): delete_test_database() + remove_unnecesarry_objects_s3src() if __name__ == "__main__": diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py index 25a4c83d9..74e224a58 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py @@ -1,37 +1,54 @@ import os import boto3 import json +from pathlib import Path + +""" + +This is an example of what you could do as a pre-requisite before executing your great expectations tests, in this example we deploy an athena database +with two tables: person and address. 
+ +In the post_requisite.py we will delete this athena database + +""" + + + def get_terraform_outputs(): - with open('terraform_outputs.json') as file: - output_json = json.load(file) - - return output_json - -def setup_test_database(): - tf_outputs = get_terraform_outputs() - aws_region = tf_outputs["aws_region"]["value"] - client = boto3.client('athena', region_name=aws_region) - created = create_database(client) - if created: - address_table_creation_query(client) - person_table_creation_query(client) - - -def create_database(client): - - query = "CREATE DATABASE greatexpectationsdb" - response = client.start_query_execution( - QueryString=query, - ResultConfiguration={ - 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', - } - ) - print('Database created.') - return 1 - -def address_table_creation_query(client): - query = """ + with open('terraform_outputs.json') as file: + output_json = json.load(file) + return output_json + + +def setup_test_database(tf_outputs): + aws_region = tf_outputs["aws_region"]["value"] + # Create Athena tables + client = boto3.client('athena', region_name=aws_region) + created = create_database(client, tf_outputs) + if created: + address_table_creation_query(client, tf_outputs) + person_table_creation_query(client, tf_outputs) + + +def create_database(client, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + + query = "CREATE DATABASE greatexpectationsdb" + response = client.start_query_execution( + QueryString=query, + ResultConfiguration={ + 'OutputLocation': f's3://{bucket_name}/db_test_outputs/', + } + ) + print('Database created.') + return 1 + + +def address_table_creation_query(client, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + formated_string = 
"'spark.sql.sources.schema.part.0'='{\"type\":\"struct\",\"fields\":[{\"name\":\"_hoodie_commit_time\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_commit_seqno\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_record_key\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_partition_path\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_file_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_id\",\"type\":\"decimal(10,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_5\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_type_code\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"country_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"last_updated_date\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"owner\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"post_zip_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"primary_address_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"province_county\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"provinc
e_county_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"table_short_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"town_city\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"updated_during_mon_visit_by\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"update_count\",\"type\":\"decimal(8,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_action_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_date_changed\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_personnel_no\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"time_zone_offset\",\"type\":\"decimal(4,2)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"transaction_no\",\"type\":\"decimal(38,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"ingestion_timestamp\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hist_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_task_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_from_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_to_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"curr_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"del_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"moduleKey\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}}]}'," + query = f""" CREATE EXTERNAL TABLE IF NOT EXISTS `address`( `_hoodie_commit_time` string COMMENT '', `_hoodie_commit_seqno` string COMMENT '', @@ -80,27 +97,30 @@ def address_table_creation_query(client): 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' WITH SERDEPROPERTIES ( 
'hoodie.query.as.ro.table'='false', - 'path'='s3://gxdbtests3/clean/address') + 'path'='s3://{bucket_name}/clean/address') STORED AS INPUTFORMAT 'org.apache.hudi.hadoop.HoodieParquetInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' LOCATION - 's3://gxdbtests3/clean/address' + 's3://{bucket_name}/clean/address' TBLPROPERTIES ( 'last_commit_time_sync'='20230725080610371', 'spark.sql.create.version'='3.3.0-amzn-1', 'spark.sql.sources.provider'='hudi', 'spark.sql.sources.schema.numPartCols'='1', 'spark.sql.sources.schema.numParts'='1', - 'spark.sql.sources.schema.part.0'='{\"type\":\"struct\",\"fields\":[{\"name\":\"_hoodie_commit_time\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_commit_seqno\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_record_key\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_partition_path\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"_hoodie_file_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_id\",\"type\":\"decimal(10,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_line_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_1\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_4\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_owner_key_5\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"address_type_code\",\"typ
e\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"country_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"last_updated_date\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"owner\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"post_zip_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"primary_address_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"province_county\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"province_county_code\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"table_short_name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"town_city\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"updated_during_mon_visit_by\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"update_count\",\"type\":\"decimal(8,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_action_flag\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_date_changed\",\"type\":\"timestamp\",\"nullable\":true,\"metadata\":{}},{\"name\":\"aud_personnel_no\",\"type\":\"decimal(6,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"time_zone_offset\",\"type\":\"decimal(4,2)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"transaction_no\",\"type\":\"decimal(38,0)\",\"nullable\":true,\"metadata\":{}},{\"name\":\"ingestion_timestamp\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hist_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"pr_tab_hkey\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"audit_task_id\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_from_dt\",\"type\":\"date\",\"nullable\":true,\"metadata\":{}},{\"name\":\"int_tec_to_dt\",\"type\":\"date\",
\"nullable\":true,\"metadata\":{}},{\"name\":\"curr_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"del_flg\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"moduleKey\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}}]}', + {formated_string} 'spark.sql.sources.schema.partCol.0'='moduleKey', 'transient_lastDdlTime'='1690272434') """ - execute_query(client, query) + execute_query(client, query, tf_outputs) -def person_table_creation_query(client): - query = """ + +def person_table_creation_query(client, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + + query = f""" CREATE EXTERNAL TABLE IF NOT EXISTS person ( name VARCHAR(50), surname VARCHAR(50), @@ -109,35 +129,57 @@ def person_table_creation_query(client): ) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' - LOCATION 's3://gxdbtests3/clean/person'; + LOCATION 's3://{bucket_name}/clean/person'; """ - execute_query(client, query) + execute_query(client, query, tf_outputs) - query_insert_data = """ + query_insert_data = """ INSERT INTO person (name, surname, age, location) VALUES ('John', 'Doe', 25, 'New York'), ('Jane', 'Smith', 32, 'London'), ('Michael', 'Johnson', 45, 'Berlin'); """ - execute_query(client, query_insert_data) + execute_query(client, query_insert_data, tf_outputs) + + +def execute_query(client, query, tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + response = client.start_query_execution( + QueryString=query, + QueryExecutionContext={ + 'Database': 'greatexpectationsdb' + }, + ResultConfiguration={ + 'OutputLocation': f's3://{bucket_name}/db_test_outputs/', + } + ) + print("Query execution ID: ", response['QueryExecutionId']) + + + + +def setup_config_yml(tf_outputs): + bucket_name = tf_outputs["bitbucket_s3bucket_name"]["value"] + aws_region = tf_outputs["aws_region"]["value"] + # Create 'uncommitted' directory if it doesn't exist + uncommitted_path = 
Path('tests/acceptance/great_expectations/uncommitted') + uncommitted_path.mkdir(parents=True, exist_ok=True) -def execute_query(client, query): - response = client.start_query_execution( - QueryString=query, - QueryExecutionContext={ - 'Database': 'greatexpectationsdb' - }, - ResultConfiguration={ - 'OutputLocation': 's3://gxdbtests3/db_test_outputs/', - } - ) - print("Query execution ID: ", response['QueryExecutionId']) + # Write environment variables to config_variables.yml + config_file_path = uncommitted_path / 'config_variables.yml' + # Write environment variables to config_variables.yml + with open(config_file_path, 'w') as config_file: + connection_string = f"awsathena+rest://@athena.{aws_region}.amazonaws.com:443/greatexpectationsdb?s3_staging_dir=s3://{bucket_name}/great_expectations" + config_file.write(f"connection_string: {connection_string}\n") + print("Config yml setted") def main(): - setup_test_database() + tf_outputs = get_terraform_outputs() + setup_test_database(tf_outputs) + setup_config_yml(tf_outputs) if __name__ == "__main__": main() diff --git a/e2e-python/files/utils/json2JUnit.py b/e2e-python/files/utils/json2JUnit.py index 7b80a04ae..d4be2921e 100644 --- a/e2e-python/files/utils/json2JUnit.py +++ b/e2e-python/files/utils/json2JUnit.py @@ -1,10 +1,8 @@ import json import glob import xml.etree.ElementTree as ET -import xml.dom.minidom as minidom import datetime -import os - +import xml.sax.saxutils as saxutils folder_name = "tests/acceptance/great_expectations" output_path = folder_name + "/uncommitted/validations/junit.xml" @@ -17,6 +15,10 @@ total_tests = 0 total_failures = 0 +def escape_string(data): + json_str = json.dumps(data) + return saxutils.escape(json_str) + for json_file_path in json_files: with open(json_file_path, "r") as json_file: @@ -46,20 +48,20 @@ testcase = ET.SubElement( testsuite, "testcase", - name=data["meta"]["checkpoint_name"], - evaluated_expectations=str(data['statistics']['evaluated_expectations']), - 
successful_expectations=str(data['statistics']['successful_expectations']), - unsuccessful_expectations=str(data['statistics']['unsuccessful_expectations']), - log=data["results"] + name=escape_string(data["meta"]["checkpoint_name"]), + evaluated_expectations=escape_string(data['statistics']['evaluated_expectations']), + successful_expectations=escape_string(data['statistics']['successful_expectations']), + unsuccessful_expectations=escape_string(data['statistics']['unsuccessful_expectations']), + log=escape_string(data["results"]) ) for idx, result in enumerate(data["results"], start=1): if not result["success"]: - exception_message = str(result["exception_info"]["exception_message"] if result["exception_info"][ + exception_message = str(escape_string(result["exception_info"]["exception_message"]) if result["exception_info"][ "raised_exception"] else None) - expectation_config = str(result["expectation_config"]) - observed_vaue = str(result["result"]) + expectation_config = escape_string(result["expectation_config"]) + observed_vaue = escape_string(result["result"]) failure = ET.SubElement( testcase, "failure", @@ -72,4 +74,4 @@ tree = ET.ElementTree(root) with open(output_path, 'wb') as f: - f.write(minidom.parseString(ET.tostring(root)).toprettyxml(encoding="utf-8")) + tree.write(f, encoding="utf-8", xml_declaration=True) From f7a3e15968c0a2bc5aa0c1964f321ab022c7a60e Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 24 Jan 2024 10:04:50 +0100 Subject: [PATCH 11/32] remove escaping of testcase name --- e2e-python/files/utils/json2JUnit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e-python/files/utils/json2JUnit.py b/e2e-python/files/utils/json2JUnit.py index d4be2921e..df679adde 100644 --- a/e2e-python/files/utils/json2JUnit.py +++ b/e2e-python/files/utils/json2JUnit.py @@ -48,7 +48,7 @@ def escape_string(data): testcase = ET.SubElement( testsuite, "testcase", - name=escape_string(data["meta"]["checkpoint_name"]), + 
name=data["meta"]["checkpoint_name"], evaluated_expectations=escape_string(data['statistics']['evaluated_expectations']), successful_expectations=escape_string(data['statistics']['successful_expectations']), unsuccessful_expectations=escape_string(data['statistics']['unsuccessful_expectations']), From 6a1221c9868b2664c64d68031eb5e7d8683f84bb Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 24 Jan 2024 14:12:55 +0100 Subject: [PATCH 12/32] replace testpg by projectId in the variables.tf as a default value --- e2e-python/files/modules/codebuild/variables.tf | 2 +- e2e-python/files/modules/codepipeline/variables.tf | 2 +- e2e-python/files/variables.tf | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/e2e-python/files/modules/codebuild/variables.tf b/e2e-python/files/modules/codebuild/variables.tf index d8e75c53f..6c6ab022d 100644 --- a/e2e-python/files/modules/codebuild/variables.tf +++ b/e2e-python/files/modules/codebuild/variables.tf @@ -101,7 +101,7 @@ variable "local_id" { variable "projectId" { description = "EDP project name" type = string - default = "testpg" + default = "projectId" } variable "aws_region" { diff --git a/e2e-python/files/modules/codepipeline/variables.tf b/e2e-python/files/modules/codepipeline/variables.tf index 1da275b58..bba038a71 100644 --- a/e2e-python/files/modules/codepipeline/variables.tf +++ b/e2e-python/files/modules/codepipeline/variables.tf @@ -51,7 +51,7 @@ variable "local_id" { variable "projectId" { description = "EDP project name" type = string - default = "testpg" + default = "projectId" } variable "aws_region" { diff --git a/e2e-python/files/variables.tf b/e2e-python/files/variables.tf index 09f01e0f3..0404142ee 100644 --- a/e2e-python/files/variables.tf +++ b/e2e-python/files/variables.tf @@ -54,7 +54,7 @@ variable "bitbucket_source_bucket_name" {} variable "projectId" { description = "EDP project name" type = string - default = "testpg" + default = "projectId" } variable "environment" { From 
7b2887ae150a7f9a1bed9cd19852f0336b735b58 Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 24 Jan 2024 14:20:12 +0100 Subject: [PATCH 13/32] rephrase the comments in the post and pre requisties.py --- .../test_preparation/post_requisites.py | 5 ++++- .../test_preparation/pre_requisites.py | 11 ++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py index 9e9404474..01c315bde 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py @@ -3,7 +3,10 @@ """ -This is an example of what you could do as a post-requisite. In the pre_requistie.py we deployed the athena dabase that would be tested. +This is an example of what you could do as a post-requisite. In the pre_requistie.py. It is intended to be used as a clean up step +to remove any data set, or reset your system to its initial state. + +In this scenario we deployed on the prerequisites an athena dabase that would be tested. Now, in this post_requisite.py we delete the database and the tables inside of it. """ diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py index 74e224a58..698e5b9b1 100644 --- a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py +++ b/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py @@ -5,10 +5,15 @@ """ -This is an example of what you could do as a pre-requisite before executing your great expectations tests, in this example we deploy an athena database -with two tables: person and address. 
+This is an example of what you could do as a pre-requisite before executing your great expectations tests. +This is intended to prepare your data sets or even trigger your ETL pipelines. + + +In this specific example we deploy a sample athena database +with two tables: person and address taht will be used on the Demo test cases. + +In the post_requisite.py we will delete this athena database. -In the post_requisite.py we will delete this athena database """ From 3ad8d40f7075f3146cde7a7080f1e2dcb91a187d Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 24 Jan 2024 15:32:45 +0100 Subject: [PATCH 14/32] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index cab0c8f6f..d7f1ef578 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Changelog ### Added +- Added ETL pipeline testing QS (e2e-python) ([#985](https://github.com/opendevstack/ods-quickstarters/pull/985)) - Added secret scanning in docker plain ([#963](https://github.com/opendevstack/ods-quickstarters/pull/963)) - Added Nodejs20 agent ([#962](https://github.com/opendevstack/ods-quickstarters/issues/962)) - Update Streamlit and Python quickstarters and agent ([#968](https://github.com/opendevstack/ods-quickstarters/issues/968)) From 9026ecc2b728f27fd96e35a057ddb26e6623800e Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 24 Jan 2024 16:10:26 +0100 Subject: [PATCH 15/32] Update CHANGELOG.md including ods changes --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d7f1ef578..8b8f27bc3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,8 +2,10 @@ ### Added - Added ETL pipeline testing QS (e2e-python) ([#985](https://github.com/opendevstack/ods-quickstarters/pull/985)) +- Update gateway-Nginx quickstarter ([#983](https://github.com/opendevstack/ods-quickstarters/pull/983)) - Added secret scanning in docker plain 
([#963](https://github.com/opendevstack/ods-quickstarters/pull/963)) - Added Nodejs20 agent ([#962](https://github.com/opendevstack/ods-quickstarters/issues/962)) +- Update release manager readme ([#969](https://github.com/opendevstack/ods-quickstarters/issues/969)) - Update Streamlit and Python quickstarters and agent ([#968](https://github.com/opendevstack/ods-quickstarters/issues/968)) ### Modified @@ -406,4 +408,4 @@ ## [0.1.0 ods-project-quickstarters] - 2018-07-27 -Initial release. +Initial release. \ No newline at end of file From 4fab27a4c265387d2115e48155a4522b5d7331a9 Mon Sep 17 00:00:00 2001 From: Your Name Date: Fri, 26 Jan 2024 11:23:59 +0100 Subject: [PATCH 16/32] Update README.md --- e2e-python/files/README.md | 130 +++++++++++++++++++++++++++++++++++++ 1 file changed, 130 insertions(+) create mode 100644 e2e-python/files/README.md diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md new file mode 100644 index 000000000..8b65d0886 --- /dev/null +++ b/e2e-python/files/README.md @@ -0,0 +1,130 @@ +# Python end-to-end tests + +This is a python based quicktarter intended to develop end-to-end tests for data pipelines. +In order to do that it uses two testing technologies: Great Expectations and Pytest. + +This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. + +How does it work: + 1. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. + 2. In AWS it creates and trigger a code pipeline that will execute the tests + 3. When the AWS code pipeline finish, it creates the necessary reports and sends them back to Jenkins. + 4. 
The Jenkins pipeline finish when receiving the reports + + + +## Stages: installation / integration / acceptance + +With the introduction of the release manager concept in OpenDevStack 3, e2e test quickstarters are expected to run tests in three different stages (installation, integration & acceptance) and generate a JUnit XML result file for each of these stages. + +Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. + +## How to prepare data: +In case that you need to prepare data before the execution of your Great Expecations tests you could use the test_preparation folder, that contains the pre_requisites.py and post_requisites.py, these scripts +will be executed before and after the execution of your Great Expectations tests. + +In the pre_requistes.py you can do things such as prepare your data sets, create temporally resources... or even trigger your ETL pipelines. +After the execution of your Great Expectations test, the post_requisites.py will be executed. It is intended to be used as a clean-up step to remove any data set, +or reset your system to its initial state. + +For pytest you can configure pre and post requistes on your own since it's much more flexible than Great Expectations. +The tests will be executed in this order: + 1. pre_requistes.py + 2. Great Expecations test suite + 3. post_requistes.py + 4. Pytest test suite + +## Running end-to-end tests + +To execute all end-to-end tests: + +1. Set up AWS account credentials in environment folder's yml files. +2. Customize json files with the desired identification namings for the AWS resources that will be created with the quickestarters execution. +3. Modify the great_expectations and pytes folder to execute your tests located in the 'tests/acceptance/' directory. 
+ +# Pipeline execution options: +- By a commit with a change in the code the pipeline in jenkins will be automatically executed +- From jenkins manually +- Automatic from a test (create a function to automatize the trigger of the pipeline) + +## How to use this Stack? + +The behavior of a stack is determined by its purpose and the set of input parameters. Here is an overview of the *inputs* and *outputs* available for this stack. + + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 1.0 | +| [aws](#requirement\_aws) | 4.67.0 | +| [random](#requirement\_random) | 3.5.1 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 4.67.0 | +| [random](#provider\_random) | 3.5.1 | + +## Modules + +| Name | Description | +|-----------------------------------------------------------------------------------------------------------------|-------------| +| [modules\codebuild]() | resource | +| [modules\codepipeline]() | resource | +| [modules\iam_roles]() | resource | +| [modules\s3-bucket]() | resource | +| [modules\s3-bucket-policy](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Resources + +| Name | Type | +|--------------------------------------------------------------------------------------------------------------------------------------------|------| +| [aws_codebuild_project.build_project](https://registry.terraform.io/providers/hashicorp/...) 
| resource | +| [aws_codepipeline.codepipeline]() | resource | +| [aws_iam_role.codepipeline_role]() | resource | +| [aws_iam_role.codebuild_role]() | resource | +| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_iam_role_policy.codebuild_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_policy.allow_access_from_another_account](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-cp](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.e2e_results_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-artfcs](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.source_bitbucket_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [random_id.id](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [local_file.terraform-data](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [time_static.deployment](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | 
+|------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------|-----------------------|:--------:| +| [codebuild\_project\_name](#input\_codebuild\_project\_name) | The name of the AWS codebuild project. | `string` | `"codebuild-project"` | no | +| [codepipeline\_name](#input\_codepipeline\_name) | The name of the AWS codepipeline. | `string` | `"test-codepipeline"` | no | +| [codepipeline\_bucket\_name](#input\_codepipeline\_bucket\_name) | The name of the codepipeline artifacts S3 bucket. | `string` | `"cpplartifacts"` | no | +| [bitbucket\_source\_bucket\_name](#input\_bitbucket\_source\_bucket\_name) | The name of the source S3 bucket. | `string` | `"src-bitbucket"` | no | +| [e2e\_results\_bucket\_name](#input\_e2e\_results\_bucket\_name) | The name of the results S3 bucket. | `string` | `"test-results"` | no | +| [pipeline\_role\_name](#input\_pipeline\_role\_name) | The name of the codepipeline role. | `string` | `"test-codePipelineRole"` | no | +| [codebuild\_role\_name](#input\_codebuild\_role\_name) | The name of the codebuild role. | `string` | `"test-codeBuildRole"` | no | +| [codepipeline\_policy\_name](#input\_codepipeline\_policy\_name) | The name of the codepipeline policy. | `string` | `"codepipeline_policy"` | no | +| [codebuild\_policy\_name](#input\_codebuild\_policy\_name) | The name of the codebuild policy. | `string` | `"codebuild_policy"` | no | +| [meta\_environment](#input\_meta\_environment) | The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION. | `string` | `"DEVELOPMENT"` | no | +| [name](#input\_name) | The name of the stack. | `string` | `"stack-aws-quickstarter"` | no | + +## Outputs + +The output generated by terraform are used for internal quickestarter's purposes. 
+ + +## Environments +The pipeline supports multiple environments (DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. +The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. +The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). + +## Problems? Questions? Suggestions? + +In case of problems, questions or suggestions, feel free to file an issue with the respective project's repository. Thanks! + From 8d14340d95dc7308b2e481cb1e3af2607ab7f498 Mon Sep 17 00:00:00 2001 From: Your Name Date: Fri, 26 Jan 2024 11:25:22 +0100 Subject: [PATCH 17/32] Update README.md root --- e2e-python/README.md | 30 ++++++++- e2e-python/files/README.md | 130 ------------------------------------- 2 files changed, 28 insertions(+), 132 deletions(-) delete mode 100644 e2e-python/files/README.md diff --git a/e2e-python/README.md b/e2e-python/README.md index 2a6df7d86..8b65d0886 100644 --- a/e2e-python/README.md +++ b/e2e-python/README.md @@ -1,6 +1,17 @@ # Python end-to-end tests -This end-to-end testing project was generated from the *e2e-python* ODS quickstarter. +This is a python based quicktarter intended to develop end-to-end tests for data pipelines. +In order to do that it uses two testing technologies: Great Expectations and Pytest. + +This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. + +How does it work: + 1. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. + 2. In AWS it creates and trigger a code pipeline that will execute the tests + 3. When the AWS code pipeline finish, it creates the necessary reports and sends them back to Jenkins. + 4. 
The Jenkins pipeline finish when receiving the reports + + ## Stages: installation / integration / acceptance @@ -8,6 +19,21 @@ With the introduction of the release manager concept in OpenDevStack 3, e2e test Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. +## How to prepare data: +In case that you need to prepare data before the execution of your Great Expecations tests you could use the test_preparation folder, that contains the pre_requisites.py and post_requisites.py, these scripts +will be executed before and after the execution of your Great Expectations tests. + +In the pre_requistes.py you can do things such as prepare your data sets, create temporally resources... or even trigger your ETL pipelines. +After the execution of your Great Expectations test, the post_requisites.py will be executed. It is intended to be used as a clean-up step to remove any data set, +or reset your system to its initial state. + +For pytest you can configure pre and post requistes on your own since it's much more flexible than Great Expectations. +The tests will be executed in this order: + 1. pre_requistes.py + 2. Great Expecations test suite + 3. post_requistes.py + 4. Pytest test suite + ## Running end-to-end tests To execute all end-to-end tests: @@ -94,7 +120,7 @@ The output generated by terraform are used for internal quickestarter's purposes ## Environments -The pipeline supports multiple environments (Testing/DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. +The pipeline supports multiple environments (DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. 
The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md deleted file mode 100644 index 8b65d0886..000000000 --- a/e2e-python/files/README.md +++ /dev/null @@ -1,130 +0,0 @@ -# Python end-to-end tests - -This is a python based quicktarter intended to develop end-to-end tests for data pipelines. -In order to do that it uses two testing technologies: Great Expectations and Pytest. - -This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. - -How does it work: - 1. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. - 2. In AWS it creates and trigger a code pipeline that will execute the tests - 3. When the AWS code pipeline finish, it creates the necessary reports and sends them back to Jenkins. - 4. The Jenkins pipeline finish when receiving the reports - - - -## Stages: installation / integration / acceptance - -With the introduction of the release manager concept in OpenDevStack 3, e2e test quickstarters are expected to run tests in three different stages (installation, integration & acceptance) and generate a JUnit XML result file for each of these stages. - -Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. - -## How to prepare data: -In case that you need to prepare data before the execution of your Great Expecations tests you could use the test_preparation folder, that contains the pre_requisites.py and post_requisites.py, these scripts -will be executed before and after the execution of your Great Expectations tests. - -In the pre_requistes.py you can do things such as prepare your data sets, create temporally resources... or even trigger your ETL pipelines. 
-After the execution of your Great Expectations test, the post_requisites.py will be executed. It is intended to be used as a clean-up step to remove any data set, -or reset your system to its initial state. - -For pytest you can configure pre and post requistes on your own since it's much more flexible than Great Expectations. -The tests will be executed in this order: - 1. pre_requistes.py - 2. Great Expecations test suite - 3. post_requistes.py - 4. Pytest test suite - -## Running end-to-end tests - -To execute all end-to-end tests: - -1. Set up AWS account credentials in environment folder's yml files. -2. Customize json files with the desired identification namings for the AWS resources that will be created with the quickestarters execution. -3. Modify the great_expectations and pytes folder to execute your tests located in the 'tests/acceptance/' directory. - -# Pipeline execution options: -- By a commit with a change in the code the pipeline in jenkins will be automatically executed -- From jenkins manually -- Automatic from a test (create a function to automatize the trigger of the pipeline) - -## How to use this Stack? - -The behavior of a stack is determined by its purpose and the set of input parameters. Here is an overview of the *inputs* and *outputs* available for this stack. 
- - -## Requirements - -| Name | Version | -|------|---------| -| [terraform](#requirement\_terraform) | >= 1.0 | -| [aws](#requirement\_aws) | 4.67.0 | -| [random](#requirement\_random) | 3.5.1 | - -## Providers - -| Name | Version | -|------|---------| -| [aws](#provider\_aws) | 4.67.0 | -| [random](#provider\_random) | 3.5.1 | - -## Modules - -| Name | Description | -|-----------------------------------------------------------------------------------------------------------------|-------------| -| [modules\codebuild]() | resource | -| [modules\codepipeline]() | resource | -| [modules\iam_roles]() | resource | -| [modules\s3-bucket]() | resource | -| [modules\s3-bucket-policy](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | - -## Resources - -| Name | Type | -|--------------------------------------------------------------------------------------------------------------------------------------------|------| -| [aws_codebuild_project.build_project](https://registry.terraform.io/providers/hashicorp/...) 
| resource | -| [aws_codepipeline.codepipeline]() | resource | -| [aws_iam_role.codepipeline_role]() | resource | -| [aws_iam_role.codebuild_role]() | resource | -| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_iam_role_policy.codebuild_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_policy.allow_access_from_another_account](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_versioning.s3versioning-cp](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket.e2e_results_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_versioning.s3versioning-artfcs](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket.source_bitbucket_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_versioning.s3versioning-bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [random_id.id](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [local_file.terraform-data](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [time_static.deployment](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | - -## Inputs - -| Name | Description | Type | Default | Required | 
-|------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------|-----------------------|:--------:| -| [codebuild\_project\_name](#input\_codebuild\_project\_name) | The name of the AWS codebuild project. | `string` | `"codebuild-project"` | no | -| [codepipeline\_name](#input\_codepipeline\_name) | The name of the AWS codepipeline. | `string` | `"test-codepipeline"` | no | -| [codepipeline\_bucket\_name](#input\_codepipeline\_bucket\_name) | The name of the codepipeline artifacts S3 bucket. | `string` | `"cpplartifacts"` | no | -| [bitbucket\_source\_bucket\_name](#input\_bitbucket\_source\_bucket\_name) | The name of the source S3 bucket. | `string` | `"src-bitbucket"` | no | -| [e2e\_results\_bucket\_name](#input\_e2e\_results\_bucket\_name) | The name of the results S3 bucket. | `string` | `"test-results"` | no | -| [pipeline\_role\_name](#input\_pipeline\_role\_name) | The name of the codepipeline role. | `string` | `"test-codePipelineRole"` | no | -| [codebuild\_role\_name](#input\_codebuild\_role\_name) | The name of the codebuild role. | `string` | `"test-codeBuildRole"` | no | -| [codepipeline\_policy\_name](#input\_codepipeline\_policy\_name) | The name of the codepipeline policy. | `string` | `"codepipeline_policy"` | no | -| [codebuild\_policy\_name](#input\_codebuild\_policy\_name) | The name of the codebuild policy. | `string` | `"codebuild_policy"` | no | -| [meta\_environment](#input\_meta\_environment) | The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION. | `string` | `"DEVELOPMENT"` | no | -| [name](#input\_name) | The name of the stack. | `string` | `"stack-aws-quickstarter"` | no | - -## Outputs - -The output generated by terraform are used for internal quickestarter's purposes. 
- - -## Environments -The pipeline supports multiple environments (DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. -The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. -The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). - -## Problems? Questions? Suggestions? - -In case of problems, questions or suggestions, feel free to file an issue with the respective project's repository. Thanks! - From ce8f907a63083be6ada177b09d7d3228fb301ad4 Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 29 Jan 2024 10:24:32 +0100 Subject: [PATCH 18/32] README.md root now is generic and intern README.md added + requirements --- e2e-python/README.md | 131 +---------------------------------- e2e-python/files/README.md | 137 +++++++++++++++++++++++++++++++++++++ 2 files changed, 140 insertions(+), 128 deletions(-) create mode 100644 e2e-python/files/README.md diff --git a/e2e-python/README.md b/e2e-python/README.md index 8b65d0886..160699593 100644 --- a/e2e-python/README.md +++ b/e2e-python/README.md @@ -1,130 +1,5 @@ -# Python end-to-end tests +# e2e-python Quickstarter (inf-terraform-aws) -This is a python based quicktarter intended to develop end-to-end tests for data pipelines. -In order to do that it uses two testing technologies: Great Expectations and Pytest. - -This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. - -How does it work: - 1. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. - 2. In AWS it creates and trigger a code pipeline that will execute the tests - 3. When the AWS code pipeline finish, it creates the necessary reports and sends them back to Jenkins. - 4. 
The Jenkins pipeline finish when receiving the reports - - - -## Stages: installation / integration / acceptance - -With the introduction of the release manager concept in OpenDevStack 3, e2e test quickstarters are expected to run tests in three different stages (installation, integration & acceptance) and generate a JUnit XML result file for each of these stages. - -Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. - -## How to prepare data: -In case that you need to prepare data before the execution of your Great Expecations tests you could use the test_preparation folder, that contains the pre_requisites.py and post_requisites.py, these scripts -will be executed before and after the execution of your Great Expectations tests. - -In the pre_requistes.py you can do things such as prepare your data sets, create temporally resources... or even trigger your ETL pipelines. -After the execution of your Great Expectations test, the post_requisites.py will be executed. It is intended to be used as a clean-up step to remove any data set, -or reset your system to its initial state. - -For pytest you can configure pre and post requistes on your own since it's much more flexible than Great Expectations. -The tests will be executed in this order: - 1. pre_requistes.py - 2. Great Expecations test suite - 3. post_requistes.py - 4. Pytest test suite - -## Running end-to-end tests - -To execute all end-to-end tests: - -1. Set up AWS account credentials in environment folder's yml files. -2. Customize json files with the desired identification namings for the AWS resources that will be created with the quickestarters execution. -3. Modify the great_expectations and pytes folder to execute your tests located in the 'tests/acceptance/' directory. 
- -# Pipeline execution options: -- By a commit with a change in the code the pipeline in jenkins will be automatically executed -- From jenkins manually -- Automatic from a test (create a function to automatize the trigger of the pipeline) - -## How to use this Stack? - -The behavior of a stack is determined by its purpose and the set of input parameters. Here is an overview of the *inputs* and *outputs* available for this stack. - - -## Requirements - -| Name | Version | -|------|---------| -| [terraform](#requirement\_terraform) | >= 1.0 | -| [aws](#requirement\_aws) | 4.67.0 | -| [random](#requirement\_random) | 3.5.1 | - -## Providers - -| Name | Version | -|------|---------| -| [aws](#provider\_aws) | 4.67.0 | -| [random](#provider\_random) | 3.5.1 | - -## Modules - -| Name | Description | -|-----------------------------------------------------------------------------------------------------------------|-------------| -| [modules\codebuild]() | resource | -| [modules\codepipeline]() | resource | -| [modules\iam_roles]() | resource | -| [modules\s3-bucket]() | resource | -| [modules\s3-bucket-policy](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | - -## Resources - -| Name | Type | -|--------------------------------------------------------------------------------------------------------------------------------------------|------| -| [aws_codebuild_project.build_project](https://registry.terraform.io/providers/hashicorp/...) 
| resource | -| [aws_codepipeline.codepipeline]() | resource | -| [aws_iam_role.codepipeline_role]() | resource | -| [aws_iam_role.codebuild_role]() | resource | -| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_iam_role_policy.codebuild_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_policy.allow_access_from_another_account](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_versioning.s3versioning-cp](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket.e2e_results_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_versioning.s3versioning-artfcs](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket.source_bitbucket_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [aws_s3_bucket_versioning.s3versioning-bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [random_id.id](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [local_file.terraform-data](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | -| [time_static.deployment](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | - -## Inputs - -| Name | Description | Type | Default | Required | 
-|------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------|-----------------------|:--------:| -| [codebuild\_project\_name](#input\_codebuild\_project\_name) | The name of the AWS codebuild project. | `string` | `"codebuild-project"` | no | -| [codepipeline\_name](#input\_codepipeline\_name) | The name of the AWS codepipeline. | `string` | `"test-codepipeline"` | no | -| [codepipeline\_bucket\_name](#input\_codepipeline\_bucket\_name) | The name of the codepipeline artifacts S3 bucket. | `string` | `"cpplartifacts"` | no | -| [bitbucket\_source\_bucket\_name](#input\_bitbucket\_source\_bucket\_name) | The name of the source S3 bucket. | `string` | `"src-bitbucket"` | no | -| [e2e\_results\_bucket\_name](#input\_e2e\_results\_bucket\_name) | The name of the results S3 bucket. | `string` | `"test-results"` | no | -| [pipeline\_role\_name](#input\_pipeline\_role\_name) | The name of the codepipeline role. | `string` | `"test-codePipelineRole"` | no | -| [codebuild\_role\_name](#input\_codebuild\_role\_name) | The name of the codebuild role. | `string` | `"test-codeBuildRole"` | no | -| [codepipeline\_policy\_name](#input\_codepipeline\_policy\_name) | The name of the codepipeline policy. | `string` | `"codepipeline_policy"` | no | -| [codebuild\_policy\_name](#input\_codebuild\_policy\_name) | The name of the codebuild policy. | `string` | `"codebuild_policy"` | no | -| [meta\_environment](#input\_meta\_environment) | The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION. | `string` | `"DEVELOPMENT"` | no | -| [name](#input\_name) | The name of the stack. | `string` | `"stack-aws-quickstarter"` | no | - -## Outputs - -The output generated by terraform are used for internal quickestarter's purposes. 
- - -## Environments -The pipeline supports multiple environments (DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. -The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. -The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). - -## Problems? Questions? Suggestions? - -In case of problems, questions or suggestions, feel free to file an issue with the respective project's repository. Thanks! +Documentation is located in our [official documentation](https://www.opendevstack.org/ods-documentation/ods-quickstarters/latest/index.html) +Please update documentation in the [antora page directory](https://github.com/opendevstack/ods-quickstarters/tree/master/docs/modules/quickstarters/pages) diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md new file mode 100644 index 000000000..a2332bedf --- /dev/null +++ b/e2e-python/files/README.md @@ -0,0 +1,137 @@ +# Python end-to-end tests + +This is a python based quicktarter intended to develop end-to-end tests for data pipelines. +In order to do that it uses two testing technologies: Great Expectations and Pytest. + +This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. + +How does it work: + 1. The ODS Jenkins pipeline starts. + 2. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. + 3. In AWS it creates and trigger a code pipeline that will execute the tests. + 4. When the AWS code pipeline finish, it creates the necessary reports and sends them back to Jenkins. + 5. The Jenkins pipeline finish when receiving the reports. 
+
+
+
+## Stages: installation / integration / acceptance
+
+With the introduction of the release manager concept in OpenDevStack 3, e2e test quickstarters are expected to run tests in three different stages (installation, integration & acceptance) and generate a JUnit XML result file for each of these stages.
+
+Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager.
+
+## How to prepare data:
+In case that you need to prepare data before the execution of your Great Expecations tests you could use the test_preparation folder, that contains the pre_requisites.py and post_requisites.py, these scripts
+will be executed before and after the execution of your Great Expectations tests.
+
+In the pre_requisites.py you can do things such as prepare your data sets, create temporary resources... or even trigger your ETL pipelines.
+After the execution of your Great Expectations test, the post_requisites.py will be executed. It is intended to be used as a clean-up step to remove any data set,
+or reset your system to its initial state.
+
+For pytest you can configure pre and post requisites on your own since it's much more flexible than Great Expectations.
+The tests will be executed in this order:
+ 1. pre_requisites.py
+ 2. Great Expectations test suite
+ 3. post_requisites.py
+ 4. Pytest test suite
+
+## Running end-to-end tests
+
+To execute all end-to-end tests:
+
+1. Set up AWS account credentials in environment folder's yml files.
+2. Customize json files with the desired identification namings for the AWS resources that will be created with the quickestarters execution.
+3. Modify the great_expectations and pytes folder to execute your tests located in the 'tests/acceptance/' directory.
+ +# Pipeline execution options: +- By a commit with a change in the code the pipeline in jenkins will be automatically executed +- From jenkins manually +- Automatic from a test (create a function to automatize the trigger of the pipeline) + +## How to use this Stack? + +The behavior of a stack is determined by its purpose and the set of input parameters. Here is an overview of the *inputs* and *outputs* available for this stack. + + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 1.0 | +| [aws](#requirement\_aws) | 4.67.0 | +| [random](#requirement\_random) | 3.5.1 | +| [great_expectations](#requirement\_great_expectations) | 0.18.3 | +| [pytest](#requirement\_pytest) | 7.4.3 | +| [boto3](#requirement\_boto3) | 1.29.6 | +| [allure-pytest](#requirement\_allure-pytest) | 2.13.2 | +| [allure-combine](#requirement\_allure-combine) | 1.0.11 | + + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 4.67.0 | +| [random](#provider\_random) | 3.5.1 | + +## Modules + +| Name | Description | +|-----------------------------------------------------------------------------------------------------------------|-------------| +| [modules\codebuild]() | resource | +| [modules\codepipeline]() | resource | +| [modules\iam_roles]() | resource | +| [modules\s3-bucket]() | resource | +| [modules\s3-bucket-policy](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Resources + +| Name | Type | +|--------------------------------------------------------------------------------------------------------------------------------------------|------| +| [aws_codebuild_project.build_project](https://registry.terraform.io/providers/hashicorp/...) 
| resource | +| [aws_codepipeline.codepipeline]() | resource | +| [aws_iam_role.codepipeline_role]() | resource | +| [aws_iam_role.codebuild_role]() | resource | +| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_iam_role_policy.codebuild_policy](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_policy.allow_access_from_another_account](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-cp](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.e2e_results_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-artfcs](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket.source_bitbucket_bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [aws_s3_bucket_versioning.s3versioning-bucket](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [random_id.id](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [local_file.terraform-data](https://registry.terraform.io/providers/hashicorp/random/3.5.1/docs/resources/id) | resource | +| [time_static.deployment](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | 
+|------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------|-----------------------|:--------:| +| [codebuild\_project\_name](#input\_codebuild\_project\_name) | The name of the AWS codebuild project. | `string` | `"codebuild-project"` | no | +| [codepipeline\_name](#input\_codepipeline\_name) | The name of the AWS codepipeline. | `string` | `"test-codepipeline"` | no | +| [codepipeline\_bucket\_name](#input\_codepipeline\_bucket\_name) | The name of the codepipeline artifacts S3 bucket. | `string` | `"cpplartifacts"` | no | +| [bitbucket\_source\_bucket\_name](#input\_bitbucket\_source\_bucket\_name) | The name of the source S3 bucket. | `string` | `"src-bitbucket"` | no | +| [e2e\_results\_bucket\_name](#input\_e2e\_results\_bucket\_name) | The name of the results S3 bucket. | `string` | `"test-results"` | no | +| [pipeline\_role\_name](#input\_pipeline\_role\_name) | The name of the codepipeline role. | `string` | `"test-codePipelineRole"` | no | +| [codebuild\_role\_name](#input\_codebuild\_role\_name) | The name of the codebuild role. | `string` | `"test-codeBuildRole"` | no | +| [codepipeline\_policy\_name](#input\_codepipeline\_policy\_name) | The name of the codepipeline policy. | `string` | `"codepipeline_policy"` | no | +| [codebuild\_policy\_name](#input\_codebuild\_policy\_name) | The name of the codebuild policy. | `string` | `"codebuild_policy"` | no | +| [meta\_environment](#input\_meta\_environment) | The type of the environment. Can be any of DEVELOPMENT, EVALUATION, PRODUCTIVE, QUALITYASSURANCE, TRAINING, VALIDATION. | `string` | `"DEVELOPMENT"` | no | +| [name](#input\_name) | The name of the stack. | `string` | `"stack-aws-quickstarter"` | no | + +## Outputs + +The output generated by terraform are used for internal quickestarter's purposes. 
+ + +## Environments +The pipeline supports multiple environments (DEV/QA/PROD) within OpenDevStack. The behaviour of the pipeline in the environments can be controlled within the **environments** directory. +The *.yml files define the Jenkins secrets to read and are used to deploy into the right environments. +The *.json files can override variables from **variables.tf** in case different environments request different inputs (e.g. deploy a smaller version of the stack in DEV). + +## Problems? Questions? Suggestions? + +In case of problems, questions or suggestions, feel free to file an issue with the respective project's repository. Thanks! + From 78dfc75b42b28c0b3cbd611f53075d534c9fbe2e Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 29 Jan 2024 10:27:49 +0100 Subject: [PATCH 19/32] README.md root link updated --- e2e-python/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e-python/README.md b/e2e-python/README.md index 160699593..7290647ad 100644 --- a/e2e-python/README.md +++ b/e2e-python/README.md @@ -1,5 +1,5 @@ # e2e-python Quickstarter (inf-terraform-aws) -Documentation is located in our [official documentation](https://www.opendevstack.org/ods-documentation/ods-quickstarters/latest/index.html) +Documentation is located in our [official documentation](https://www.opendevstack.org/ods-documentation/opendevstack/latest/getting-started/index.html) Please update documentation in the [antora page directory](https://github.com/opendevstack/ods-quickstarters/tree/master/docs/modules/quickstarters/pages) From 6c4c43adcb496b6b47026746646480d396f17389 Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 29 Jan 2024 10:34:34 +0100 Subject: [PATCH 20/32] README.md root updated --- e2e-python/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e-python/README.md b/e2e-python/README.md index 7290647ad..8ded3b2c9 100644 --- a/e2e-python/README.md +++ b/e2e-python/README.md @@ -1,4 +1,4 @@ -# e2e-python 
Quickstarter (inf-terraform-aws) +# e2e-python Quickstarter (e2e-python) Documentation is located in our [official documentation](https://www.opendevstack.org/ods-documentation/opendevstack/latest/getting-started/index.html) From 6529de4e6b5ce29117739588b45ac82aa68f684b Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 29 Jan 2024 10:58:31 +0100 Subject: [PATCH 21/32] update README.md --- e2e-python/files/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md index a2332bedf..dfbafd1b7 100644 --- a/e2e-python/files/README.md +++ b/e2e-python/files/README.md @@ -5,7 +5,7 @@ In order to do that it uses two testing technologies: Great Expectations and Pyt This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. -How does it work: +How it works: 1. The ODS Jenkins pipeline starts. 2. It compresses the bitbucket repository containing the tests, and it places it in an S3 bucket into the AWS account specified. 3. In AWS it creates and trigger a code pipeline that will execute the tests. From a6c84ea42d36958c9185d5265e3aeff9c8dc6567 Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 29 Jan 2024 12:00:25 +0100 Subject: [PATCH 22/32] update README.md --- e2e-python/files/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md index dfbafd1b7..1a5a9cc71 100644 --- a/e2e-python/files/README.md +++ b/e2e-python/files/README.md @@ -20,7 +20,7 @@ With the introduction of the release manager concept in OpenDevStack 3, e2e test Make sure to keep `junit` as reporter and to not change the output path for the JUnit results files as they will be stashed by Jenkins and reused by the release manager. 
-## How to prepare data: +## How to prepare data In case that you need to prepare data before the execution of your Great Expecations tests you could use the test_preparation folder, that contains the pre_requisites.py and post_requisites.py, these scripts will be executed before and after the execution of your Great Expectations tests. @@ -43,7 +43,7 @@ To execute all end-to-end tests: 2. Customize json files with the desired identification namings for the AWS resources that will be created with the quickestarters execution. 3. Modify the great_expectations and pytes folder to execute your tests located in the 'tests/acceptance/' directory. -# Pipeline execution options: +# Pipeline execution options - By a commit with a change in the code the pipeline in jenkins will be automatically executed - From jenkins manually - Automatic from a test (create a function to automatize the trigger of the pipeline) From 281a62bb02fcc2ab09bce3837c563fa5e844503e Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 29 Jan 2024 13:32:36 +0100 Subject: [PATCH 23/32] add reference e2e-python to CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b2390806..f2935cabd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Changelog ### Added +- Added ETL pipeline testing QS (e2e-python) ([#985](https://github.com/opendevstack/ods-quickstarters/pull/985)) - Update gateway-Nginx quickstarter ([#983](https://github.com/opendevstack/ods-quickstarters/pull/983)) - Added secret scanning in docker plain ([#963](https://github.com/opendevstack/ods-quickstarters/pull/963)) - Added Nodejs20 agent ([#962](https://github.com/opendevstack/ods-quickstarters/issues/962)) From 2fba61e6b333ae66b155ff04ae93dc0dad138d09 Mon Sep 17 00:00:00 2001 From: Roi Carrera Date: Tue, 30 Jan 2024 07:57:02 +0100 Subject: [PATCH 24/32] removed unneeded commented line in Jenkinsfile.template added updates from ods --- .github/dependabot.yml | 3 + 
be-gateway-nginx/files/docker/Dockerfile | 2 +- be-gateway-nginx/files/metadata.yml | 2 +- .../quickstarters/pages/be-gateway-nginx.adoc | 4 +- e2e-python/Jenkinsfile.template | 1 - release-manager/files/README.md | 246 +++++++++++++++++- 6 files changed, 241 insertions(+), 17 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index dfd0e3086..9c253b318 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,3 +8,6 @@ updates: schedule: # Check for updates to GitHub Actions every week interval: "weekly" + labels: + - "dependencies" + - "skip changelog" \ No newline at end of file diff --git a/be-gateway-nginx/files/docker/Dockerfile b/be-gateway-nginx/files/docker/Dockerfile index 6b2093e6a..3aba489f2 100644 --- a/be-gateway-nginx/files/docker/Dockerfile +++ b/be-gateway-nginx/files/docker/Dockerfile @@ -1,5 +1,5 @@ # https://github.com/openresty/docker-openresty -FROM openresty/openresty:1.21.4.1-rocky +FROM openresty/openresty:1.25.3.1-1-rocky ENV LANG=C.UTF-8 diff --git a/be-gateway-nginx/files/metadata.yml b/be-gateway-nginx/files/metadata.yml index 583757736..f1ab86534 100644 --- a/be-gateway-nginx/files/metadata.yml +++ b/be-gateway-nginx/files/metadata.yml @@ -1,6 +1,6 @@ --- name: nginx -description: "Enhanced nginx with Lua embeded. nginx [engine x] is an HTTP and reverse proxy server, a mail proxy server, and a generic TCP/UDP proxy server. Technologies: OpenResty/nginx 1.19.3.2" +description: "Enhanced nginx with Lua embeded. nginx [engine x] is an HTTP and reverse proxy server, a mail proxy server, and a generic TCP/UDP proxy server. 
Technologies: OpenResty/nginx 1.25.3.1-1" supplier: https://openresty.org version: 4.x type: ods-service diff --git a/docs/modules/quickstarters/pages/be-gateway-nginx.adoc b/docs/modules/quickstarters/pages/be-gateway-nginx.adoc index 4d77a6631..1054f62e2 100644 --- a/docs/modules/quickstarters/pages/be-gateway-nginx.adoc +++ b/docs/modules/quickstarters/pages/be-gateway-nginx.adoc @@ -109,9 +109,9 @@ Note that the xref:jenkins-shared-library:labelling.adoc[OpenShift resources wil ```yaml name: nginx -description: "Enhanced nginx with Lua embeded. nginx [engine x] is an HTTP and reverse proxy server, a mail proxy server, and a generic TCP/UDP proxy server. Technologies: OpenResty/nginx 1.19.3.2" +description: "Enhanced nginx with Lua embeded. nginx [engine x] is an HTTP and reverse proxy server, a mail proxy server, and a generic TCP/UDP proxy server. Technologies: OpenResty/nginx 1.25.3.1-1" supplier: https://openresty.org -version: 1.21.4.1 +version: 1.25.3.1-1 type: ods-service role: integration ``` diff --git a/e2e-python/Jenkinsfile.template b/e2e-python/Jenkinsfile.template index 1303f3d29..531ab53ec 100644 --- a/e2e-python/Jenkinsfile.template +++ b/e2e-python/Jenkinsfile.template @@ -78,7 +78,6 @@ def generateTerraformOutputsFile() { def stageGetNamesFromOutputs() { def outputNames = [:] def terraformOutputJson = readJSON file: 'terraform_outputs.json' - //def environmentVarsJson = readJSON file: env.auto.tfvars.json outputNames.aws_codepipeline_name = terraformOutputJson.codepipeline_name.value outputNames.bitbuckets3_name = terraformOutputJson.bitbucket_s3bucket_name.value diff --git a/release-manager/files/README.md b/release-manager/files/README.md index 25bfa38ca..4d1c1144f 100644 --- a/release-manager/files/README.md +++ b/release-manager/files/README.md @@ -16,7 +16,7 @@ The release manager supports the orchestration of multiple repositories into a l ### Automated Resolution of Dependencies -The release manager automatically resolves dependencies 
between repositories to be orchestrated so that they can be delivered in the correct order. Currently, repositories that want to be orchestrated need to be added to the `repositories` list inside `metadata.yml`: +The library automatically resolves dependencies between repositories to be orchestrated so that they can be delivered in the correct order. Currently, repositories that want to be orchestrated need to be added to the `repositories` list inside a release manager component's `metadata.yml`: ``` id: PHOENIX @@ -38,7 +38,7 @@ dependencies: - A ``` -The library supports the following repository types: `ods`, `ods-service`, and `ods-test`. Setting a repository type is required so the orchestrator can make correct assumptions based on the nature of the component at hand: +The library supports the following repository types: `ods`, `ods-infra`, `ods-service`, `ods-saas-service`, `ods-test` and `ods-library`. Setting a repository type is required so the orchestrator can make correct assumptions based on the nature of the component at hand: ``` id: PHOENIX @@ -60,17 +60,31 @@ repositories: This type designates ODS components designed for _code development_. Such repositories are based on quickstarters whose names start with `be-`, `ds-`, or `fe-`, for _backend_, _data science_, and _frontend_, respectively. This is the default type. +If you use this type ODS expects to find JUnit XML test results. If you do not have any test results the pipeline will fail. If you are deploying something where JUnit XML test results are not available consider using [Repository Type: ods-service](#repository-type-ods-service). The test results are stored by default in `build/test-results/test` and the location can be modifed using the pipeline option `testResults`. + +#### Repository Type: ods-infra + +This type designates ODS components designed for _consuming on-prem or cloud services_ of arbitrary type using infrastructure as code. 
Such components are based on quickstarters whose names start with `inf-`. + +#### Repository Type: ods-saas-service + +This type designates ODS components designed for _documenting vendor-provided SaaS services_. + #### Repository Type: ods-service -This type designates ODS components designed for _running some service_. Examples include repositories based on the `be-gateway-nginx` quickstarter. +This type designates ODS components designed for _running services_ of arbitrary type. Examples include repositories based on the `airflow-cluster` quickstarter. #### Repository Type: ods-test -This type designates ODS components designed for _running automated tests against a live application_. Such repositories are based on quickstarters whose names start with `e2e-`. +This type designates ODS components designed for _running automated tests against a live application_. Such components are based on quickstarters whose names start with `e2e-`. + +#### Repository Type: ods-library (EXPERIMENTAL feature) + +This type designates ODS components designed for _library components_. Such components are only build in dev (WIP and assemble mode), and not promoted to qa nor prod. ### Automated Resolution of Repository Git URL -If no `url` parameter is provided for a repository configuration in the `metadata.yml`, the release manager will attempt to resolve it based on the component's *origin remote URL* and one of the following: +The library will attempt to resolve the repository URL based on the component's *origin remote URL* and one of the following: 1) If the `name` parameter is provided, and not empty, the last path part of the URL is resolved to `${repo-name}.git`. 2) If no `name` parameter is provided, the last path part of the URL is resolved to `${project-id}-${repo-id}.git` (which is the repository name pattern used with *OpenDevStack*). Here `${project-id}` refers to the lowercase value of the top-level `id` attribute in `metadata.yml`. 
@@ -107,13 +121,50 @@ If no `branch` parameter is provided for a repository, `master` will be assumed. ### Automated Parallelization of Repositories -Instead of merely resolving repositories into a strictly sequential execution model, the release manager automatically understands which repositories form independent groups and can run in parallel for best time-to-feedback and time-to-delivery. +Instead of merely resolving repositories into a strictly sequential execution model, our library automatically understands which repositories form independent groups and can run in parallel for best time-to-feedback and time-to-delivery. + +### Partial rebuilding of components + +By default the shared library will rebuild all type `ods` components, no matter which ones changed since the last release. In order to build _only_ the components whose source code changed (partial rebuilding as we will call it from now on), the following needs to be configured +in `metadata.yml` +``` +allowPartialRebuild : true +``` + +If one repository should always be *rebuilt*, even if partial rebuild is configured on root level, `forceRebuild : true` can be set at repository level, e.g. + +``` +id: PHOENIX +name: Project Phoenix -### Automated Generation of Compliance Documents +repositories: + - id: B + name: my-repo-B + forceRebuild : true +``` -The release manager automatically generates Lean Validation (LeVA) compliance reports based on data in your Jira project, as well as data generated along the automated build, deploy, test, and release process. +It is important to highlight that, despite having configured partial rebuild, the orchestration pipeline will still deploy all the components (both those which changed and which did not) to the target environment. -*Note:* when you configure a Jira service in `metadata.yml`, the release manager expects your Jira project (identified by `id`) to follow a specific structure. 
If your Jira project has not been set up by *OpenDevStack* lately, your structure will most likely be different. While we plan to support custom Jira setups in the future, you may disable the dependency on the Jira service entirely, as shown in the following example: +### Optimization of runtime performance + +By default the shared library will always pull the *agent image* from the internal docker repository. Depending on the +cluster node setup, this may decrease execution performance. In order to re-use loaded images, a knob in the `Jenkinsfile` configuration of the stage `odsOrchestrationPipeline` can be turned on: +``` +alwaysPullImage: true +``` + +By default the orchestration pipeline will create a pod based on the jenkins-base-agent image to do much of its work. +In seldom cases, ususally with a lot of repositories, one may hit an out of memory error on the pod named 'mro-XX'. In this case the below +memory limit should be adjusted (defaulting to '1Gi') +``` +mroAgentMemoryLimit = "1Gi" +``` + +## Automated Generation of Compliance Documents + +The library automatically generates Lean Validation (LeVA) compliance reports based on data in your Jira project, as well as data generated along the automated build, deploy, test, and release process by the release manager component. + +*Note:* when you configure a Jira service in the release manager component's `metadata.yml`, our library expects your Jira project (identified by `id`) to follow a specific structure. If your Jira project has not been set up by *OpenDevStack* lately, your structure will most likely be different. While we plan to support custom Jira setups in the future, you may disable the dependency on the Jira service entirely, as shown in the following example: ``` services: @@ -128,8 +179,179 @@ services: name: leva-documentation ``` -In this case, the release manager will fall back to the document chapter templates located in the `docs` folder. 
Therein, you can provide chapter data to be loaded into the supported compliance documents. +In this case, the library will fall back to the document chapter templates located in your release manager component's `docs` folder. Therein, you can provide chapter data to be loaded into the supported compliance documents. + +## Additional Capabilities + +The library supports the activation of various capabilities through the `capabilities:` field in `metadata.yml`. + +### Zephyr for Jira + +``` +capabilities: + - Zephyr +``` + +The Zephyr for Jira capability currently supports: + +- Reporting the result of a test execution to Zephyr for Jira + +## Environment Promotion + +This section will guide you through the "environment promotion" feature of the orchestration pipeline. + +### What is the "environment promotion" feature? + +Typically, software is running in different environments, such as one environment for development (DEV), one for quality assurance (QA), and one for production (PROD - this is what end-users of the software consume). Developers work on on the software in the development environment, and once they finish one version (a state) of the software, they bring that version to the QA environment, and once this version is deemed production-ready it is brought to the production environment so that users can consume the new version. + +The environment promotion feature of the orchestration pipeline automates moving a certain version of the software from one environment to the next. Developers only have to tell the orchestration pipeline if a new version should be built (in DEV) and packaged as an installable "release bundle", or if an existing "release bundle" should be promoted to either the QA or the production environment. + +The environment promotion feature is part of the regular orchestration pipeline. Therefore, the promotion is executed from various Jenkins stages. 
It is not possible to change the process itself, but you can customize how the promotion happens exactly for each of your software components. + +### Source Code Organisation + +The components of your software are defined in the `repositories` section of the `metadata.yml` file in the release manager repository. In order for the orchestration pipeline to know which state of each component should be promoted, it needs to have some knowledge about how version control in your repositories is organised. Everything depends on a user-supplied build parameter named `version` to the Jenkins pipeline. Other input parameters do not have any impact on source code lookup. + +- When no `version` is given, the orchestration pipeline will default to `WIP` (work in progress). In this scenario, source code for each repository is taken from the configured branch in the `metadata.yml` file (defaulting to `master` if no branch is specified there). +- When a `version` is given, source code will be taken from a branch `release/$VERSION` in each repository. When this branch does not exist yet, it will be created (based on the configured branch in `metadata.yml`) by the pipeline. Subsequent runs with the same `version` input will take the source code from the created release branch - changes to the configured branch will have no effect on this version! This is by design: it allows some developers to work on new features on the mainline branch (typically `master`) while others polish the release branch. To this end, the orchestration pipeline allows to enable separate development environments per version to isolate changes in OpenShift resources (see section "Environments" further down). +- The orchestration pipeline applies the same branching rules to the release manager repository - it will create a release branch per version. There is one small caveat here: Jenkins only considers the `Jenkinsfile` from the branch which is configured for a pipeline. 
That means that for a pipeline setup against `master`, Jenkins will always execute the latest `Jenkinsfile` from `master`, even when you pass an explicit `version` to the pipeline. The orchestration pipeline will read e.g. the `metadata.yml` file from the matching release branch, but the `Jenkinsfile` itself will be from `master`. Usually, this should not be an issue as you should not make changes to the `Jenkinsfile` of the release manager repository anyway. + +### Release bundles + +A specific "release bundle" is identified by four data points: a `version` (as outlined above), a `changeId`, a build number and an environment. The `version`, `changeId` and `environment` are user-supplied input parameters to the release manager pipeline, the build number is calculated automatically. The `changeId` can be any string meaningful to the user, its value does not have any effect on the operation of the orchestration pipeline. The environment input variable (such as `DEV`) will be shortened to a single-letter token (e.g. `D`). + +Technically speaking, a release bundle is a certain state of the release manager repository and the state of each linked repository at that time. This state is identified by a Git tag. For example, a release bundle with `version=1`, `changeId=1234`, `buildNumber=0` and `environment=DEV` is identified by the Git tag `v1-1234-0-D`. This tag is set on the release manager repository, and all repositories the `metadata.yml` refers to at this time. + +### Environments + +The orchestration pipeline assumes three "conceptual" environments: DEV, QA and PROD (with short token forms D, Q and P). Those environments are strictly ordered - a state should go from DEV to QA, and then from QA to PROD. + +To ensure that software progresses along the DEV -> QA -> PROD path, release bundles from environment DEV can only be installed into QA, and only a release bundle from QA can be installed into PROD. Installing a release bundle from DEV into PROD is not allowed. 
+ +Each "conceptual" environment is mapped to an OpenShift namespace: + +- DEV to `$PROJECT-dev` (e.g. `foo-dev`) +- QA to `$PROJECT-test` (e.g. `foo-test`. Note that it is NOT `-qa`!) +- PROD to `$PROJECT-prod` (e.g. `foo-prod`) + +Keep in mind that when you create a new project with OpenDevStack, you get three OpenShift namespaces: + +- `foo-dev` (your DEV environment) +- `foo-test` (your QA environment - unfortunately not named `-qa` for historical reasons) +- `foo-cd` (where Jenkins runs and the pipelines such as the orchestration pipeline are executed) + +So while there is a corresponding namespace for DEV and QA, there is no namespace corresponding to the PROD environment out-of-the-box. This is because it is assumed that your PROD environment is likely on another cluster altogether. To create `foo-prod` on another cluster, you (or someone with appropriate rights) can run the script located at https://github.com/opendevstack/ods-core/blob/master/ocp-scripts/create-target-project.sh. Then you need to tell orchestration pipeline two things: where the API of the external cluster is, and the credentials with which to access it. A typical configuration is: + +``` +id: foo +... +repositories: [ ... ] +environments: + prod: + apiUrl: https://api.example.com + credentialsId: foo-cd-foo-prod +``` + +This assumes you have the API token credentials stored in a secret of type `kubernetes.io/basic-auth` named `foo-prod` in the `foo-cd` namespace. This secret needs to be synced with Jenkins (which is achieved by labeling it with `credential.sync.jenkins.openshift.io=true`). The stored credentials need to belong to a serviceaccount with rights to admin the `foo-prod` namespace. The easiest way to setup all of this is by running the script located at https://github.com/opendevstack/ods-core/blob/master/ocp-scripts/create-target-sa-secret.sh, which makes use of the output of the `create-target-project.sh` ran earlier. 
+ +TIP: It is also possible to have the PROD environment on the same cluster, then you simply create a `foo-prod` namespace next to `foo-dev` and `foo-test`, and allow the `foo-cd:jenkins` account to admin that project. In that case, you do not need to configure anything in `metadata.yml` as the default configuration assumes the same cluster. The opposite is also possible: you can configure the QA environment to be on a different cluster than the DEV environment - simply follow the instructions above to create a `foo-test` namespace. + +As mentioned in the "Source Code Organisation" section, the orchestration pipeline allows to enable separate development environments to isolate different versions. When this mode is enabled, pipeline runs with `version=WIP` will deploy into the `$PROJECT-dev` as usual, but pipeline runs with `version=X` will deploy into `$PROJECT-dev-X`. The `$PROJECT-dev-X` environment has to be created beforehand (e.g. by cloning `$PROJECT-dev` with its serviceaccounts and rolebindings). To enable this feature, set `versionedDevEnvs` to `true` in the config of your `Jenkinsfile`, like this: + +``` +def config = [debug: true, odsImageTag: 'x.x', versionedDevEnvs: true] +``` + +### Customizing the Release Manager configuration + +### Timeouts and retries + +If one of your components take longer than 10 minutes (this is the default value) to be promoted from one environment to another, the Release Manager pipeline will exit due to this timeout. +You can increase this timeout by setting the `openshiftRolloutTimeoutMinutes` per environment in the Release Manager repository in the `metadata.yml` file. +Similarly, the number of retries is configurable with the `openshiftRolloutTimeoutRetries` property. + +The following example establishes a timeout of `120` minutes for both `qa` and `prod` environments with a total number of `3` retries. + +```yaml +... +environments: + prod: + apiUrl: https://... + credentialsId: ... 
+ openshiftRolloutTimeoutMinutes: 120 + openshiftRolloutTimeoutRetries: 3 + qa: + openshiftRolloutTimeoutMinutes: 120 + openshiftRolloutTimeoutRetries: 3 +... +``` + +### Walkthrough + +Let's start by assuming you have a project FOO with two components, X and Y. These components are defined under the `repositories` section in the `metadata.yml` file of the release manager repository. When you want to create a new release, you start the orchestration pipeline with input parameters - we will use version `1` and change ID `1234` in this example. The environment should be `DEV`. At the end of the pipeline run, you'll have a release bundle identified by the tag `v1-1234-0-D`. This release can later be promoted as-is to QA. Once it is installed there, the same release bundle will be tagged with `v1-1234-0-Q` which can then be promoted to PROD (where it will be tagged with `v1-1234-0-P`). + +To create a release bundle, the orchestration pipeline will first trigger the build of each component. Then, it will export all resources in your OpenShift namespace (`$PROJECT-$ENVIRONMENT`, here `foo-dev`) belonging to the component. By convention, this means all resources labeled with `app=$PROJECT-$COMPONENT` (e.g. `app=foo-x`). Any resources without such a label will NOT be part of the release bundle. The exported resources are stored in a `template.yml` file (an OpenShift template) located in the `openshift-exported` folder within each component repository. Further, the container image SHA of the running pod is retrieved and stored in the file `image-sha` in the same folder. Once done, the orchestration pipeline will commit the two files, tag the commit with `v1-1234-0-D` and push to the remote. After this process has been done for all repositories, the same tag is also applied to the release manager repository. At this stage, the "dev release bundle" is complete and can be installed into QA. 
+ +To trigger the installation of an existing release bundle, the user needs to supply a `version` and `changeId` which has previously been used to create a release bundle. In our example, supplying `version=1`, `changeId=1234` and `environment=QA` will promote the release bundle identified by `v1-1234-0-D` to the QA environment and tag it with `v1-1234-0-Q`. Now that we have a "QA release bundle", we can promote it to PROD by supplying `version=1`, `changeId=1234` and `environment=PROD`. + + +### Customizing release bundle creation + +As outlined above, a release bundle is essentially a state of all involved Git repositories. Each component repository contains two artifacts: + +- a container image SHA +- OpenShift resource configuration (expressed in an OpenShift template) + +You cannot modify the image SHA (it is the result of what the component pipeline builds), but you can influence the OpenShift template. One reason to do so is that e.g. routes or `ConfigMap` values will need to differ between environments, and you need to tell the orchestration pipeline to parametrize the templates, and to supply the right values when the templates are applied in the target environment. + +When the orchestration pipeline exports configuration, it has no way to tell which values should actually be parameters. For example, you might have a route `x.foo-dev.dev-cluster.com` in DEV, and want this to be `x.foo-test.dev-cluster.com` in QA and `x.foo-prod.prod-cluster.com` in PROD. In the exported template, the value `x.foo-dev.dev-cluster.com` will be hardcoded. To fix this, you can create three files in the component openshift folder, `dev.env`, `qa.env` and `prod.env`. 
These files may contain `PARAM=value` lines, like this: + +dev.env +``` +X_ROUTE=x.foo-dev.dev-cluster.com +``` + +qa.env +``` +X_ROUTE=x.foo-test.dev-cluster.com +``` + +prod.env +``` +X_ROUTE=x.foo-prod.prod-cluster.com +``` + +All three files need to list the exact same parameters - otherwise applying the templates will fail. Once those param files are present, the orchestration pipeline will pick them up automatically. When you create a release bundle (in DEV), the param file is applied "in reverse", meaning that any concrete param value (on the right) will be substituted with the param key (on the left) in the template. Later when the template is applied in e.g. QA, the param keys are replaced with the concrete values from `qa.env`. + +IMPORTANT: It is necessary to have all the param files completed before you create a release bundle - if you want to change e.g. the value of a parameter in the `prod.env` file afterwards, you will need to create a new release bundle (as they are identified by Git tags, which do not move when you make new commits on the release branch). + +Next to parametrizing templates, you can also adjust how the export is done. As the export is using [Tailor](https://github.com/opendevstack/tailor), the best way to customize is to supply a `Tailorfile` in the `openshift-exported` folder, in which you can define the options you want to set, such as excluding certain labels or resource types, or preserving specific fields in the live configuration. Please see Tailor's documentation for more information. It is also possible to have different configuration files per environment if you suffix with the `$PROJECT`, e.g. `Tailorfile.foo-dev`. + +TIP: If you have component-specific parameters that differ between environments, a lightweight way to add these is via parameter files located in the `openshift-exported` folder matching the target project such as `foo-dev.env`, `foo-test.env` and `foo-prod.env`. 
These files are picked up automatically without special setup in a `Tailorfile`. + +### Authoring OpenShift configuration + +In the process described above, the OpenShift configuration is exported and stored in the repositories in `openshift-exported`. This approach is easy to get started with, but it does have limitations: + +- There is no defined state: whatever gets exported is what will be promoted, even if a certain configuration was meant to be only temporary or is specific to e.g. only the DEV environment. +- There is little traceability: as configuration is done through the OpenShift web interface, it is not known who did the change and when, and no chance for other team members to review that change. +- The parametrization of the exported template might produce incorrect results as it is just a string search-and-replace operation without further knowledge of the meaning of your configuration values. + +To overcome these issues, it is possible to author the OpenShift templates yourself instead of exporting them. The fastest way to start with this is by renaming the folder `openshift-exported` (containing the exported template) to `openshift.` From this point on, the orchestration pipeline will skip the export, and apply whatever is defined in the `openshift` folder. + +TIP: If you are new to writing OpenShift templates, please read https://github.com/opendevstack/tailor#template-authoring. + +When you author templates, you can also store the secrets in the param files GPG encrypted (`.env.enc` files). To achieve this, you need to create a private/public keypair for Jenkins, store the private key in a secret called `tailor-private-key` in your `foo-cd` namespace, and sync it as a Jenkins credentials item. Once the `.env.enc` files are encrypted against the public key, the orchestration pipeline will automatically use the private key to decrypt the params on-the-fly. Please see https://github.com/opendevstack/tailor#working-with-secrets for more information. 
-### Automated Cloning of Environments +### Known Limitations -If you want your *target environment* to be created from an existing *source environment* such as `dev` or `test` on the fly, you need to provide the `environment` and `sourceEnvironmentToClone` parameters to your pipeline, respectively. Their values will be combined with your project ID in the form `${project-id}-${environment}` to create the project (namespace) name in your OpenShift cluster. +- For versioned, separate DEV environments, pulling images from the `foo-cd` namespace is not possible (because the `foo-cd:jenkins` serviceaccount does not have admin rights in `foo-cd` and therefore can't grant access to it) +- Tagging means we are pointing to a concrete SHA of a Git repository. This enforces that no manual editing of exported config can happen between promotion to QA and promotion to PROD, which in effect forces everything to be parameterized properly. +- JIRA always triggers the `master` branch of the release manager, which means the `Jenkinsfile` is always taken from `master` (and NOT from the correct release branch - only `metadata.yml` etc. are read from the release branch) +- There is only one QA namespace, preventing to test multiple releases at the same time. +- The secret of the serviceaccount in the target cluster is known to the orchestration pipeline (as a Jenkins credential synced from OpenShift), therefore developers with edit/admin rights in the CD namespace have access to that secret +- Tags could manually be set / moved (this can be prevented in Bitbucket by administrators) +- Passwords etc. in the OpenShift configuration are stored in clear text in the export (this can be prevented by authoring templates and using a private key for encryption of param files) +- During export, the templates are parameterized automatically, but this is done using string search-and-replace and unwanted replacements might occur (this can be prevented by authoring the templates manually). 
+- By default, SonarQube scans (and reports) are only generated for the `master` branch of each component. As the orchestration pipeline automatically creates release branches for each version, no scans and reports are created on those. This can be changed by configuring `sonarQubeBranch: '*'`` in each component's `Jenkinsfile`, however keep in mind that quality trends etc. will be mixed up if you use the free version of SonarQube as that version does not have support for multiple branches. +- An existing QA-tag cannot be deployed again in PROD. This has been intentionally designed that way as any change to PROD needs its unique change ID, which results in a new tag. From 214ae2b2d542978ce3c13eee20cbc70d7f3c44e7 Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 5 Feb 2024 16:12:57 +0100 Subject: [PATCH 25/32] update README.md with use cases --- e2e-python/files/README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md index 1a5a9cc71..19fb8e35f 100644 --- a/e2e-python/files/README.md +++ b/e2e-python/files/README.md @@ -1,7 +1,11 @@ # Python end-to-end tests This is a python based quicktarter intended to develop end-to-end tests for data pipelines. -In order to do that it uses two testing technologies: Great Expectations and Pytest. +In order to do that it uses two testing technologies: + 1. Great Expectations, meant for data transformation testing data within relational tables. + e.g.: You could test the shema of a database, the number of rows, that a specific column has no null values, etc + + 2. Pytest together with Boto it allows for testing etl triggers, notification system, content of S3 buckets, etc This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. 
From 94e105fc80d5774feb6a220ed251376ab8e6c36b Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 5 Feb 2024 16:13:20 +0100 Subject: [PATCH 26/32] update README.md with use cases --- e2e-python/files/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e-python/files/README.md b/e2e-python/files/README.md index 19fb8e35f..540bd2c2f 100644 --- a/e2e-python/files/README.md +++ b/e2e-python/files/README.md @@ -3,7 +3,7 @@ This is a python based quicktarter intended to develop end-to-end tests for data pipelines. In order to do that it uses two testing technologies: 1. Great Expectations, meant for data transformation testing data within relational tables. - e.g.: You could test the shema of a database, the number of rows, that a specific column has no null values, etc + e.g.: You could test the schema of a database, the number of rows, that a specific column has no null values, etc 2. Pytest together with Boto it allows for testing etl triggers, notification system, content of S3 buckets, etc From 2c766f7135bd567cfc8e380546c47b84b372cd9f Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 6 Feb 2024 11:38:43 +0100 Subject: [PATCH 27/32] rename qs and create ods webpage --- .github/dependabot.yml | 3 ++ .../quickstarters/pages/e2e-etl-python.adoc | 46 +++++++++++++++++++ {e2e-python => e2e-etl-python}/Jenkinsfile | 0 .../Jenkinsfile.template | 0 {e2e-python => e2e-etl-python}/README.md | 2 +- .../dev.yml.template | 0 .../.devcontainer/devcontainer.json.template | 0 .../files/.editorconfig | 0 .../files/.gitignore | 0 .../files/.pre-commit-config.yaml | 0 .../files/.rubocop.yml | 0 .../files/.ruby-version | 0 .../files/.terraform-version | 0 {e2e-python => e2e-etl-python}/files/Gemfile | 0 .../files/Gemfile.lock | 0 {e2e-python => e2e-etl-python}/files/Makefile | 0 {e2e-python => e2e-etl-python}/files/Pipfile | 0 .../files/README.md | 1 - .../files/backend.tf | 0 .../files/cfn-templates/.gitkeep | 0 .../files/cfn-templates/cfs3.json | 0 
.../files/common-tags.tf | 0 .../files/environments/dev.json | 0 .../files/environments/prod.json | 0 .../files/environments/test.json | 0 .../files/inputs2outputs.tf | 0 .../files/kitchen.yml | 0 .../files/lib/scripts/aws/check_conf.sh | 0 .../scripts/createstackfixtureoutputs2yml.sh | 0 .../lib/scripts/createstackoutputs2yml.sh | 0 {e2e-python => e2e-etl-python}/files/main.tf | 0 .../files/metadata.yml | 4 +- .../files/modules/codebuild/main.tf | 0 .../files/modules/codebuild/output.tf | 0 .../files/modules/codebuild/variables.tf | 0 .../files/modules/codepipeline/main.tf | 0 .../files/modules/codepipeline/output.tf | 0 .../files/modules/codepipeline/variables.tf | 2 +- .../files/modules/iam_roles/main.tf | 0 .../files/modules/iam_roles/outputs.tf | 0 .../files/modules/iam_roles/variables.tf | 0 .../files/modules/s3-bucket/main.tf | 0 .../files/modules/s3-bucket/outputs.tf | 0 .../files/modules/s3-bucket/variables.tf | 0 .../files/outputs.tf | 0 .../files/pytest.ini | 0 .../files/random.tf | 0 .../files/release-manager.yml | 0 .../files/reports/install/.gitkeep | 0 .../files/requirements.txt | 0 .../files/stackmodulesoutputs.tf | 0 .../files/terraform-data.tf | 0 .../files/test/fixtures/default/backend.tf | 0 .../files/test/fixtures/default/main.tf | 0 .../test/fixtures/default/moduleoutputs.tf | 0 .../files/test/fixtures/default/random.tf | 0 .../default/controls/blueprints.rb | 0 .../integration/default/controls/default.rb | 0 .../test/integration/default/files/.gitkeep | 0 .../files/test/integration/default/inspec.yml | 0 .../test/integration/default/inspec.yml.tmpl | 0 .../test/integration/default/libraries/aws.rb | 0 .../default/libraries/fixture_data.rb | 0 .../default/libraries/terraform_data.rb | 0 .../acceptance/great_expectations/.gitignore | 0 .../checkpoints/Demo_athena_checkpoint.yml | 0 .../checkpoints/Demo_person_checkpoint.yml | 0 .../expectations/athena_validation_suite.json | 0 .../expectations/person_validation_suite.json | 0 
.../great_expectations/great_expectations.yml | 0 .../styles/data_docs_custom_styles.css | 0 .../test_preparation/post_requisites.py | 0 .../test_preparation/pre_requisites.py | 0 .../pytest/Demo_allure_pytest_test.py | 0 .../tests/installation/installation_test.py | 0 .../tests/integration/integration_test.py | 0 .../files/utils/checkpoints_executions.py | 0 .../files/utils/json2JUnit.py | 0 .../files/variables.tf | 2 +- .../files/versions.tf | 0 .../prod.yml.template | 0 .../test.yml.template | 0 .../testdata/golden/jenkins-build-stages.json | 0 .../golden/jenkins-provision-stages.json | 0 .../testdata/golden/sonar-scan.json | 0 .../testdata/steps.yml | 2 +- .../testing.yml.template | 0 87 files changed, 55 insertions(+), 7 deletions(-) create mode 100644 docs/modules/quickstarters/pages/e2e-etl-python.adoc rename {e2e-python => e2e-etl-python}/Jenkinsfile (100%) rename {e2e-python => e2e-etl-python}/Jenkinsfile.template (100%) rename {e2e-python => e2e-etl-python}/README.md (86%) rename {e2e-python => e2e-etl-python}/dev.yml.template (100%) rename {e2e-python => e2e-etl-python}/files/.devcontainer/devcontainer.json.template (100%) rename {e2e-python => e2e-etl-python}/files/.editorconfig (100%) rename {e2e-python => e2e-etl-python}/files/.gitignore (100%) rename {e2e-python => e2e-etl-python}/files/.pre-commit-config.yaml (100%) rename {e2e-python => e2e-etl-python}/files/.rubocop.yml (100%) rename {e2e-python => e2e-etl-python}/files/.ruby-version (100%) rename {e2e-python => e2e-etl-python}/files/.terraform-version (100%) rename {e2e-python => e2e-etl-python}/files/Gemfile (100%) rename {e2e-python => e2e-etl-python}/files/Gemfile.lock (100%) rename {e2e-python => e2e-etl-python}/files/Makefile (100%) rename {e2e-python => e2e-etl-python}/files/Pipfile (100%) rename {e2e-python => e2e-etl-python}/files/README.md (99%) rename {e2e-python => e2e-etl-python}/files/backend.tf (100%) rename {e2e-python => e2e-etl-python}/files/cfn-templates/.gitkeep (100%) rename 
{e2e-python => e2e-etl-python}/files/cfn-templates/cfs3.json (100%) rename {e2e-python => e2e-etl-python}/files/common-tags.tf (100%) rename {e2e-python => e2e-etl-python}/files/environments/dev.json (100%) rename {e2e-python => e2e-etl-python}/files/environments/prod.json (100%) rename {e2e-python => e2e-etl-python}/files/environments/test.json (100%) rename {e2e-python => e2e-etl-python}/files/inputs2outputs.tf (100%) rename {e2e-python => e2e-etl-python}/files/kitchen.yml (100%) rename {e2e-python => e2e-etl-python}/files/lib/scripts/aws/check_conf.sh (100%) rename {e2e-python => e2e-etl-python}/files/lib/scripts/createstackfixtureoutputs2yml.sh (100%) rename {e2e-python => e2e-etl-python}/files/lib/scripts/createstackoutputs2yml.sh (100%) rename {e2e-python => e2e-etl-python}/files/main.tf (100%) rename {e2e-python => e2e-etl-python}/files/metadata.yml (73%) rename {e2e-python => e2e-etl-python}/files/modules/codebuild/main.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/codebuild/output.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/codebuild/variables.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/codepipeline/main.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/codepipeline/output.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/codepipeline/variables.tf (97%) rename {e2e-python => e2e-etl-python}/files/modules/iam_roles/main.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/iam_roles/outputs.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/iam_roles/variables.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/s3-bucket/main.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/s3-bucket/outputs.tf (100%) rename {e2e-python => e2e-etl-python}/files/modules/s3-bucket/variables.tf (100%) rename {e2e-python => e2e-etl-python}/files/outputs.tf (100%) rename {e2e-python => e2e-etl-python}/files/pytest.ini (100%) rename {e2e-python => 
e2e-etl-python}/files/random.tf (100%) rename {e2e-python => e2e-etl-python}/files/release-manager.yml (100%) rename {e2e-python => e2e-etl-python}/files/reports/install/.gitkeep (100%) rename {e2e-python => e2e-etl-python}/files/requirements.txt (100%) rename {e2e-python => e2e-etl-python}/files/stackmodulesoutputs.tf (100%) rename {e2e-python => e2e-etl-python}/files/terraform-data.tf (100%) rename {e2e-python => e2e-etl-python}/files/test/fixtures/default/backend.tf (100%) rename {e2e-python => e2e-etl-python}/files/test/fixtures/default/main.tf (100%) rename {e2e-python => e2e-etl-python}/files/test/fixtures/default/moduleoutputs.tf (100%) rename {e2e-python => e2e-etl-python}/files/test/fixtures/default/random.tf (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/controls/blueprints.rb (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/controls/default.rb (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/files/.gitkeep (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/inspec.yml (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/inspec.yml.tmpl (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/libraries/aws.rb (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/libraries/fixture_data.rb (100%) rename {e2e-python => e2e-etl-python}/files/test/integration/default/libraries/terraform_data.rb (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/.gitignore (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json (100%) rename {e2e-python => 
e2e-etl-python}/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/great_expectations.yml (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py (100%) rename {e2e-python => e2e-etl-python}/files/tests/acceptance/pytest/Demo_allure_pytest_test.py (100%) rename {e2e-python => e2e-etl-python}/files/tests/installation/installation_test.py (100%) rename {e2e-python => e2e-etl-python}/files/tests/integration/integration_test.py (100%) rename {e2e-python => e2e-etl-python}/files/utils/checkpoints_executions.py (100%) rename {e2e-python => e2e-etl-python}/files/utils/json2JUnit.py (100%) rename {e2e-python => e2e-etl-python}/files/variables.tf (98%) rename {e2e-python => e2e-etl-python}/files/versions.tf (100%) rename {e2e-python => e2e-etl-python}/prod.yml.template (100%) rename {e2e-python => e2e-etl-python}/test.yml.template (100%) rename {e2e-python => e2e-etl-python}/testdata/golden/jenkins-build-stages.json (100%) rename {e2e-python => e2e-etl-python}/testdata/golden/jenkins-provision-stages.json (100%) rename {e2e-python => e2e-etl-python}/testdata/golden/sonar-scan.json (100%) rename {e2e-python => e2e-etl-python}/testdata/steps.yml (93%) rename {e2e-python => e2e-etl-python}/testing.yml.template (100%) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9c253b318..530f34db9 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,6 +8,9 @@ updates: schedule: # Check for updates to GitHub Actions every week interval: "weekly" + labels: + - "dependencies" + - "skip changelog" labels: - 
"dependencies" - "skip changelog" \ No newline at end of file diff --git a/docs/modules/quickstarters/pages/e2e-etl-python.adoc b/docs/modules/quickstarters/pages/e2e-etl-python.adoc new file mode 100644 index 000000000..3902da385 --- /dev/null +++ b/docs/modules/quickstarters/pages/e2e-etl-python.adoc @@ -0,0 +1,46 @@ += End-to-end tests with Great Expectations and Pytest (e2e-etl-python) + +End-to-end tests for ETLs quickstarter project + +== Purpose of this quickstarter + +This is a python based quicktarter intended to develop end-to-end tests for data pipelines. +In order to do that it uses two testing technologies: + 1. Great Expectations, meant for data transformation testing data within relational tables. + e.g.: You could test the schema of a database, the number of rows, that a specific column has no null values, etc + 2. Pytest together with Boto it allows for testing etl triggers, notification system, content of S3 buckets, etc + +== What files / architecture is generated? + +---- +├── Jenkinsfile - This file contains Jenkins stages. +├── README.md +├── environments +│ ├── dev.json - This file describes parameters for the development AWS environment. +│ ├── test.json - This file describes parameters for the test AWS environment. +│ └── prod.json - This file describes parameters for the production AWS environment. +├── tests - This folder contains the root for test-kitchen +│ ├── acceptance/great_expectations - This folder contains the Great Expecations tests to test +│ └── acceptance/pytest - This folder contains the pytest tests to test + + +---- + +== Frameworks used + +* https://greatexpectations.io[Great-expectations] +* https://pytest.org[Pytest] + + +== Usage - how do you start after you provisioned this quickstarter + +Check the README.md file at root level for further instructions after the quickstarter has been provisioned. 
+ + +== Builder agent used + +This quickstarter uses https://github.com/opendevstack/ods-quickstarters/tree/master/common/jenkins-agents/terraform[terraform] Jenkins agent. + +== Known limitations + +Let us know if you find any, thanks! diff --git a/e2e-python/Jenkinsfile b/e2e-etl-python/Jenkinsfile similarity index 100% rename from e2e-python/Jenkinsfile rename to e2e-etl-python/Jenkinsfile diff --git a/e2e-python/Jenkinsfile.template b/e2e-etl-python/Jenkinsfile.template similarity index 100% rename from e2e-python/Jenkinsfile.template rename to e2e-etl-python/Jenkinsfile.template diff --git a/e2e-python/README.md b/e2e-etl-python/README.md similarity index 86% rename from e2e-python/README.md rename to e2e-etl-python/README.md index 8ded3b2c9..0e8c848bb 100644 --- a/e2e-python/README.md +++ b/e2e-etl-python/README.md @@ -1,4 +1,4 @@ -# e2e-python Quickstarter (e2e-python) +# e2e-etl-python Quickstarter (e2e-etl-python) Documentation is located in our [official documentation](https://www.opendevstack.org/ods-documentation/opendevstack/latest/getting-started/index.html) diff --git a/e2e-python/dev.yml.template b/e2e-etl-python/dev.yml.template similarity index 100% rename from e2e-python/dev.yml.template rename to e2e-etl-python/dev.yml.template diff --git a/e2e-python/files/.devcontainer/devcontainer.json.template b/e2e-etl-python/files/.devcontainer/devcontainer.json.template similarity index 100% rename from e2e-python/files/.devcontainer/devcontainer.json.template rename to e2e-etl-python/files/.devcontainer/devcontainer.json.template diff --git a/e2e-python/files/.editorconfig b/e2e-etl-python/files/.editorconfig similarity index 100% rename from e2e-python/files/.editorconfig rename to e2e-etl-python/files/.editorconfig diff --git a/e2e-python/files/.gitignore b/e2e-etl-python/files/.gitignore similarity index 100% rename from e2e-python/files/.gitignore rename to e2e-etl-python/files/.gitignore diff --git a/e2e-python/files/.pre-commit-config.yaml 
b/e2e-etl-python/files/.pre-commit-config.yaml similarity index 100% rename from e2e-python/files/.pre-commit-config.yaml rename to e2e-etl-python/files/.pre-commit-config.yaml diff --git a/e2e-python/files/.rubocop.yml b/e2e-etl-python/files/.rubocop.yml similarity index 100% rename from e2e-python/files/.rubocop.yml rename to e2e-etl-python/files/.rubocop.yml diff --git a/e2e-python/files/.ruby-version b/e2e-etl-python/files/.ruby-version similarity index 100% rename from e2e-python/files/.ruby-version rename to e2e-etl-python/files/.ruby-version diff --git a/e2e-python/files/.terraform-version b/e2e-etl-python/files/.terraform-version similarity index 100% rename from e2e-python/files/.terraform-version rename to e2e-etl-python/files/.terraform-version diff --git a/e2e-python/files/Gemfile b/e2e-etl-python/files/Gemfile similarity index 100% rename from e2e-python/files/Gemfile rename to e2e-etl-python/files/Gemfile diff --git a/e2e-python/files/Gemfile.lock b/e2e-etl-python/files/Gemfile.lock similarity index 100% rename from e2e-python/files/Gemfile.lock rename to e2e-etl-python/files/Gemfile.lock diff --git a/e2e-python/files/Makefile b/e2e-etl-python/files/Makefile similarity index 100% rename from e2e-python/files/Makefile rename to e2e-etl-python/files/Makefile diff --git a/e2e-python/files/Pipfile b/e2e-etl-python/files/Pipfile similarity index 100% rename from e2e-python/files/Pipfile rename to e2e-etl-python/files/Pipfile diff --git a/e2e-python/files/README.md b/e2e-etl-python/files/README.md similarity index 99% rename from e2e-python/files/README.md rename to e2e-etl-python/files/README.md index 540bd2c2f..7ab1904b2 100644 --- a/e2e-python/files/README.md +++ b/e2e-etl-python/files/README.md @@ -4,7 +4,6 @@ This is a python based quicktarter intended to develop end-to-end tests for data In order to do that it uses two testing technologies: 1. Great Expectations, meant for data transformation testing data within relational tables. 
e.g.: You could test the schema of a database, the number of rows, that a specific column has no null values, etc - 2. Pytest together with Boto it allows for testing etl triggers, notification system, content of S3 buckets, etc This quickstarter project was generated from the *inf-terraform-aws* ODS quickstarter. diff --git a/e2e-python/files/backend.tf b/e2e-etl-python/files/backend.tf similarity index 100% rename from e2e-python/files/backend.tf rename to e2e-etl-python/files/backend.tf diff --git a/e2e-python/files/cfn-templates/.gitkeep b/e2e-etl-python/files/cfn-templates/.gitkeep similarity index 100% rename from e2e-python/files/cfn-templates/.gitkeep rename to e2e-etl-python/files/cfn-templates/.gitkeep diff --git a/e2e-python/files/cfn-templates/cfs3.json b/e2e-etl-python/files/cfn-templates/cfs3.json similarity index 100% rename from e2e-python/files/cfn-templates/cfs3.json rename to e2e-etl-python/files/cfn-templates/cfs3.json diff --git a/e2e-python/files/common-tags.tf b/e2e-etl-python/files/common-tags.tf similarity index 100% rename from e2e-python/files/common-tags.tf rename to e2e-etl-python/files/common-tags.tf diff --git a/e2e-python/files/environments/dev.json b/e2e-etl-python/files/environments/dev.json similarity index 100% rename from e2e-python/files/environments/dev.json rename to e2e-etl-python/files/environments/dev.json diff --git a/e2e-python/files/environments/prod.json b/e2e-etl-python/files/environments/prod.json similarity index 100% rename from e2e-python/files/environments/prod.json rename to e2e-etl-python/files/environments/prod.json diff --git a/e2e-python/files/environments/test.json b/e2e-etl-python/files/environments/test.json similarity index 100% rename from e2e-python/files/environments/test.json rename to e2e-etl-python/files/environments/test.json diff --git a/e2e-python/files/inputs2outputs.tf b/e2e-etl-python/files/inputs2outputs.tf similarity index 100% rename from e2e-python/files/inputs2outputs.tf rename to 
e2e-etl-python/files/inputs2outputs.tf diff --git a/e2e-python/files/kitchen.yml b/e2e-etl-python/files/kitchen.yml similarity index 100% rename from e2e-python/files/kitchen.yml rename to e2e-etl-python/files/kitchen.yml diff --git a/e2e-python/files/lib/scripts/aws/check_conf.sh b/e2e-etl-python/files/lib/scripts/aws/check_conf.sh similarity index 100% rename from e2e-python/files/lib/scripts/aws/check_conf.sh rename to e2e-etl-python/files/lib/scripts/aws/check_conf.sh diff --git a/e2e-python/files/lib/scripts/createstackfixtureoutputs2yml.sh b/e2e-etl-python/files/lib/scripts/createstackfixtureoutputs2yml.sh similarity index 100% rename from e2e-python/files/lib/scripts/createstackfixtureoutputs2yml.sh rename to e2e-etl-python/files/lib/scripts/createstackfixtureoutputs2yml.sh diff --git a/e2e-python/files/lib/scripts/createstackoutputs2yml.sh b/e2e-etl-python/files/lib/scripts/createstackoutputs2yml.sh similarity index 100% rename from e2e-python/files/lib/scripts/createstackoutputs2yml.sh rename to e2e-etl-python/files/lib/scripts/createstackoutputs2yml.sh diff --git a/e2e-python/files/main.tf b/e2e-etl-python/files/main.tf similarity index 100% rename from e2e-python/files/main.tf rename to e2e-etl-python/files/main.tf diff --git a/e2e-python/files/metadata.yml b/e2e-etl-python/files/metadata.yml similarity index 73% rename from e2e-python/files/metadata.yml rename to e2e-etl-python/files/metadata.yml index cca8805ed..0c2a142b6 100644 --- a/e2e-python/files/metadata.yml +++ b/e2e-etl-python/files/metadata.yml @@ -1,7 +1,7 @@ --- -name: e2e-python +name: e2e-etl-python # yamllint disable-line rule:line-length -description: "This end-to-end testing project was generated from the e2e-python ODS quickstarter." +description: "This end-to-end testing project was generated from the e2e-etl-python ODS quickstarter." 
supplier: https://es.python.org/ version: 1.0 type: ods-test diff --git a/e2e-python/files/modules/codebuild/main.tf b/e2e-etl-python/files/modules/codebuild/main.tf similarity index 100% rename from e2e-python/files/modules/codebuild/main.tf rename to e2e-etl-python/files/modules/codebuild/main.tf diff --git a/e2e-python/files/modules/codebuild/output.tf b/e2e-etl-python/files/modules/codebuild/output.tf similarity index 100% rename from e2e-python/files/modules/codebuild/output.tf rename to e2e-etl-python/files/modules/codebuild/output.tf diff --git a/e2e-python/files/modules/codebuild/variables.tf b/e2e-etl-python/files/modules/codebuild/variables.tf similarity index 100% rename from e2e-python/files/modules/codebuild/variables.tf rename to e2e-etl-python/files/modules/codebuild/variables.tf diff --git a/e2e-python/files/modules/codepipeline/main.tf b/e2e-etl-python/files/modules/codepipeline/main.tf similarity index 100% rename from e2e-python/files/modules/codepipeline/main.tf rename to e2e-etl-python/files/modules/codepipeline/main.tf diff --git a/e2e-python/files/modules/codepipeline/output.tf b/e2e-etl-python/files/modules/codepipeline/output.tf similarity index 100% rename from e2e-python/files/modules/codepipeline/output.tf rename to e2e-etl-python/files/modules/codepipeline/output.tf diff --git a/e2e-python/files/modules/codepipeline/variables.tf b/e2e-etl-python/files/modules/codepipeline/variables.tf similarity index 97% rename from e2e-python/files/modules/codepipeline/variables.tf rename to e2e-etl-python/files/modules/codepipeline/variables.tf index bba038a71..5a89ce6c3 100644 --- a/e2e-python/files/modules/codepipeline/variables.tf +++ b/e2e-etl-python/files/modules/codepipeline/variables.tf @@ -63,5 +63,5 @@ variable "aws_region" { variable "repository" { description = "QS bitbucket repository" type = string - default = "e2e-python" + default = "e2e-etl-python" } diff --git a/e2e-python/files/modules/iam_roles/main.tf 
b/e2e-etl-python/files/modules/iam_roles/main.tf similarity index 100% rename from e2e-python/files/modules/iam_roles/main.tf rename to e2e-etl-python/files/modules/iam_roles/main.tf diff --git a/e2e-python/files/modules/iam_roles/outputs.tf b/e2e-etl-python/files/modules/iam_roles/outputs.tf similarity index 100% rename from e2e-python/files/modules/iam_roles/outputs.tf rename to e2e-etl-python/files/modules/iam_roles/outputs.tf diff --git a/e2e-python/files/modules/iam_roles/variables.tf b/e2e-etl-python/files/modules/iam_roles/variables.tf similarity index 100% rename from e2e-python/files/modules/iam_roles/variables.tf rename to e2e-etl-python/files/modules/iam_roles/variables.tf diff --git a/e2e-python/files/modules/s3-bucket/main.tf b/e2e-etl-python/files/modules/s3-bucket/main.tf similarity index 100% rename from e2e-python/files/modules/s3-bucket/main.tf rename to e2e-etl-python/files/modules/s3-bucket/main.tf diff --git a/e2e-python/files/modules/s3-bucket/outputs.tf b/e2e-etl-python/files/modules/s3-bucket/outputs.tf similarity index 100% rename from e2e-python/files/modules/s3-bucket/outputs.tf rename to e2e-etl-python/files/modules/s3-bucket/outputs.tf diff --git a/e2e-python/files/modules/s3-bucket/variables.tf b/e2e-etl-python/files/modules/s3-bucket/variables.tf similarity index 100% rename from e2e-python/files/modules/s3-bucket/variables.tf rename to e2e-etl-python/files/modules/s3-bucket/variables.tf diff --git a/e2e-python/files/outputs.tf b/e2e-etl-python/files/outputs.tf similarity index 100% rename from e2e-python/files/outputs.tf rename to e2e-etl-python/files/outputs.tf diff --git a/e2e-python/files/pytest.ini b/e2e-etl-python/files/pytest.ini similarity index 100% rename from e2e-python/files/pytest.ini rename to e2e-etl-python/files/pytest.ini diff --git a/e2e-python/files/random.tf b/e2e-etl-python/files/random.tf similarity index 100% rename from e2e-python/files/random.tf rename to e2e-etl-python/files/random.tf diff --git 
a/e2e-python/files/release-manager.yml b/e2e-etl-python/files/release-manager.yml similarity index 100% rename from e2e-python/files/release-manager.yml rename to e2e-etl-python/files/release-manager.yml diff --git a/e2e-python/files/reports/install/.gitkeep b/e2e-etl-python/files/reports/install/.gitkeep similarity index 100% rename from e2e-python/files/reports/install/.gitkeep rename to e2e-etl-python/files/reports/install/.gitkeep diff --git a/e2e-python/files/requirements.txt b/e2e-etl-python/files/requirements.txt similarity index 100% rename from e2e-python/files/requirements.txt rename to e2e-etl-python/files/requirements.txt diff --git a/e2e-python/files/stackmodulesoutputs.tf b/e2e-etl-python/files/stackmodulesoutputs.tf similarity index 100% rename from e2e-python/files/stackmodulesoutputs.tf rename to e2e-etl-python/files/stackmodulesoutputs.tf diff --git a/e2e-python/files/terraform-data.tf b/e2e-etl-python/files/terraform-data.tf similarity index 100% rename from e2e-python/files/terraform-data.tf rename to e2e-etl-python/files/terraform-data.tf diff --git a/e2e-python/files/test/fixtures/default/backend.tf b/e2e-etl-python/files/test/fixtures/default/backend.tf similarity index 100% rename from e2e-python/files/test/fixtures/default/backend.tf rename to e2e-etl-python/files/test/fixtures/default/backend.tf diff --git a/e2e-python/files/test/fixtures/default/main.tf b/e2e-etl-python/files/test/fixtures/default/main.tf similarity index 100% rename from e2e-python/files/test/fixtures/default/main.tf rename to e2e-etl-python/files/test/fixtures/default/main.tf diff --git a/e2e-python/files/test/fixtures/default/moduleoutputs.tf b/e2e-etl-python/files/test/fixtures/default/moduleoutputs.tf similarity index 100% rename from e2e-python/files/test/fixtures/default/moduleoutputs.tf rename to e2e-etl-python/files/test/fixtures/default/moduleoutputs.tf diff --git a/e2e-python/files/test/fixtures/default/random.tf 
b/e2e-etl-python/files/test/fixtures/default/random.tf similarity index 100% rename from e2e-python/files/test/fixtures/default/random.tf rename to e2e-etl-python/files/test/fixtures/default/random.tf diff --git a/e2e-python/files/test/integration/default/controls/blueprints.rb b/e2e-etl-python/files/test/integration/default/controls/blueprints.rb similarity index 100% rename from e2e-python/files/test/integration/default/controls/blueprints.rb rename to e2e-etl-python/files/test/integration/default/controls/blueprints.rb diff --git a/e2e-python/files/test/integration/default/controls/default.rb b/e2e-etl-python/files/test/integration/default/controls/default.rb similarity index 100% rename from e2e-python/files/test/integration/default/controls/default.rb rename to e2e-etl-python/files/test/integration/default/controls/default.rb diff --git a/e2e-python/files/test/integration/default/files/.gitkeep b/e2e-etl-python/files/test/integration/default/files/.gitkeep similarity index 100% rename from e2e-python/files/test/integration/default/files/.gitkeep rename to e2e-etl-python/files/test/integration/default/files/.gitkeep diff --git a/e2e-python/files/test/integration/default/inspec.yml b/e2e-etl-python/files/test/integration/default/inspec.yml similarity index 100% rename from e2e-python/files/test/integration/default/inspec.yml rename to e2e-etl-python/files/test/integration/default/inspec.yml diff --git a/e2e-python/files/test/integration/default/inspec.yml.tmpl b/e2e-etl-python/files/test/integration/default/inspec.yml.tmpl similarity index 100% rename from e2e-python/files/test/integration/default/inspec.yml.tmpl rename to e2e-etl-python/files/test/integration/default/inspec.yml.tmpl diff --git a/e2e-python/files/test/integration/default/libraries/aws.rb b/e2e-etl-python/files/test/integration/default/libraries/aws.rb similarity index 100% rename from e2e-python/files/test/integration/default/libraries/aws.rb rename to 
e2e-etl-python/files/test/integration/default/libraries/aws.rb diff --git a/e2e-python/files/test/integration/default/libraries/fixture_data.rb b/e2e-etl-python/files/test/integration/default/libraries/fixture_data.rb similarity index 100% rename from e2e-python/files/test/integration/default/libraries/fixture_data.rb rename to e2e-etl-python/files/test/integration/default/libraries/fixture_data.rb diff --git a/e2e-python/files/test/integration/default/libraries/terraform_data.rb b/e2e-etl-python/files/test/integration/default/libraries/terraform_data.rb similarity index 100% rename from e2e-python/files/test/integration/default/libraries/terraform_data.rb rename to e2e-etl-python/files/test/integration/default/libraries/terraform_data.rb diff --git a/e2e-python/files/tests/acceptance/great_expectations/.gitignore b/e2e-etl-python/files/tests/acceptance/great_expectations/.gitignore similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/.gitignore rename to e2e-etl-python/files/tests/acceptance/great_expectations/.gitignore diff --git a/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml b/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml rename to e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_athena_checkpoint.yml diff --git a/e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml b/e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml rename to e2e-etl-python/files/tests/acceptance/great_expectations/checkpoints/Demo_person_checkpoint.yml diff --git 
a/e2e-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json b/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json rename to e2e-etl-python/files/tests/acceptance/great_expectations/expectations/athena_validation_suite.json diff --git a/e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json b/e2e-etl-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json rename to e2e-etl-python/files/tests/acceptance/great_expectations/expectations/person_validation_suite.json diff --git a/e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml b/e2e-etl-python/files/tests/acceptance/great_expectations/great_expectations.yml similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/great_expectations.yml rename to e2e-etl-python/files/tests/acceptance/great_expectations/great_expectations.yml diff --git a/e2e-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css b/e2e-etl-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css rename to e2e-etl-python/files/tests/acceptance/great_expectations/plugins/custom_data_docs/styles/data_docs_custom_styles.css diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py b/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py similarity index 100% rename 
from e2e-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py rename to e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/post_requisites.py diff --git a/e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py b/e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py similarity index 100% rename from e2e-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py rename to e2e-etl-python/files/tests/acceptance/great_expectations/test_preparation/pre_requisites.py diff --git a/e2e-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py b/e2e-etl-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py similarity index 100% rename from e2e-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py rename to e2e-etl-python/files/tests/acceptance/pytest/Demo_allure_pytest_test.py diff --git a/e2e-python/files/tests/installation/installation_test.py b/e2e-etl-python/files/tests/installation/installation_test.py similarity index 100% rename from e2e-python/files/tests/installation/installation_test.py rename to e2e-etl-python/files/tests/installation/installation_test.py diff --git a/e2e-python/files/tests/integration/integration_test.py b/e2e-etl-python/files/tests/integration/integration_test.py similarity index 100% rename from e2e-python/files/tests/integration/integration_test.py rename to e2e-etl-python/files/tests/integration/integration_test.py diff --git a/e2e-python/files/utils/checkpoints_executions.py b/e2e-etl-python/files/utils/checkpoints_executions.py similarity index 100% rename from e2e-python/files/utils/checkpoints_executions.py rename to e2e-etl-python/files/utils/checkpoints_executions.py diff --git a/e2e-python/files/utils/json2JUnit.py b/e2e-etl-python/files/utils/json2JUnit.py similarity index 100% rename from e2e-python/files/utils/json2JUnit.py rename to 
e2e-etl-python/files/utils/json2JUnit.py diff --git a/e2e-python/files/variables.tf b/e2e-etl-python/files/variables.tf similarity index 98% rename from e2e-python/files/variables.tf rename to e2e-etl-python/files/variables.tf index 0404142ee..020005f4d 100644 --- a/e2e-python/files/variables.tf +++ b/e2e-etl-python/files/variables.tf @@ -66,7 +66,7 @@ variable "environment" { variable "repository" { description = "QS bitbucket repository" type = string - default = "e2e-python" + default = "e2e-etl-python" } variable "branch_name" { diff --git a/e2e-python/files/versions.tf b/e2e-etl-python/files/versions.tf similarity index 100% rename from e2e-python/files/versions.tf rename to e2e-etl-python/files/versions.tf diff --git a/e2e-python/prod.yml.template b/e2e-etl-python/prod.yml.template similarity index 100% rename from e2e-python/prod.yml.template rename to e2e-etl-python/prod.yml.template diff --git a/e2e-python/test.yml.template b/e2e-etl-python/test.yml.template similarity index 100% rename from e2e-python/test.yml.template rename to e2e-etl-python/test.yml.template diff --git a/e2e-python/testdata/golden/jenkins-build-stages.json b/e2e-etl-python/testdata/golden/jenkins-build-stages.json similarity index 100% rename from e2e-python/testdata/golden/jenkins-build-stages.json rename to e2e-etl-python/testdata/golden/jenkins-build-stages.json diff --git a/e2e-python/testdata/golden/jenkins-provision-stages.json b/e2e-etl-python/testdata/golden/jenkins-provision-stages.json similarity index 100% rename from e2e-python/testdata/golden/jenkins-provision-stages.json rename to e2e-etl-python/testdata/golden/jenkins-provision-stages.json diff --git a/e2e-python/testdata/golden/sonar-scan.json b/e2e-etl-python/testdata/golden/sonar-scan.json similarity index 100% rename from e2e-python/testdata/golden/sonar-scan.json rename to e2e-etl-python/testdata/golden/sonar-scan.json diff --git a/e2e-python/testdata/steps.yml b/e2e-etl-python/testdata/steps.yml similarity index 
93% rename from e2e-python/testdata/steps.yml rename to e2e-etl-python/testdata/steps.yml index 0b5433255..293fcfcbc 100644 --- a/e2e-python/testdata/steps.yml +++ b/e2e-etl-python/testdata/steps.yml @@ -1,4 +1,4 @@ -componentID: e2e-python +componentID: e2e-etl-python steps: - type: provision provisionParams: diff --git a/e2e-python/testing.yml.template b/e2e-etl-python/testing.yml.template similarity index 100% rename from e2e-python/testing.yml.template rename to e2e-etl-python/testing.yml.template From 82c4273a6cd170321e272c8689a103ee67009a05 Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 6 Feb 2024 11:46:40 +0100 Subject: [PATCH 28/32] duplicated code don't really know why --- .github/dependabot.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 530f34db9..9c253b318 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,9 +8,6 @@ updates: schedule: # Check for updates to GitHub Actions every week interval: "weekly" - labels: - - "dependencies" - - "skip changelog" labels: - "dependencies" - "skip changelog" \ No newline at end of file From fb51f560def83da2ef765b5f21f350b3d1ad716c Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 6 Feb 2024 13:04:50 +0100 Subject: [PATCH 29/32] update index and nav docs adding reference e2e-etl-python --- docs/modules/quickstarters/nav.adoc | 1 + docs/modules/quickstarters/pages/index.adoc | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/modules/quickstarters/nav.adoc b/docs/modules/quickstarters/nav.adoc index e8a0583a7..600b9ba4e 100644 --- a/docs/modules/quickstarters/nav.adoc +++ b/docs/modules/quickstarters/nav.adoc @@ -13,6 +13,7 @@ ** xref:quickstarters:ds-rshiny.adoc[Data Science RShiny app] ** xref:quickstarters:ds-streamlit.adoc[Data Science Streamlit app] ** xref:quickstarters:e2e-cypress.adoc[Cypress E2E testing] +** xref:quickstarters:e2e-etl-python.adoc[ETL Python E2E testing] ** xref:quickstarters:e2e-spock-geb.adoc[Spock, 
Geb and Unirest E2E testing] ** xref:quickstarters:inf-terraform-aws.adoc[INF Terraform AWS] ** xref:quickstarters:inf-terraform-azure.adoc[INF Terraform AZURE] diff --git a/docs/modules/quickstarters/pages/index.adoc b/docs/modules/quickstarters/pages/index.adoc index 0a447eaa9..284808a55 100644 --- a/docs/modules/quickstarters/pages/index.adoc +++ b/docs/modules/quickstarters/pages/index.adoc @@ -42,6 +42,7 @@ Quickstarters are used from the https://github.com/opendevstack/ods-provisioning === E2E Test Quickstarter * xref::e2e-cypress.adoc[E2E test - Cypress] * xref::e2e-spock-geb.adoc[E2E test - Spock / Geb] +* xref::e2e-etl-python.adoc[E2E test - ETL Python] === Infrastructure Terraform Quickstarter * xref::inf-terraform-aws.adoc[AWS deployments utilizing terraform tooling] From ee69f568ea6c2f54519b21f093c21aeeed340bf0 Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 7 Feb 2024 20:26:57 +0100 Subject: [PATCH 30/32] remove test folder remove devcontainer remove test references from the Makefile --- e2e-etl-python/Jenkinsfile.template | 1 - .../.devcontainer/devcontainer.json.template | 3 - e2e-etl-python/files/Makefile | 30 +--- .../files/test/fixtures/default/backend.tf | 5 - .../files/test/fixtures/default/main.tf | 16 -- .../test/fixtures/default/moduleoutputs.tf | 5 - .../files/test/fixtures/default/random.tf | 9 - .../default/controls/blueprints.rb | 1 - .../integration/default/controls/default.rb | 25 --- .../test/integration/default/files/.gitkeep | 0 .../files/test/integration/default/inspec.yml | 10 -- .../test/integration/default/inspec.yml.tmpl | 8 - .../test/integration/default/libraries/aws.rb | 161 ------------------ .../default/libraries/fixture_data.rb | 49 ------ .../default/libraries/terraform_data.rb | 15 -- 15 files changed, 2 insertions(+), 336 deletions(-) delete mode 100644 e2e-etl-python/files/.devcontainer/devcontainer.json.template delete mode 100644 e2e-etl-python/files/test/fixtures/default/backend.tf delete mode 100644 
e2e-etl-python/files/test/fixtures/default/main.tf delete mode 100644 e2e-etl-python/files/test/fixtures/default/moduleoutputs.tf delete mode 100644 e2e-etl-python/files/test/fixtures/default/random.tf delete mode 100644 e2e-etl-python/files/test/integration/default/controls/blueprints.rb delete mode 100644 e2e-etl-python/files/test/integration/default/controls/default.rb delete mode 100644 e2e-etl-python/files/test/integration/default/files/.gitkeep delete mode 100644 e2e-etl-python/files/test/integration/default/inspec.yml delete mode 100644 e2e-etl-python/files/test/integration/default/inspec.yml.tmpl delete mode 100644 e2e-etl-python/files/test/integration/default/libraries/aws.rb delete mode 100644 e2e-etl-python/files/test/integration/default/libraries/fixture_data.rb delete mode 100644 e2e-etl-python/files/test/integration/default/libraries/terraform_data.rb diff --git a/e2e-etl-python/Jenkinsfile.template b/e2e-etl-python/Jenkinsfile.template index 531ab53ec..86c76e602 100644 --- a/e2e-etl-python/Jenkinsfile.template +++ b/e2e-etl-python/Jenkinsfile.template @@ -154,7 +154,6 @@ def retrieveReportsFromAWS(def context, results3_name) { sh "aws s3 cp s3://${results3_name}/pytest_results/integration/integration_allure_report_complete.html ./build/test-results/test/artifacts/integration/integration_pytest_report.html" sh "ls build/test-results/test" - sh "rm build/test-results/test/default.xml" } def addVars2envJsonFile(def context) { diff --git a/e2e-etl-python/files/.devcontainer/devcontainer.json.template b/e2e-etl-python/files/.devcontainer/devcontainer.json.template deleted file mode 100644 index f0810403b..000000000 --- a/e2e-etl-python/files/.devcontainer/devcontainer.json.template +++ /dev/null @@ -1,3 +0,0 @@ -{ - "image": "ghcr.io/nichtraunzer/terrarium:latest" -} diff --git a/e2e-etl-python/files/Makefile b/e2e-etl-python/files/Makefile index f829aae54..ae981f2d6 100644 --- a/e2e-etl-python/files/Makefile +++ b/e2e-etl-python/files/Makefile @@ -48,27 
+48,7 @@ prep-test: .PHONY: test # Run (pre-deployment) tests. test: install-test-deps - @$(call check_aws_credentials) - - # output aws account and user id for testing - aws sts get-caller-identity --output text | tee $(INSTALL_REPORT_HOME)/aws_testing_account.log - - # Remove any previously created Terraform test artefacts. - for dir in .terraform terraform.tfstate.d; do \ - find test/fixtures -name $$dir -print0 | xargs -0 rm -rf; \ - done \ - - inspec_profiles=$$(ls -1 ./test/integration); \ - for fdir in $$inspec_profiles; do \ - mkdir -p test/integration/$$fdir/files ; \ - ./.venv/bin/python3 ./.venv/bin/hcl2tojson test/fixtures/$$fdir/main.tf test/integration/$$fdir/files/main.json; \ - done \ - - # See https://github.com/test-kitchen/test-kitchen/issues/1436 for why a simple `bundle exec kitchen test` is not an option. - for suite in $$(bundle exec kitchen list --bare); do \ - bundle exec kitchen verify $$suite || { bundle exec kitchen destroy $$suite; exit 1; }; \ - bundle exec kitchen destroy $$suite; \ - done + #Removed for the e2e-etl-python QS .PHONY: plan # Plan infrastructure deployment. @@ -91,13 +71,7 @@ deploy: init-terraform plan .PHONY: deployment-test # Run (post-deployment) tests. 
deployment-test: install-test-deps - @$(call check_aws_credentials) - - sh ./lib/scripts/createstackoutputs2yml.sh - inspec_profiles=$$(ls -1 ./test/integration); \ - for profile in $$inspec_profiles; do \ - JSON_VARS_FILE="$(PWD)/terraform.tfvars.json" bundle exec cinc-auditor exec ./test/integration/$$profile --no-create-lockfile --no-distinct-exit --input-file ./test/integration/$$profile/files/inputs-from-tfo-stack.yml --reporter=cli junit2:build/test-results/test/$$profile.xml json:reports/install/data/inspec/post-install/$$profile.json --target aws://; \ - done + #Removed for the e2e-etl-python QS .PHONY: install-report install-report: diff --git a/e2e-etl-python/files/test/fixtures/default/backend.tf b/e2e-etl-python/files/test/fixtures/default/backend.tf deleted file mode 100644 index 2314bd81b..000000000 --- a/e2e-etl-python/files/test/fixtures/default/backend.tf +++ /dev/null @@ -1,5 +0,0 @@ -terraform { - backend "local" { - } -} - diff --git a/e2e-etl-python/files/test/fixtures/default/main.tf b/e2e-etl-python/files/test/fixtures/default/main.tf deleted file mode 100644 index deb46277d..000000000 --- a/e2e-etl-python/files/test/fixtures/default/main.tf +++ /dev/null @@ -1,16 +0,0 @@ -locals { - name = "stack-aws-quickstarter-test" - tags = { - Name = local.name - } -} - -data "aws_region" "current" {} - -module "stack-aws-quickstarter-test" { - # module name and value of name parameter have to be equal - source = "../../.." - - name = local.name - meta_environment = "DEVELOPMENT" -} diff --git a/e2e-etl-python/files/test/fixtures/default/moduleoutputs.tf b/e2e-etl-python/files/test/fixtures/default/moduleoutputs.tf deleted file mode 100644 index b04eb0d8e..000000000 --- a/e2e-etl-python/files/test/fixtures/default/moduleoutputs.tf +++ /dev/null @@ -1,5 +0,0 @@ -# This file has been created automatically. 
- -output "module_ods_quickstarters" { - value = module.stack-aws-quickstarter-test.* -} diff --git a/e2e-etl-python/files/test/fixtures/default/random.tf b/e2e-etl-python/files/test/fixtures/default/random.tf deleted file mode 100644 index 0cc81ef09..000000000 --- a/e2e-etl-python/files/test/fixtures/default/random.tf +++ /dev/null @@ -1,9 +0,0 @@ -provider "random" {} - -resource "random_id" "id" { - byte_length = 4 -} - -locals { - id = random_id.id.hex -} diff --git a/e2e-etl-python/files/test/integration/default/controls/blueprints.rb b/e2e-etl-python/files/test/integration/default/controls/blueprints.rb deleted file mode 100644 index 04b6ef760..000000000 --- a/e2e-etl-python/files/test/integration/default/controls/blueprints.rb +++ /dev/null @@ -1 +0,0 @@ -# This file has been created automatically. diff --git a/e2e-etl-python/files/test/integration/default/controls/default.rb b/e2e-etl-python/files/test/integration/default/controls/default.rb deleted file mode 100644 index cad5d7f69..000000000 --- a/e2e-etl-python/files/test/integration/default/controls/default.rb +++ /dev/null @@ -1,25 +0,0 @@ -require_relative '../libraries/terraform_data.rb' -require_relative '../libraries/fixture_data.rb' -require_relative '../libraries/aws.rb' - -t = SpecHelper::TerraformData.new -id = t['id'] -name = t['name'] -tags = { :Name => name + '-' + id } - -f = SpecHelper::FixtureData.new.for_module(name) - -control 'stack' do - impact 1.0 - title "Test Suite: 'Stack'" - desc "This test suite asserts the correct functionality of the stack under test." 
- tag name - - describe aws_region(region_name: t['current_region']) do - its('endpoint') { should be_in ['ec2.eu-west-1.amazonaws.com','ec2.us-east-1.amazonaws.com'] } - end - - describe "Stack Testing" do - it { expect(true).to be_truthy } - end -end diff --git a/e2e-etl-python/files/test/integration/default/files/.gitkeep b/e2e-etl-python/files/test/integration/default/files/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/e2e-etl-python/files/test/integration/default/inspec.yml b/e2e-etl-python/files/test/integration/default/inspec.yml deleted file mode 100644 index 4ae1a130d..000000000 --- a/e2e-etl-python/files/test/integration/default/inspec.yml +++ /dev/null @@ -1,10 +0,0 @@ -# This file has been created automatically. - ---- -name: stack -supports: - - platform: aws -depends: - - name: inspec-aws - git: https://github.com/inspec/inspec-aws - tag: v1.83.60 diff --git a/e2e-etl-python/files/test/integration/default/inspec.yml.tmpl b/e2e-etl-python/files/test/integration/default/inspec.yml.tmpl deleted file mode 100644 index c4935f920..000000000 --- a/e2e-etl-python/files/test/integration/default/inspec.yml.tmpl +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: stack -supports: - - platform: aws -depends: - - name: inspec-aws - git: https://github.com/inspec/inspec-aws - tag: v1.83.60 diff --git a/e2e-etl-python/files/test/integration/default/libraries/aws.rb b/e2e-etl-python/files/test/integration/default/libraries/aws.rb deleted file mode 100644 index d78efd511..000000000 --- a/e2e-etl-python/files/test/integration/default/libraries/aws.rb +++ /dev/null @@ -1,161 +0,0 @@ -require 'aws-sdk' -require 'ipaddr' -require 'singleton' - -module SpecHelper - class AWS - # See https://docs.aws.amazon.com/sdkforruby/api/Aws.html - class SDK - include Singleton - - def client(clazz, region = ENV['AWS_DEFAULT_REGION']) - client_clazz = Module.const_get(clazz.to_s + '::Client') - client_clazz.new(region: region) - end - - def resource(clazz, region = 
ENV['AWS_DEFAULT_REGION']) - client = client(clazz, region) - - resource_clazz = Module.const_get(clazz.to_s + '::Resource') - resource_clazz.new(client: client) - end - end - - def self.sdk - return SDK.instance - end - - def self.convert_aws_tags_to_hash(tags) - results = {} - - tags.each do |tag| - results[tag.key] = tag.value - end - - results - end - - def self.convert_tags_hash_to_array(tags) - tags.to_a.map do |tag| - { key: tag.first.to_s, value: tag.last } - end - end - - def self.convert_tags_hash_to_aws_filters(tags) - tags.to_a.map do |tag| - { name: "tag:#{tag.first}", values: [tag.last] } - end - end - - def self.filter_resources(resource, type, filters) - matches = resource.send(type, { filters: filters }).map(&:id) - - if matches.count == 1 - matches[0] - elsif matches.count == 0 - STDERR.puts "Error: could not find any resources of type '#{type}' with tag:Name = '#{name}'" - [] - else - STDERR.puts "Error: there is more than one resource of type '#{type}' with tag:Name = '#{name}'" - matches - end - end - - def self.get_asg_name_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) - client = self.sdk.client(Aws::AutoScaling, region) - - # Convert the incoming tags into an array - tags = convert_tags_hash_to_array(tags) - - names = client.describe_auto_scaling_groups().data['auto_scaling_groups'].find_all { |group| - # Convert the auto scaling group's tags into an array - group_tags = group.tags.map do |tag| - { key: tag.key, value: tag.value } - end - - # Check if all incoming tags are present in the auto scaling group - (tags - group_tags).empty? 
- }.map(&:auto_scaling_group_name) - - if names.count == 1 - names[0] - elsif names.count == 0 - STDERR.puts "Error: could not find any auto scaling group with tags = '#{tags}'" - [] - else - STDERR.puts "Error: there is more than one auto scaling group with tags = '#{tags}'" - names - end - end - - def self.get_ec2_instance_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) - filters = convert_tags_hash_to_aws_filters(tags) - filters << { name: 'instance-state-name', values: ['pending', 'running'] } - - # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#instances-instance_method. - filter_resources(self.sdk.resource(Aws::EC2, region), 'instances', filters) - end - - def self.get_rds_instance_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) - client = self.sdk.client(Aws::RDS, region) - - # Convert the incoming tags into an array - tags = convert_tags_hash_to_array(tags) - - ids = client.describe_db_instances().db_instances.find_all { |instance| - resp = client.list_tags_for_resource({ :resource_name => instance.db_instance_arn }) - if resp.nil? or resp.tag_list.empty? - STDERR.puts "Error: could not find any RDS database instance with tags = '#{tags}'" - return [] - end - - # Check if all incoming tags are present in the RDS database instance - instance_tags = convert_tags_hash_to_array(convert_aws_tags_to_hash(resp.tag_list)) - (tags - instance_tags).empty? - }.map(&:db_instance_identifier) - - if ids.count == 1 - ids[0] - elsif ids.count == 0 - STDERR.puts "Error: could not find any RDS database instance with tags = '#{tags}'" - [] - else - STDERR.puts "Error: there is more than one RDS database instance with tags = '#{tags}'" - ids - end - end - - def self.get_security_group_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) - filters = convert_tags_hash_to_aws_filters(tags) - - # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#security_groups-instance_method. 
- filter_resources(self.sdk.resource(Aws::EC2, region), 'security_groups', filters) - end - - def self.get_vpc_id_by_tags(tags, region = ENV['AWS_DEFAULT_REGION']) - filters = convert_tags_hash_to_aws_filters(tags) - - # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#vpcs-instance_method. - filter_resources(self.sdk.resource(Aws::EC2, region), 'vpcs', filters) - end - - def self.get_subnet_ids_by_vpc_id(id, region = ENV['AWS_DEFAULT_REGION']) - # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Resource.html#vpc-instance_method. - vpc = self.sdk.resource(Aws::EC2, region).vpc(id) - - unless vpc.nil? - # See https://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Vpc.html#subnets-instance_method. - vpc.subnets().sort_by { |subnet| - IPAddr.new(subnet.cidr_block) - }.map(&:id) - else - STDERR.puts "Error: could not find a VPC with ID = '#{id}'" - [] - end - end - - private_constant :SDK - private_class_method :convert_tags_hash_to_aws_filters - private_class_method :filter_resources - end -end diff --git a/e2e-etl-python/files/test/integration/default/libraries/fixture_data.rb b/e2e-etl-python/files/test/integration/default/libraries/fixture_data.rb deleted file mode 100644 index 2f9d7ec03..000000000 --- a/e2e-etl-python/files/test/integration/default/libraries/fixture_data.rb +++ /dev/null @@ -1,49 +0,0 @@ -require 'json' - -module SpecHelper - class FixtureData - @data - - def json_vars?() - ENV.has_key?('JSON_VARS_FILE') and ENV['JSON_VARS_FILE'] != '' - end - - def initialize(suite = 'default') - if json_vars? then - @data = JSON.parse(File.read(ENV['JSON_VARS_FILE'])) - else - @data = JSON.parse(File.read('test/integration/' + suite + '/files/main.json')) - extract_first_element_of_array(@data) - end - end - - def locals - json_vars? ? @data : extract_first_element_of_array(@data['locals']) - end - - def for_module(name = nil) - json_vars? ? 
@data : extract_first_element_of_array(@data['module'].select { |x| x[name] }.first[name]) - end - - def for_resource(type = nil, name = nil) - tdata = @data['resource'].select { |x| x[type] } # array having all resources of given type - tdata = tdata.select { |x| x[type][name] }.first # select the item matching resource name - extract_first_element_of_array(tdata[type][name]) # trim given structure - json_vars? ? @data : tdata[type][name] - end - - private :json_vars? - - private - - def extract_first_element_of_array(myhash = nil) - myhash.each do |k, v| - if !(['module', 'resource', 'data'].include? k.to_s) - if v.kind_of?(Array) - myhash[k] = v[0] - end - end - end - end - end -end diff --git a/e2e-etl-python/files/test/integration/default/libraries/terraform_data.rb b/e2e-etl-python/files/test/integration/default/libraries/terraform_data.rb deleted file mode 100644 index ca996c30b..000000000 --- a/e2e-etl-python/files/test/integration/default/libraries/terraform_data.rb +++ /dev/null @@ -1,15 +0,0 @@ -require 'json' - -module SpecHelper - class TerraformData - @data - - def initialize(path = '.terraform-data.json') - @data = JSON.parse(File.read(path)) - end - - def [](key) - @data[key] - end - end -end From b825efcbca7b32fa0333dac6b6e4827a5cd23bc3 Mon Sep 17 00:00:00 2001 From: Your Name Date: Thu, 8 Feb 2024 09:37:44 +0100 Subject: [PATCH 31/32] change comment on the Makefile --- e2e-etl-python/files/Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e-etl-python/files/Makefile b/e2e-etl-python/files/Makefile index ae981f2d6..313c6db9f 100644 --- a/e2e-etl-python/files/Makefile +++ b/e2e-etl-python/files/Makefile @@ -48,7 +48,7 @@ prep-test: .PHONY: test # Run (pre-deployment) tests. test: install-test-deps - #Removed for the e2e-etl-python QS + #Needed for "Infrastructure as Code (IaC)" stage .PHONY: plan # Plan infrastructure deployment. 
@@ -71,7 +71,7 @@ deploy: init-terraform plan .PHONY: deployment-test # Run (post-deployment) tests. deployment-test: install-test-deps - #Removed for the e2e-etl-python QS + #Needed for "Infrastructure as Code (IaC)" stage .PHONY: install-report install-report: From 670540aa89be9cc2a9c71944e7c75b72e6c9ee03 Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 12 Feb 2024 09:39:19 +0100 Subject: [PATCH 32/32] add default value to AWS_REGION --- e2e-etl-python/Jenkinsfile.template | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e-etl-python/Jenkinsfile.template b/e2e-etl-python/Jenkinsfile.template index 86c76e602..88cef29b9 100644 --- a/e2e-etl-python/Jenkinsfile.template +++ b/e2e-etl-python/Jenkinsfile.template @@ -3,7 +3,7 @@ @Library('ods-jenkins-shared-library@@shared_library_ref@') _ node { - aws_region = env.AWS_REGION + aws_region = env.AWS_REGION ?: 'eu-west-1' dockerRegistry = env.DOCKER_REGISTRY }