diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
new file mode 100644
index 000000000000..cfdb219d6114
--- /dev/null
+++ b/.github/workflows/e2e.yml
@@ -0,0 +1,105 @@
# Copyright © 2023 Cask Data, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# This workflow builds a Java project with Maven.
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
# Note: Any changes to this workflow take effect only after they are merged into develop.
name: Build e2e tests

on:
  push:
    branches: [ develop ]
  pull_request:
    branches: [ develop ]
    types: [ opened, synchronize, reopened, labeled ]
  workflow_dispatch:

jobs:
  build:
    runs-on: k8s-runner-e2e
    # We allow builds:
    # 1) When triggered manually
    # 2) When it's a merge into a branch
    # 3) For PRs that are labeled as build and
    #    - It's a code change
    #    - A build label was just added
    # A bit complex, but prevents builds when other labels are manipulated
    if: >
      github.event_name == 'workflow_dispatch'
      || github.event_name == 'push'
      || (contains(github.event.pull_request.labels.*.name, 'build')
          && (github.event.action != 'labeled' || github.event.label.name == 'build')
        )
    strategy:
      matrix:
        module: [cdap-e2e-tests]
      fail-fast: false

    steps:
      - uses: actions/checkout@v3
        with:
          path: plugin
          submodules: 'recursive'
          ref: ${{ github.event.workflow_run.head_sha }}

      # Third-party action pinned to a specific commit SHA
      - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
        if: github.event_name != 'workflow_dispatch' && github.event_name != 'push'
        id: filter
        with:
          working-directory: plugin
          filters: |
            e2e-test:
              - '${{ matrix.module }}/**/e2e-test/**'

      - name: Checkout e2e test repo
        uses: actions/checkout@v3
        with:
          repository: cdapio/cdap-e2e-tests
          path: e2e

      - name: Cache
        uses: actions/cache@v3
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-${{ github.workflow }}

      - name: Run required e2e tests
        if: github.event_name != 'workflow_dispatch' && github.event_name != 'push' && steps.filter.outputs.e2e-test == 'false'
        run: python3 e2e/src/main/scripts/run_e2e_test.py --module ${{ matrix.module }} --testRunner TestRunnerRequired.java

      - name: Run all e2e tests
        if: github.event_name == 'workflow_dispatch' || github.event_name == 'push' || steps.filter.outputs.e2e-test == 'true'
        run: python3 e2e/src/main/scripts/run_e2e_test.py --module ${{ matrix.module }}

      - name: Upload report
        uses: actions/upload-artifact@v3
        if: always()
        with:
          name: Cucumber report - ${{ matrix.module }}
          path: ./**/target/cucumber-reports

      - name: Upload debug files
        uses: actions/upload-artifact@v3
        if: always()
        with:
          name: Debug files - ${{ matrix.module }}
          path: ./**/target/e2e-debug

      - name: Upload files to GCS
        uses: google-github-actions/upload-cloud-storage@v0
        if: always()
        with:
          path: ./plugin
          destination: e2e-tests-cucumber-reports/${{ github.event.repository.name }}/${{ github.ref }}
          glob: '**/target/cucumber-reports/**'
\ No newline at end of file
diff --git a/cdap-e2e-tests/src/e2e-test/features/namespaceadmin/NSAdminDesignTime.feature b/cdap-e2e-tests/src/e2e-test/features/namespaceadmin/NSAdminDesignTime.feature
new file mode 100644
index 000000000000..fd6028e2e5ff
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/features/namespaceadmin/NSAdminDesignTime.feature
@@ -0,0 +1,123 @@
#
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
@Namespaceadmin
Feature: NameSpaceAdmin - Validate nameSpace admin design time scenarios

  @Namespaceadmin
  Scenario: Verify user is able to click on the namespace admin tab and successfully navigate to the page
    Given Open Datafusion Project to configure pipeline
    When Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Verify if user successfully navigated to namespace admin page

  @Namespaceadmin
  Scenario: Validate user is able to open the compute profile page and create a profile for a selected provisioner
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click on create profile button for "default" Namespace
    Then Select a provisioner: "remoteHadoopProvisioner" for the compute profile
    Then Verify the Create a Profile page is loaded for selected provisioner
    Then Enter input plugin property: "profileLabel" with value: "validProfile"
    Then Enter textarea plugin property: "profileDescription" with value: "validDescription"
    Then Enter input plugin property: "host" with value: "testHost"
    Then Enter input plugin property: "user" with value: "testUser"
    Then Enter textarea plugin property: "sshKey" with value: "testSSHKey"
    Then Click on: "Create" button in the properties
    Then Verify the created compute profile: "validProfile" is displayed in system compute profile list

  @Namespaceadmin
  Scenario: Validate user is able to create new namespace preferences and able to delete the added namespace preferences successfully
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click "preferences" tab from Configuration page for "default" Namespace
    Then Click on edit namespace preferences to set namespace preferences
    Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences1"
    Then Click on the Save & Close preferences button
    Then Click on edit namespace preferences to set namespace preferences
    Then Delete the preferences
    Then Click on the Save & Close preferences button

  Scenario: Validate user is able to add multiple namespace preferences inside namespace admin successfully
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click "preferences" tab from Configuration page for "default" Namespace
    Then Click on edit namespace preferences to set namespace preferences
    Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences2"
    Then Click on the Save & Close preferences button
    Then Click on edit namespace preferences to set namespace preferences
    Then Delete the preferences
    Then Delete the preferences
    Then Click on the Save & Close preferences button

  Scenario: Validate user is able to reset the namespace preferences added inside namespace admin successfully
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click "preferences" tab from Configuration page for "default" Namespace
    Then Click on edit namespace preferences to set namespace preferences
    Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences1"
    Then Reset the preferences
    Then Verify the reset is successful for added preferences

  Scenario: To verify the validation error message with invalid cluster name
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click on create profile button for "default" Namespace
    Then Select a provisioner: "existingDataProc" for the compute profile
    Then Enter input plugin property: "profileLabel" with value: "validProfile"
    Then Enter textarea plugin property: "profileDescription" with value: "validDescription"
    Then Enter input plugin property: "clusterName" with value: "invalidClusterName"
    Then Click on: "Create" button in the properties
    Then Verify that the compute profile is displaying an error message: "errorInvalidClusterName" on the footer

  Scenario: To verify the validation error message with invalid profile name
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click on create profile button for "default" Namespace
    Then Select a provisioner: "existingDataProc" for the compute profile
    Then Enter input plugin property: "profileLabel" with value: "invalidProfile"
    Then Enter textarea plugin property: "profileDescription" with value: "validDescription"
    Then Enter input plugin property: "clusterName" with value: "validClusterName"
    Then Click on: "Create" button in the properties
    Then Verify that the compute profile is displaying an error message: "errorInvalidProfileName" on the footer

  Scenario: To verify the validation error message with invalid namespace name
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on Namespace dropdown button
    Then Click on the Add Namespace tab
    Then Enter the New Namespace Name with value: "invalidNamespaceName"
    Then Enter the Namespace Description with value: "validNamespaceDescription"
    Then Click on: "Finish" button in the properties
    Then Verify the failed error message: "errorInvalidNamespace" displayed on dialog box

  Scenario: Validate user is able to create new namespace from hamburger menu and switch to newly created namespace
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on Namespace dropdown button
    Then Click on the Add Namespace tab
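    # The namespace name and description used below are defined in pluginParameters.properties
    # (validNamespaceName=AutoTest, validNamespaceDescription=Test Description).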
    Then Enter the New Namespace Name with value: "validNamespaceName"
    Then Enter the Namespace Description with value: "validNamespaceDescription"
    Then Click on: "Finish" button in the properties
    Then Switch to the newly created Namespace
    Then Click on the Hamburger bar on the left panel
    Then Verify the namespace is switched to "validNamespaceName" successfully
diff --git a/cdap-e2e-tests/src/e2e-test/features/namespaceadmin/NSAdminRunTime.feature b/cdap-e2e-tests/src/e2e-test/features/namespaceadmin/NSAdminRunTime.feature
new file mode 100644
index 000000000000..e26a268a2c33
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/features/namespaceadmin/NSAdminRunTime.feature
@@ -0,0 +1,103 @@
#
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
@Namespaceadmin
Feature: NameSpaceAdmin - Validate nameSpace admin run time scenarios

  @BQ_SOURCE_TEST @BQ_SINK_TEST
  Scenario: To verify if user is able to run a pipeline successfully using the namespace preferences
    Given Open Datafusion Project to configure pipeline
    Then Click on the Hamburger bar on the left panel
    Then Click on NameSpace Admin link from the menu
    Then Click "preferences" tab from Configuration page for "default" Namespace
    Then Click on edit namespace preferences to set namespace preferences
    Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences2"
    Then Click on the Save & Close preferences button
    Then Click on the Hamburger bar on the left panel
    Then Select navigation item: "studio" from the Hamburger menu list
    When Expand Plugin group in the LHS plugins list: "Source"
    When Select plugin: "BigQuery" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "BigQuery" from the plugins list as: "Sink"
    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
    Then Navigate to the properties page of plugin: "BigQuery"
    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
    Then Click on the Macro button of Property: "projectId" and set the value to: "projectId"
    Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "datasetprojectId"
    Then Enter input plugin property: "dataset" with value: "dataset"
    Then Enter input plugin property: "table" with value: "bqSourceTable"
    Then Validate "BigQuery" plugin properties
    Then Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "BigQuery2"
    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
    Then Click on the Macro button of Property: "projectId" and set the value to: "projectId"
    Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "datasetprojectId"
    Then Enter input plugin property: "dataset" with value: "dataset"
    Then Enter input plugin property: "table" with value: "bqTargetTable"
    Then Validate "BigQuery" plugin properties
    Then Close the Plugin Properties page
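    # Note: projectId and datasetProjectId are set as macros above, so at run time they resolve from the
    # namespace preferences configured earlier in this scenario (nameSpacePreferences2 supplies
    # projectId and datasetprojectId).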
+ Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + + @BQ_SOURCE_TEST @BQ_SINK_TEST + Scenario: To verify if user is able to create a connection from namespace admin and configure it for required plugins + Given Open Datafusion Project to configure pipeline + Then Click on the Hamburger bar on the left panel + Then Click on NameSpace Admin link from the menu + Then Click "connections" tab from Configuration page for "default" Namespace + Then Click on the Add Connection button + Then Add connection type as "bqConnection" and provide a "ConnectionName" + Then Click on the Test Connection button + Then Click on the Create button + Then Click on the Hamburger bar on the left panel + Then Select navigation item: "studio" from the Hamburger menu list + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Click plugin property: "switch-useConnection" + Then Click on the Browse Connections button + Then Select connection: "ConnectionName" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Browse button inside plugin properties + Then Click SELECT button inside connection data row with name: "dataset" + Then Wait till connection data loading completes with a timeout of 60 seconds + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Click plugin property: "useConnection" + Then Click on the Browse Connections button + Then Select connection: "ConnectionName" + Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName" + Then Click on the Browse button inside plugin properties + Then Click SELECT button inside connection data row with name: "dataset" + Then Wait till connection data loading completes with a timeout of 60 seconds + Then Verify input plugin property: "dataset" contains value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/namespaceadmin/TestRunner.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/namespaceadmin/TestRunner.java new file mode 100644 index 000000000000..4233334fbcae --- /dev/null +++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/namespaceadmin/TestRunner.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package io.cdap.cdap.namespaceadmin;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute namespace admin related test cases.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
  features = {"src/e2e-test/features"},
  glue = {"io.cdap.cdap.stepsdesign", "stepsdesign"},
  // Must match the tag used in the feature files (@Namespaceadmin); Cucumber tags are case-sensitive.
  tags = {"@Namespaceadmin"},
  plugin = {"pretty", "html:target/cucumber-html-report/namespaceadmin",
    "json:target/cucumber-reports/cucumber-namespaceadmin.json",
    "junit:target/cucumber-reports/cucumber-namespaceadmin.xml"}
)
public class TestRunner {
}
\ No newline at end of file
diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/namespaceadmin/package-info.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/namespaceadmin/package-info.java
new file mode 100644
index 000000000000..6927c466231e
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/namespaceadmin/package-info.java
@@ -0,0 +1,20 @@
/*
 * Copyright © 2023 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

/**
 * Package contains the runners for nameSpace admin features.
 */
package io.cdap.cdap.namespaceadmin;
diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/stepsdesign/TestSetupHooks.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/stepsdesign/TestSetupHooks.java
new file mode 100644
index 000000000000..40e5159d6fcc
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/stepsdesign/TestSetupHooks.java
@@ -0,0 +1,94 @@
/*
 * Copyright © 2023 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.cdap.cdap.stepsdesign;

import com.google.cloud.bigquery.BigQueryException;
import io.cdap.e2e.utils.BigQueryClient;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cucumber.java.After;
import io.cucumber.java.Before;
import java.io.IOException;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import stepsdesign.BeforeActions;

/**
 * GCP test hooks.
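 * Creates a temporary BigQuery source table with test data before scenarios tagged @BQ_SOURCE_TEST,
 * generates a target table name before scenarios tagged @BQ_SINK_TEST, and drops both tables again
 * once the scenario finishes.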
+ */ +public class TestSetupHooks { + + public static String bqTargetTable = StringUtils.EMPTY; + public static String bqSourceTable = StringUtils.EMPTY; + public static String datasetName = PluginPropertyUtils.pluginProp("dataset"); + + @Before(order = 1, value = "@BQ_SINK_TEST") + public static void setTempTargetBQTableName() { + bqTargetTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTable); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTable); + } + + @After(order = 1, value = "@BQ_SINK_TEST") + public static void deleteTempTargetBQTable() throws IOException, InterruptedException { + try { + BigQueryClient.dropBqQuery(bqTargetTable); + PluginPropertyUtils.removePluginProp("bqTargetTable"); + BeforeActions.scenario.write("BQ Target table - " + bqTargetTable + " deleted successfully"); + bqTargetTable = StringUtils.EMPTY; + } catch (BigQueryException e) { + if (e.getMessage().contains("Not found: Table")) { + BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " does not exist"); + } else { + Assert.fail(e.getMessage()); + } + } + } + + /** + * Create BigQuery table with 3 columns (Id - Int, Value - Int, UID - string) containing random testdata. + * Sample row: + * Id | Value | UID + * 22 | 968 | 245308db-6088-4db2-a933-f0eea650846a + */ + @Before(order = 1, value = "@BQ_SOURCE_TEST") + public static void createTempSourceBQTable() throws IOException, InterruptedException { + bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_"); + StringBuilder records = new StringBuilder(StringUtils.EMPTY); + for (int index = 2; index <= 25; index++) { + records.append(" (").append(index).append(", ").append((int) (Math.random() * 1000 + 1)).append(", '") + .append(UUID.randomUUID()).append("'), "); + } + BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` as " + + "SELECT * FROM UNNEST([ " + + " STRUCT(1 AS Id, " + ((int) (Math.random() * 1000 + 1)) + " as Value, " + + "'" + UUID.randomUUID() + "' as UID), " + + records + + " (26, " + ((int) (Math.random() * 1000 + 1)) + ", " + + "'" + UUID.randomUUID() + "') " + + "])"); + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " created successfully"); + } + + @After(order = 1, value = "@BQ_SOURCE_TEST") + public static void deleteTempSourceBQTable() throws IOException, InterruptedException { + BigQueryClient.dropBqQuery(bqSourceTable); + PluginPropertyUtils.removePluginProp("bqSourceTable"); + BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully"); + bqSourceTable = StringUtils.EMPTY; + } +} \ No newline at end of file diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/stepsdesign/package-info.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/stepsdesign/package-info.java new file mode 100644 index 000000000000..d293fef4b678 --- /dev/null +++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/stepsdesign/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +/** + * Package contains the stepDesign for the common features. + */ +package io.cdap.cdap.stepsdesign; diff --git a/cdap-e2e-tests/src/e2e-test/resources/errorMessage.properties b/cdap-e2e-tests/src/e2e-test/resources/errorMessage.properties new file mode 100644 index 000000000000..7ed281255812 --- /dev/null +++ b/cdap-e2e-tests/src/e2e-test/resources/errorMessage.properties @@ -0,0 +1,5 @@ +validationResetSuccessMessage=Reset Successful +errorInvalidClusterName=Unable to get credentials from the environment. Please explicitly set the account key. +errorInvalidProfileName=Invalid profile ID: 6*&gjh879. Should only contain alphanumeric characters and _ or -. +errorInvalidNamespace=Failed to Add namespace +validationSuccessMessage=No errors found. \ No newline at end of file diff --git a/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties b/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties new file mode 100644 index 000000000000..56e140e134b8 --- /dev/null +++ b/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties @@ -0,0 +1,20 @@ +systemPreferences=system-prefs-accordion +keyValue=key-value-pair- +existingDataProc=provisioner-gcp-existing-dataproc +gcpDataProc=provisioner-gcp-dataproc +remoteHadoopProvisioner=provisioner-remote-hadoop +Create=profile-create-btn +Finish=wizard-finish-btn +Next=wizard-next-btn +Previous=wizard-previous-btn +amazonEMRProvisioner=provisioner-aws-emr +studio=pipeline-studio +projectId=project +datasetProjectId=datasetProject +connection=connection +useConnection=switch-useConnection +addConnection=add-connection-button +bqConnection=connector-BigQuery +testConnection=connection-test-button +connectionCreate=connection-submit-button +name=name \ No newline at end of file diff --git a/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties b/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties index f91925346d6d..3a607a0b46c0 100644 --- a/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties +++ b/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties @@ -3,3 +3,19 @@ clientUrl=http://localhost:11011 serverUrl=https://placeholder.com/api # command to generate token: gcloud auth print-access-token serverAccessToken=placeholder + +## NAMESPACEADMIN-PROPERTIES-START +nameSpacePreferences1=[{"key":"dataset","value":"test_automation"}] +nameSpacePreferences2=[{"key":"projectId","value":"cdf-athena"},{"key":"datasetprojectId","value":"cdf-athena"}] +validProfile=TestProfile +validDescription=TestDescription +validClusterName=TestClusterName +invalidNamespaceName=^%&&3%% +validNamespaceDescription=Test Description +invalidProfile=6*&gjh879 +invalidClusterName=$^%%&^GHJJH89 +validNamespaceName=AutoTest +ConnectionName=TestConnection +projectId=cdf-athena +dataset=test_automation +## NAMESPACEADMIN-PROPERTIES-END
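
Note: the workflow's "Run required e2e tests" step passes --testRunner TestRunnerRequired.java, but this change only adds TestRunner.java. A minimal sketch of such a runner is shown below, assuming the same glue packages and report layout as TestRunner; the "@Namespaceadmin and @Required" tag expression is a placeholder for however the required subset ends up being tagged, not a tag that exists in these feature files.

package io.cdap.cdap.namespaceadmin;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute only the required (sanity) namespace admin test cases.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
  features = {"src/e2e-test/features"},
  glue = {"io.cdap.cdap.stepsdesign", "stepsdesign"},
  tags = {"@Namespaceadmin and @Required"}, // placeholder tag expression for the required subset
  plugin = {"pretty", "html:target/cucumber-html-report/namespaceadmin-required",
    "json:target/cucumber-reports/cucumber-namespaceadmin-required.json",
    "junit:target/cucumber-reports/cucumber-namespaceadmin-required.xml"}
)
public class TestRunnerRequired {
}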