diff --git a/e2e-test/pom.xml b/e2e-test/pom.xml
new file mode 100644
index 000000000..853bcc6e9
--- /dev/null
+++ b/e2e-test/pom.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>io.cdap.plugin</groupId>
+  <artifactId>e2e-test</artifactId>
+  <version>2.9.0-SNAPSHOT</version>
+
+  <properties>
+    <maven.compiler.source>8</maven.compiler.source>
+    <maven.compiler.target>8</maven.compiler.target>
+  </properties>
+
+  <profiles>
+    <profile>
+      <id>e2e-tests</id>
+      <build>
+        <testSourceDirectory>e2e-test/src/test</testSourceDirectory>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <version>2.18.1</version>
+            <configuration>
+              <skipTests>true</skipTests>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <version>3.0.0-M5</version>
+            <configuration>
+              <includes>
+                <include>TestRunner.java</include>
+              </includes>
+            </configuration>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>integration-test</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>net.masterthought</groupId>
+            <artifactId>maven-cucumber-reporting</artifactId>
+            <version>5.5.0</version>
+            <executions>
+              <execution>
+                <id>execution</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>generate</goal>
+                </goals>
+                <configuration>
+                  <projectName>Cucumber Reports</projectName>
+                  <outputDirectory>target/cucumber-reports/advanced-reports</outputDirectory>
+                  <buildNumber>1</buildNumber>
+                  <skip>false</skip>
+                  <inputDirectory>${project.build.directory}/cucumber-reports</inputDirectory>
+                  <jsonFiles>
+                    <param>**/*.json</param>
+                  </jsonFiles>
+                  <classificationDirectory>${project.build.directory}/cucumber-reports</classificationDirectory>
+                  <checkBuildResult>true</checkBuildResult>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <dependencies>
+    <dependency>
+      <groupId>io.cdap.tests.e2e</groupId>
+      <artifactId>cdap-e2e-framework</artifactId>
+      <version>0.0.1-SNAPSHOT</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
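Note: the failsafe configuration above only picks up a class named TestRunner.java, which does not appear in this diff. For context, a minimal sketch of such a runner using the Cucumber JUnit API (cucumber-jvm 5+ single-expression tag form) is shown below; the features path, glue packages, and JSON report file name are assumptions inferred from the step-definition packages and the @cloudSQLPostgreSQL tag used in the feature files, not part of this change.

    package io.cdap.plugin.cloudsqlpostgresql;

    import io.cucumber.junit.Cucumber;
    import io.cucumber.junit.CucumberOptions;
    import org.junit.runner.RunWith;

    /**
     * Hypothetical Cucumber runner matching the failsafe <include> entry above.
     * The JSON plugin writes into target/cucumber-reports so that
     * maven-cucumber-reporting can pick the file up during the verify phase.
     */
    @RunWith(Cucumber.class)
    @CucumberOptions(
      features = {"src/test/features"},
      glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign"},
      tags = "@cloudSQLPostgreSQL",
      plugin = {"pretty", "json:target/cucumber-reports/cucumber-cloudsqlpostgresql.json"}
    )
    public class TestRunner {
    }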
diff --git a/e2e-test/src/test/features/cloudSqlPsqlDesigntimeSink.feature b/e2e-test/src/test/features/cloudSqlPsqlDesigntimeSink.feature
new file mode 100755
index 000000000..5924a23c3
--- /dev/null
+++ b/e2e-test/src/test/features/cloudSqlPsqlDesigntimeSink.feature
@@ -0,0 +1,45 @@
+Feature: CloudSQLPostgreSQL Sink and Error Validation
+
+ @cloudSQLPostgreSQL
+ Scenario Outline: Verify CloudSQLPostgreSQL Sink properties validation errors for mandatory fields
+ Given Open DataFusion Project to configure pipeline
+ When Target is CloudSQLPostgreSQL
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the CloudSQLPostgreSQL Sink Properties with blank property "<property>"
+ Then Validate mandatory property error for "<property>"
+ Examples:
+ | property |
+ | referenceName |
+ | database |
+ | connectionName |
+ | tableName |
+ | jdbcPluginName |
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify error is displayed and validation fails for incorrect Driver name value
+ Given Open DataFusion Project to configure pipeline
+ When Target is CloudSQLPostgreSQL
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter Reference Name & Database Name with valid test data
+ Then Enter Table Name "cloudPSQLTableName" and Connection Name
+ Then Validate Connector properties
+ Then Enter Driver Name with Invalid value for Driver name field "cloudPSQLDriverNameInvalid"
+ Then Verify invalid Driver name error message is displayed for Driver "cloudPSQLDriverNameInvalid"
+ Then Verify plugin validation fails with error
+ Then Close the cloudSQLPostgreSQL properties
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify error is displayed for Reference name and Public connection name with incorrect values
+ Given Open DataFusion Project to configure pipeline
+ When Target is CloudSQLPostgreSQL
+ Then Enter Reference Name and Public Connection Name with incorrect values and table "cloudPSQLTableName"
+ Then Verify error is displayed for Reference name & connection name with incorrect values
+ Then Close the cloudSQLPostgreSQL properties
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify error is displayed for Reference name and Private connection name with incorrect values
+ Given Open DataFusion Project to configure pipeline
+ When Target is CloudSQLPostgreSQL
+ Then Enter Reference Name and Private Connection Name with incorrect values and table "cloudPSQLTableName"
+ Then Verify error is displayed for incorrect Connection Name with private instance type
+ Then Close the cloudSQLPostgreSQL properties
diff --git a/e2e-test/src/test/features/cloudSqlPsqlDesigntimeSource.feature b/e2e-test/src/test/features/cloudSqlPsqlDesigntimeSource.feature
new file mode 100755
index 000000000..fe6b14f69
--- /dev/null
+++ b/e2e-test/src/test/features/cloudSqlPsqlDesigntimeSource.feature
@@ -0,0 +1,45 @@
+Feature: CloudSQLPostgreSQL Source Design Time and Error Validation
+
+ @cloudSQLPostgreSQL
+ Scenario Outline: Verify CloudSQLPostgreSQL Source properties validation errors for mandatory fields
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the CloudSQLPostgreSQL Source Properties with blank property "<property>"
+ Then Validate mandatory property error for "<property>"
+ Examples:
+ | property |
+ | referenceName |
+ | database |
+ | connectionName |
+ | importQuery |
+ | jdbcPluginName |
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify Driver Name field validation error with invalid test data
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter Reference Name & Database Name with valid test data
+ Then Enter Connection Name and Import Query "cloudPSQLImportQuery"
+ Then Validate Connector properties
+ Then Enter Driver Name with Invalid value for Driver name field "cloudPSQLDriverNameInvalid"
+ Then Verify invalid Driver name error message is displayed for Driver "cloudPSQLDriverNameInvalid"
+ Then Verify plugin validation fails with error
+ Then Close the cloudSQLPostgreSQL properties
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify error is displayed for Reference name and Public connection name with incorrect values
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ Then Enter Reference Name & Connection Name with incorrect values and import query "cloudPSQLImportQuery"
+ Then Verify error is displayed for Reference name & connection name with incorrect values
+ Then Close the cloudSQLPostgreSQL properties
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify error is displayed for Reference name and Private connection name with incorrect values
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ Then Enter Reference Name and private Connection Name with incorrect values and import query "cloudPSQLImportQuery"
+ Then Verify error is displayed for incorrect Connection Name with private instance type
+ Then Close the cloudSQLPostgreSQL properties
diff --git a/e2e-test/src/test/features/cloudSqlPsqlEndtoEnd.feature b/e2e-test/src/test/features/cloudSqlPsqlEndtoEnd.feature
new file mode 100644
index 000000000..4795979da
--- /dev/null
+++ b/e2e-test/src/test/features/cloudSqlPsqlEndtoEnd.feature
@@ -0,0 +1,264 @@
+Feature: End-to-End records transfer from cloudSQLPostgreSQL to BigQuery and GCS
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify all the records transfer from cloudSQLPostgreSQL to BigQuery supporting different data types
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLDBImportQueryForAll"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify all the duplicate records are fetched and transferred to BigQuery
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLDBImportQueryDuplicate" for duplicate values "cloudPSQLSplitColumnDuplicateValue"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+
+ @cloudSQLPostgreSQL
+ Scenario Outline: Verify records get transferred on combining different tables using joins
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using different join queries ""
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+ Examples:
+ | cloudPSQLDBImportQueryJoins |
+ | cloudPSQLDBImportQueryInnerJoin |
+ | cloudPSQLDBImportQueryLeftJoin |
+ | cloudPSQLDBImportQueryRightJoin |
+ | cloudPSQLDBImportQueryOuterJoin |
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify only distinct records are transferred
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLDBImportQueryDistinct" for distinct values "cloudPSQLSplitColumnDistinctValue"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify records with maximum values are transferred from cloudPSQL to BigQuery
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLDBImportQueryForMax" for max values "cloudPSQLSplitColumnMaxValue"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify records with minimum values are transferred from cloudPSQL to BigQuery
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLDBImportQueryForMin" for min values "cloudPSQLSplitColumnMinValue"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+
+
+ @cloudSQLPostgreSQL
+ Scenario Outline: Verify all the records transfer from cloudSQLPostgreSQL to BigQuery for different where clauses
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "" for max and min ""
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+ Examples:
+ | cloudPostgresSQLDatabaseImportQuery | cloudPostgresSQLSplitColumnValues |
+ | cloudPSQLDBImportQueryForBetween | cloudPSQLSplitColumnBetweenValue |
+ | cloudPSQLDBImportQueryForIn | cloudPSQLSplitColumnInValue |
+ | cloudPSQLDBImportQueryNotIn | cloudPSQLSplitColumnNotInValue |
+ | cloudPSQLDBImportQueryOrderBy | cloudPSQLSplitColumnOrderByValue |
+
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify records are transferred from cloudSQLPostgreSQL to BigQuery using Bounding Query
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is BigQuery
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLQuery" for max values "cloudPSQLSplitColumnMaxValue" with bounding query "cloudPSQLDBBoundingQuery" and "cloudPsqlNoOfSplits"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery Target Properties for table "cloudPsqlBigQuery"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "cloudPsqlBigQuery"
+ Then Validate records out from cloudSQLPostgreSQL is equal to records transferred in BigQuery "cloudPsqlBigQuery" output records
+
+ @cloudSQLPostgreSQL
+ Scenario: Verify all the records transfer from cloudSQLPostgreSQL to GCS supporting different data types
+ Given Open DataFusion Project to configure pipeline
+ When Source is CloudSQLPostgreSQL
+ When Sink is GCS
+ Then Open cloudSQLPostgreSQL Properties
+ Then Enter the cloudSQLPostgreSQL properties for database "cloudPSQLDbName" using query "cloudPSQLDBImportQueryForAll"
+ Then Capture output schema
+ Then Validate cloudSQLPostgreSQL properties
+ Then Close the cloudSQLPostgreSQL properties
+ Then Enter the GCS Properties
+ Then Close the GCS Properties
+ Then Connect Source as "CloudSQL-PostgreSQL" and sink as "GCS" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for GCS
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Validate OUT record count is equal to IN record count
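A note on the quoted arguments in these scenarios: values such as "cloudPSQLDbName" and "cloudPsqlBigQuery" are property keys rather than literal values; the step definitions below resolve them through E2ETestUtils.pluginProp(...). A minimal sketch of how such a lookup is commonly implemented is given here, assuming the test data sits in a pluginParameters.properties file on the classpath (the file name and class shape are assumptions, not confirmed by this diff).

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public final class PluginPropsSketch {
      private static final Properties PROPS = new Properties();

      static {
        // Load once per JVM; the resource name below is illustrative only.
        try (InputStream in =
               PluginPropsSketch.class.getResourceAsStream("/pluginParameters.properties")) {
          PROPS.load(in);
        } catch (IOException e) {
          throw new ExceptionInInitializerError(e);
        }
      }

      // Feature files pass keys like "cloudPSQLDbName"; steps fetch the real value here,
      // so scenarios stay free of environment-specific data.
      public static String pluginProp(String key) {
        return PROPS.getProperty(key);
      }
    }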
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/actions/CloudSqlPostgreSqlActions.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/actions/CloudSqlPostgreSqlActions.java
new file mode 100644
index 000000000..e8963d72d
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/actions/CloudSqlPostgreSqlActions.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.actions;
+
+import io.cdap.e2e.pages.locators.CdfBigQueryPropertiesLocators;
+import io.cdap.e2e.utils.SeleniumDriver;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.cloudsqlpostgresql.locators.CloudSqlPostgreSqlLocators;
+import org.openqa.selenium.By;
+
+import java.io.IOException;
+
+/**
+ * Step actions for the CloudSqlPostgreSql connector.
+ */
+public class CloudSqlPostgreSqlActions {
+
+ static {
+ SeleniumHelper.getPropertiesLocators(CloudSqlPostgreSqlLocators.class);
+ SeleniumHelper.getPropertiesLocators(CdfBigQueryPropertiesLocators.class);
+ }
+
+ public static void selectCloudSQLPostgreSQLSource() throws InterruptedException {
+ SeleniumHelper.waitAndClick(CloudSqlPostgreSqlLocators.cloudSqlPSqlSource);
+ }
+
+ public static void selectCloudSQLPostgreSQLSink() throws InterruptedException {
+ CloudSqlPostgreSqlLocators.sink.click();
+ SeleniumHelper.waitAndClick(CloudSqlPostgreSqlLocators.cloudSqlPSqlSink);
+ }
+
+ public static void clickCloudSqlPostgreSqlProperties() {
+ CloudSqlPostgreSqlLocators.cloudSqlPSqlProperties.click();
+ }
+
+ public static void clickValidateButton() {
+ CloudSqlPostgreSqlLocators.validateButton.click();
+ }
+
+ public static void enterReferenceName(String reference) {
+ CloudSqlPostgreSqlLocators.referenceName.sendKeys(reference);
+ }
+
+ public static void enterDriverName(String driver) {
+ CloudSqlPostgreSqlLocators.driverName.sendKeys(driver);
+ }
+
+ public static void enterDefaultDriver(String driverNameValid) {
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.driverName, driverNameValid);
+ }
+
+ public static void enterDatabaseName(String database) {
+ CloudSqlPostgreSqlLocators.database.sendKeys(database);
+ }
+
+ public static void enterUserName(String username) {
+ CloudSqlPostgreSqlLocators.username.sendKeys(username);
+ }
+
+ public static void enterPassword(String password) {
+ CloudSqlPostgreSqlLocators.password.sendKeys(password);
+ }
+
+ public static void enterConnectionName(String connection) {
+ CloudSqlPostgreSqlLocators.connectionName.sendKeys(connection);
+ }
+
+ public static void closeButton() {
+ CloudSqlPostgreSqlLocators.closeButton.click();
+ }
+
+ public static void enterSplitColumn(String splitColumn) {
+ CloudSqlPostgreSqlLocators.splitColumn.sendKeys(splitColumn);
+ }
+
+ public static void enterNumberOfSplits(String numberOfSplits) {
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.numberOfSplits, numberOfSplits);
+ }
+
+ public static void replaceTableValue(String tableName) {
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.sqlTableName, tableName);
+ }
+
+ public static void enterImportQuery(String query) throws IOException, InterruptedException {
+ CloudSqlPostgreSqlLocators.importQuery.sendKeys(query);
+ }
+
+ public static void enterBoundingQuery(String query) throws IOException, InterruptedException {
+ CloudSqlPostgreSqlLocators.boundingQuery.sendKeys(query);
+ }
+
+ public static void enterTableName(String table) {
+ CloudSqlPostgreSqlLocators.sqlTableName.sendKeys(table);
+ }
+
+ public static void enterConnectionTimeout(String connectionTimeout) {
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.connectionTimeout, connectionTimeout);
+ }
+
+ public static void clickPrivateInstance() {
+ CloudSqlPostgreSqlLocators.instanceType.click();
+ }
+
+ public static void getSchema() {
+ CloudSqlPostgreSqlLocators.getSchemaButton.click();
+ }
+
+ public static void clickPreviewData() {
+ SeleniumHelper.waitElementIsVisible(CloudSqlPostgreSqlLocators.previewData);
+ CloudSqlPostgreSqlLocators.previewData.click();
+ }
+
+ public static void replaceSplitValue(String numberOfSplits) throws IOException {
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.numberOfSplits, numberOfSplits);
+ }
+
+ public static void clickPreviewPropertiesTab() {
+ CloudSqlPostgreSqlLocators.previewPropertiesTab.click();
+ }
+
+ public static void clickPluginPreviewData(String plugin) {
+ SeleniumDriver.getDriver().findElement(
+ By.xpath("//*[contains(@data-cy,'" + plugin + "') and contains(@data-cy,'-preview-data-btn') " +
+ "and @class='node-preview-data-btn ng-scope']")).click();
+ }
+}
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/actions/package-info.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/actions/package-info.java
new file mode 100755
index 000000000..3ee15df84
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/actions/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains step actions for the cloudSqlPostgreSql connector.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.actions;
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/locators/CloudSqlPostgreSqlLocators.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/locators/CloudSqlPostgreSqlLocators.java
new file mode 100644
index 000000000..5a01e877a
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/locators/CloudSqlPostgreSqlLocators.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.locators;
+
+import org.openqa.selenium.WebElement;
+import org.openqa.selenium.support.FindBy;
+import org.openqa.selenium.support.How;
+
+import java.util.List;
+
+/**
+ * CloudSqlPostgreSql Connector Locators.
+ */
+public class CloudSqlPostgreSqlLocators {
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='plugin-CloudSQLPostgreSQL-batchsource']")
+ public static WebElement cloudSqlPSqlSource;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='plugin-CloudSQLPostgreSQL-batchsink']")
+ public static WebElement cloudSqlPSqlSink;
+
+ @FindBy(how = How.XPATH, using = "//*[text()='Sink ']")
+ public static WebElement sink;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='referenceName' and @class='MuiInputBase-input']")
+ public static WebElement referenceName;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='property-row-error' and contains(text(),'Invalid reference name')]")
+ public static WebElement referenceNameError;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='jdbcPluginName' and @class='MuiInputBase-input']")
+ public static WebElement driverName;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='database' and @class='MuiInputBase-input']")
+ public static WebElement database;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='importQuery']//textarea")
+ public static WebElement importQuery;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='boundingQuery']//textarea")
+ public static WebElement boundingQuery;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='property-row-error' and contains(text(),'JDBC Driver class for')]")
+ public static WebElement driverNameError;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='user' and @class='MuiInputBase-input']")
+ public static WebElement username;
+
+ @FindBy(how = How.XPATH, using = "//*[@placeholder='The password to use to connect to the CloudSQL database']")
+ public static WebElement password;
+
+ @FindBy(how = How.XPATH, using = "//input [@type='radio' and @value='private']")
+ public static WebElement instanceType;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='connectionName' and @class='MuiInputBase-input']")
+ public static WebElement connectionName;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='property-row-error' and contains(text(),'Enter the internal')]")
+ public static WebElement connectionNameError;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='property-row-error' and contains(text(),'Connection Name must')]")
+ public static WebElement connectionNameFormatError;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='splitBy' and @class='MuiInputBase-input']")
+ public static WebElement splitColumn;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='numSplits' and @class='MuiInputBase-input']")
+ public static WebElement numberOfSplits;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='tableName' and @class='MuiInputBase-input']")
+ public static WebElement sqlTableName;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='connectionTimeout' and @class='MuiInputBase-input']")
+ public static WebElement connectionTimeout;
+
+ @FindBy(how = How.XPATH, using = "//*[@class='fa fa-remove']")
+ public static WebElement closeButton;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='plugin-properties-validate-btn']")
+ public static WebElement validateButton;
+
+ @FindBy(how = How.XPATH, using = "//*[@title=\"CloudSQL PostgreSQL\"]//following-sibling::div")
+ public static WebElement cloudSqlPSqlProperties;
+
+ @FindBy(how = How.XPATH, using = "//*[contains(text(),'Get Schema')]")
+ public static WebElement getSchemaButton;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='CloudSQLPostgreSQL-preview-data-btn' and " +
+ "@class='node-preview-data-btn ng-scope']")
+ public static WebElement previewData;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='get-schema-btn']//span[text()='Get Schema']")
+ public static WebElement getSchemaLoadComplete;
+
+ @FindBy(how = How.XPATH,
+ using = "//div[@data-cy='Output Schema']//div[@data-cy='schema-fields-list']//*[@placeholder='Field name']")
+ public static List<WebElement> outputSchemaColumnNames;
+
+ @FindBy(how = How.XPATH,
+ using = "//div[@data-cy='Output Schema']//div[@data-cy='schema-fields-list']//select")
+ public static List<WebElement> outputSchemaDataTypes;
+
+ @FindBy(how = How.XPATH, using = "(//h2[text()='Input Records']/parent::div/div/div/div/div)[1]//div[text()!='']")
+ public static List<WebElement> previewInputRecordColumnNames;
+
+ @FindBy(how = How.XPATH, using = "//*[@role='tablist']/li[contains(text(),'Properties')]")
+ public static WebElement previewPropertiesTab;
+
+ @FindBy(how = How.XPATH,
+ using = "//div[@data-cy='Input Schema']//div[@data-cy='schema-fields-list']//select")
+ public static List<WebElement> inputSchemaDataTypes;
+
+ @FindBy(how = How.XPATH,
+ using = "//div[@data-cy='Input Schema']//div[@data-cy='schema-fields-list']//*[@placeholder='Field name']")
+ public static List<WebElement> inputSchemaColumnNames;
+
+}
+
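Because every locator above is a static @FindBy field, nothing in this class itself touches the browser; the static initializer in CloudSqlPostgreSqlActions hands the class to SeleniumHelper.getPropertiesLocators(...), which is assumed to delegate to Selenium's PageFactory roughly as sketched below (the signature and driver handling are assumptions about the e2e framework, not part of this diff).

    import org.openqa.selenium.WebDriver;
    import org.openqa.selenium.support.PageFactory;

    public final class LocatorInitSketch {
      // PageFactory assigns lazy proxies to each @FindBy field; the real DOM
      // lookup happens only when an action method finally uses the element.
      public static <T> T getPropertiesLocators(WebDriver driver, Class<T> locatorClass) {
        return PageFactory.initElements(driver, locatorClass);
      }
    }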
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/locators/package-info.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/locators/package-info.java
new file mode 100755
index 000000000..dcf3f8688
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/locators/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains locators for the cloudSqlPostgreSql connector.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.locators;
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java
new file mode 100644
index 000000000..0b6481233
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java
@@ -0,0 +1,589 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.stepsdesign;
+
+import io.cdap.e2e.pages.actions.CdfBigQueryPropertiesActions;
+import io.cdap.e2e.pages.actions.CdfGcsActions;
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.pages.actions.CdfStudioActions;
+import io.cdap.e2e.pages.locators.CdfStudioLocators;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.GcpClient;
+import io.cdap.e2e.utils.SeleniumDriver;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.cloudsqlpostgresql.actions.CloudSqlPostgreSqlActions;
+import io.cdap.plugin.cloudsqlpostgresql.locators.CloudSqlPostgreSqlLocators;
+import io.cdap.plugin.utils.E2ETestConstants;
+import io.cdap.plugin.utils.E2ETestUtils;
+import io.cucumber.java.en.Given;
+import io.cucumber.java.en.Then;
+import io.cucumber.java.en.When;
+import org.junit.Assert;
+import org.openqa.selenium.By;
+import org.openqa.selenium.WebElement;
+import org.openqa.selenium.support.ui.ExpectedConditions;
+import org.openqa.selenium.support.ui.WebDriverWait;
+import stepsdesign.BeforeActions;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * StepsDesign for CloudSqlPostgreSql.
+ */
+public class CloudSqlPostgreSql implements CdfHelper {
+ List<String> propertiesSchemaColumnList = new ArrayList<>();
+ Map<String, String> sourcePropertiesOutputSchema = new HashMap<>();
+ static PrintWriter out;
+
+ static {
+ try {
+ out = new PrintWriter(BeforeActions.myObj);
+ } catch (FileNotFoundException e) {
+ BeforeActions.scenario.write(e.toString());
+ }
+ }
+
+ @Given("Open DataFusion Project to configure pipeline")
+ public void openDataFusionProjectToConfigurePipeline() throws IOException, InterruptedException {
+ openCdf();
+ }
+
+ @When("Source is CloudSQLPostgreSQL")
+ public void sourceIsCloudSQLPostgreSQL() throws InterruptedException {
+ CloudSqlPostgreSqlActions.selectCloudSQLPostgreSQLSource();
+ }
+
+ @When("Target is CloudSQLPostgreSQL")
+ public void targetIsCloudSQLPostgreSQL() throws InterruptedException {
+ CloudSqlPostgreSqlActions.selectCloudSQLPostgreSQLSink();
+ }
+
+ @When("Sink is BigQuery")
+ public void sinkIsBigQuery() {
+ CdfStudioActions.sinkBigQuery();
+ }
+
+ @Then("Validate Connector properties")
+ public void validatePipeline() throws InterruptedException {
+ CloudSqlPostgreSqlActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CloudSqlPostgreSqlLocators.closeButton, 10);
+ }
+
+ @Then("Enter Reference Name & Connection Name with Invalid Test Data in Sink")
+ public void enterTheSinkInvalidData() throws InterruptedException, IOException {
+ CloudSqlPostgreSqlActions.clickCloudSqlPostgreSqlProperties();
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPsqlReferenceNameInvalid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(E2ETestUtils.pluginProp("cloudPSQLConnectionNameInvalid"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp("cloudPSQLTableName"));
+ }
+
+ @Then("Verify error is displayed for Reference name & connection name with incorrect values")
+ public void verifyErrorIsDisplayedForReferenceNameConnectionNameWithIncorrectValues() {
+ Assert.assertTrue(CloudSqlPostgreSqlLocators.referenceNameError.isDisplayed());
+ Assert.assertTrue(CloudSqlPostgreSqlLocators.connectionNameFormatError.isDisplayed());
+ }
+
+ @Then("Enter Connection Name with private instance type")
+ public void enterTheInvalidPrivate() throws InterruptedException, IOException {
+ CloudSqlPostgreSqlActions.clickPrivateInstance();
+ CloudSqlPostgreSqlActions.clickValidateButton();
+ }
+
+ @Then("Verify error is displayed for incorrect Connection Name with private instance type")
+ public void verifyTheCldMySqlInvalidPrivate() throws InterruptedException {
+ Assert.assertTrue(CloudSqlPostgreSqlLocators.connectionNameError.isDisplayed());
+ }
+
+ @Then("Enter the CloudSQLPostgreSQL Source Properties with blank property {string}")
+ public void enterTheCloudSQLPostgreSQLSourcePropertiesWithBlankProperty(String property) throws IOException,
+ InterruptedException {
+ if (property.equalsIgnoreCase("referenceName")) {
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp("cloudPSQLImportQuery"));
+ } else if (property.equalsIgnoreCase("database")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp("cloudPSQLImportQuery"));
+ } else if (property.equalsIgnoreCase("connectionName")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp("cloudPSQLImportQuery"));
+ } else if (property.equalsIgnoreCase("importQuery")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ } else if (property.equalsIgnoreCase("jdbcPluginName")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.driverName, "");
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp("cloudPSQLImportQuery"));
+ } else {
+ Assert.fail("Invalid cloudSqlPsql Mandatory Field : " + property);
+ }
+ }
+
+ @Then("Enter the CloudSQLPostgreSQL Sink Properties with blank property {string}")
+ public void enterTheCloudSQLPostgreSQLSinkPropertiesWithBlankProperty(String property) throws IOException,
+ InterruptedException {
+ if (property.equalsIgnoreCase("referenceName")) {
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp("cloudPSQLTableName"));
+ } else if (property.equalsIgnoreCase("database")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp("cloudPSQLTableName"));
+ } else if (property.equalsIgnoreCase("connectionName")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp("cloudPSQLTableName"));
+ } else if (property.equalsIgnoreCase("tableName")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ } else if (property.equalsIgnoreCase("jdbcPluginName")) {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ SeleniumHelper.replaceElementValue(CloudSqlPostgreSqlLocators.driverName, "");
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp("cloudPSQLTableName"));
+ } else {
+ Assert.fail("Invalid cLoudSqlPsql Mandatory Field : " + property);
+ }
+ }
+
+ @Then("Validate mandatory property error for {string}")
+ public void validateMandatoryPropertyErrorFor(String property) {
+ CdfStudioActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.validateButton, 5L);
+ E2ETestUtils.validateMandatoryPropertyError(property);
+ }
+
+ @Then("Enter Reference Name & Database Name with valid test data")
+ public void enterReferenceNameAndDatabaseNameWithValidTestData() throws InterruptedException, IOException {
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameValid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ }
+
+ @Then("Enter Table Name {string} and Connection Name")
+ public void enterTableNameInTableField(String tableName) throws IOException {
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp(tableName));
+ CloudSqlPostgreSqlActions.clickPrivateInstance();
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ }
+
+ @Then("Enter Driver Name with Invalid value")
+ public void enterDriverNameDefaultValue() throws IOException {
+ CloudSqlPostgreSqlActions.enterDriverName(E2ETestUtils.pluginProp("cloudPSQLDriverNameInvalid"));
+ CloudSqlPostgreSqlActions.clickValidateButton();
+ }
+
+ @Then("Verify Driver Name field with Invalid value entered")
+ public void verifyDriverNameFieldWithInvalidValueEntered() {
+ Assert.assertTrue(CloudSqlPostgreSqlLocators.driverNameError.isDisplayed());
+ }
+
+ @Then("Close the CloudSQLPostGreSQL Properties")
+ public void closeTheCloudSQLPostGreSQLProperties() {
+ CloudSqlPostgreSqlActions.closeButton();
+ }
+
+ @Then("Enter Connection Name and Import Query {string}")
+ public void enterConnectionImportField(String query) throws IOException, InterruptedException {
+ CloudSqlPostgreSqlActions.clickPrivateInstance();
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(query));
+ }
+
+ @Then("Enter Reference Name & Connection Name with incorrect values and import query {string}")
+ public void enterReferenceNameConnectionNameWithIncorrectValuesAndImportQuery(String query)
+ throws IOException, InterruptedException {
+ CloudSqlPostgreSqlActions.clickCloudSqlPostgreSqlProperties();
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameInvalid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterConnectionName(E2ETestUtils.pluginProp("cloudPSQLConnectionNameInvalid"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(query));
+ SeleniumHelper.waitAndClick(CloudSqlPostgreSqlLocators.validateButton, 50);
+ }
+
+ @Then("Enter Reference Name and Public Connection Name with incorrect values and table {string}")
+ public void enterReferenceNameAndPublicConnectionNameWithIncorrectValuesAndTable(String tableName) {
+ CloudSqlPostgreSqlActions.clickCloudSqlPostgreSqlProperties();
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameInvalid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp(tableName));
+ CloudSqlPostgreSqlActions.enterConnectionName(E2ETestUtils.pluginProp("cloudPSQLConnectionNameInvalid"));
+ SeleniumHelper.waitAndClick(CloudSqlPostgreSqlLocators.validateButton, 50);
+ }
+
+ @Then("Enter Reference Name and private Connection Name with incorrect values and import query {string}")
+ public void enterReferenceNameAndPrivateConnectionNameWithIncorrectValuesAndImportQuery(String query)
+ throws IOException, InterruptedException {
+ CloudSqlPostgreSqlActions.clickCloudSqlPostgreSqlProperties();
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameInvalid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.clickPrivateInstance();
+ CloudSqlPostgreSqlActions.enterConnectionName(E2ETestUtils.pluginProp("cloudPSQLConnectionNameInvalid"));
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(query));
+ SeleniumHelper.waitAndClick(CloudSqlPostgreSqlLocators.validateButton, 50);
+ }
+
+ @Then("Enter Reference Name and Private Connection Name with incorrect values and table {string}")
+ public void enterReferenceNameAndPrivateConnectionNameWithIncorrectValuesAndTable(String tableName) {
+ CloudSqlPostgreSqlActions.clickCloudSqlPostgreSqlProperties();
+ CloudSqlPostgreSqlActions.enterReferenceName(E2ETestUtils.pluginProp("cloudPSQLReferenceNameInvalid"));
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp("cloudPSQLDbName"));
+ CloudSqlPostgreSqlActions.enterTableName(E2ETestUtils.pluginProp(tableName));
+ CloudSqlPostgreSqlActions.clickPrivateInstance();
+ CloudSqlPostgreSqlActions.enterConnectionName(E2ETestUtils.pluginProp("cloudPSQLConnectionNameInvalid"));
+ SeleniumHelper.waitAndClick(CloudSqlPostgreSqlLocators.validateButton, 50);
+ }
+
+ @Then("Open cloudSQLPostgreSQL Properties")
+ public void openCloudSQLPostgreSQLProperties() {
+ CloudSqlPostgreSqlActions.clickCloudSqlPostgreSqlProperties();
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryToGetAllValues
+ (String database, String importQuery) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ }
+
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabase(String database) throws IOException {
+ CloudSqlPostgreSqlActions.enterReferenceName("cloudSQLPostgreSQL" + UUID.randomUUID().toString());
+ CloudSqlPostgreSqlActions.enterDatabaseName(E2ETestUtils.pluginProp(database));
+ CloudSqlPostgreSqlActions.clickPrivateInstance();
+ CloudSqlPostgreSqlActions.enterUserName(System.getenv("Cloud_Psql_User_Name"));
+ CloudSqlPostgreSqlActions.enterPassword(System.getenv("Cloud_Psql_Password"));
+ CloudSqlPostgreSqlActions.enterConnectionName(System.getenv("Cloud_Psql_ConnectionName"));
+ }
+
+ @Then("Capture output schema")
+ public void captureOutputSchema() {
+ CloudSqlPostgreSqlActions.getSchema();
+ SeleniumHelper.waitElementIsVisible(CloudSqlPostgreSqlLocators.getSchemaLoadComplete, 10L);
+ SeleniumHelper.waitElementIsVisible(CloudSqlPostgreSqlLocators.outputSchemaColumnNames.get(0), 2L);
+ int index = 0;
+ for (WebElement element : CloudSqlPostgreSqlLocators.outputSchemaColumnNames) {
+ propertiesSchemaColumnList.add(element.getAttribute("value"));
+ sourcePropertiesOutputSchema.put(element.getAttribute("value"),
+ CloudSqlPostgreSqlLocators.outputSchemaDataTypes.get(index).getAttribute("title"));
+ index++;
+ }
+ Assert.assertTrue(propertiesSchemaColumnList.size() >= 1);
+ }
+
+ @Then("Validate cloudSQLPostgreSQL properties")
+ public void validateCloudSQLPostgreSQLProperties() {
+ CloudSqlPostgreSqlActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CloudSqlPostgreSqlLocators.validateButton);
+ String expectedErrorMessage = E2ETestUtils.errorProp(E2ETestConstants.ERROR_MSG_VALIDATION);
+ String actualErrorMessage = CdfStudioLocators.pluginValidationSuccessMsg.getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ }
+
+ @Then("Close the cloudSQLPostgreSQL properties")
+ public void closeTheCloudSQLPostgreSQLProperties() {
+ CloudSqlPostgreSqlActions.closeButton();
+ }
+
+ @Then("Open BigQuery Target Properties")
+ public void openBigQueryTargetProperties() {
+ CdfStudioActions.clickProperties("BigQuery");
+ }
+
+ @Then("Enter the BigQuery Target Properties for table {string}")
+ public void enterTheBigQueryTargetPropertiesForTable(String tableName) throws IOException {
+ CdfBigQueryPropertiesActions.enterProjectId(E2ETestUtils.pluginProp("projectId"));
+ CdfBigQueryPropertiesActions.enterDatasetProjectId(E2ETestUtils.pluginProp("projectId"));
+ CdfBigQueryPropertiesActions.enterBigQueryReferenceName("BQ_Ref_" + UUID.randomUUID().toString());
+ CdfBigQueryPropertiesActions.enterBigQueryDataset(E2ETestUtils.pluginProp("dataset"));
+ CdfBigQueryPropertiesActions.enterBigQueryTable(E2ETestUtils.pluginProp(tableName));
+ CdfBigQueryPropertiesActions.clickUpdateTable();
+ CdfBigQueryPropertiesActions.clickTruncatableSwitch();
+ }
+
+ @Then("Validate Bigquery properties")
+ public void validateBigqueryProperties() {
+ CdfGcsActions.clickValidateButton();
+ String expectedErrorMessage = E2ETestUtils.errorProp(E2ETestConstants.ERROR_MSG_VALIDATION);
+ String actualErrorMessage = CdfStudioLocators.pluginValidationSuccessMsg.getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ }
+
+ @Then("Close the BigQuery properties")
+ public void closeTheBigQueryProperties() {
+ CdfStudioActions.clickCloseButton();
+ }
+
+ @Then("Connect Source as {string} and sink as {string} to establish connection")
+ public void connectSourceAsAndSinkAsToEstablishConnection(String source, String sink) {
+ CdfStudioActions.connectSourceAndSink(source, sink);
+ }
+
+ @Then("Add pipeline name")
+ public void addPipelineName() {
+ CdfStudioActions.pipelineName();
+ CdfStudioActions.pipelineNameIp("cloudSQLPostgreSQL_BQ" + UUID.randomUUID().toString());
+ CdfStudioActions.pipelineSave();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.statusBanner);
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 5);
+ wait.until(ExpectedConditions.invisibilityOf(CdfStudioLocators.statusBanner));
+ }
+
+ @Then("Preview and run the pipeline")
+ public void previewAndRunThePipeline() {
+ SeleniumHelper.waitAndClick(CdfStudioLocators.preview, 5L);
+ CdfStudioLocators.runButton.click();
+ }
+
+ @Then("Verify the preview of pipeline is {string}")
+ public void verifyThePreviewOfPipelineIs(String previewStatus) {
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 180);
+ wait.until(ExpectedConditions.visibilityOf(CdfStudioLocators.statusBanner));
+ Assert.assertTrue(CdfStudioLocators.statusBannerText.getText().contains(previewStatus));
+ if (!previewStatus.equalsIgnoreCase("failed")) {
+ wait.until(ExpectedConditions.invisibilityOf(CdfStudioLocators.statusBanner));
+ }
+ }
+
+ @Then("Click on PreviewData for cloudSQLPostgreSQL")
+ public void clickOnPreviewDataForCloudSQLPostgreSQL() {
+ CloudSqlPostgreSqlActions.clickPreviewData();
+ }
+
+ @Then("Close the Preview and deploy the pipeline")
+ public void closeThePreviewAndDeployThePipeline() {
+ SeleniumHelper.waitAndClick(CdfStudioLocators.closeButton, 5L);
+ CdfStudioActions.previewSelect();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.pipelineDeploy, 2);
+ CdfStudioActions.pipelineDeploy();
+ }
+
+ @Then("Open the Logs and capture raw logs")
+ public void openTheLogsAndCaptureRawLogs() {
+ CdfPipelineRunAction.logsClick();
+ }
+
+ @Then("Validate records out from cloudSQLPostgreSQL is equal to records transferred in " +
+ "BigQuery {string} output records")
+ public void validateRecordsOutFromCloudSQLPostgreSQLIsEqualToRecordsTransferredInBigQueryOutputRecords
+ (String tableName) throws IOException, InterruptedException {
+ int countRecords = GcpClient.countBqQuery(E2ETestUtils.pluginProp(tableName));
+ Assert.assertEquals(countRecords, recordOut());
+ }
+
+ @Then("Run the Pipeline in Runtime")
+ public void runThePipelineInRuntime() throws InterruptedException {
+ CdfPipelineRunAction.runClick();
+ }
+
+ @Then("Wait till pipeline is in running state")
+ public void waitTillPipelineIsInRunningState() throws InterruptedException {
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 200);
+ wait.until(ExpectedConditions.or(
+   ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[@data-cy='Succeeded']")),
+   ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[@data-cy='Failed']"))));
+ }
+
+ @Then("Verify the pipeline status is {string}")
+ public void verifyThePipelineStatusIs(String status) {
+ boolean statusDisplayed = SeleniumHelper.verifyElementPresent("//*[@data-cy='" + status + "']");
+ Assert.assertTrue(statusDisplayed);
+ }
+
+ @Then("Get Count of no of records transferred to BigQuery in {string}")
+ public void getCountOfNoOfRecordsTransferredToBigQueryIn(String table) throws IOException, InterruptedException {
+ int countRecords = GcpClient.countBqQuery(E2ETestUtils.pluginProp(table));
+ BeforeActions.scenario.write("**********No of Records Transferred******************:" + countRecords);
+ Assert.assertEquals(countRecords, recordOut());
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query {string} for {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForNull
+ (String database, String importQuery, String splitColumnValue) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnValue));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using " +
+ "query {string} for between values {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForBetweenValues
+ (String database, String importQuery, String cloudPostgresSQLSplitColumnBetweenValue)
+ throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(cloudPostgresSQLSplitColumnBetweenValue));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query {string} for max and min {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForMaxAndMin
+ (String database, String importQuery, String splitColumnField) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnField));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query {string} " +
+ "for duplicate values {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForDuplicateValues
+ (String database, String importQuery, String splitColumnField) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnField));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query {string} for max values {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForMaxValues
+ (String database, String importQuery, String splitColumnField) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnField));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query {string} for min values {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForMinValues
+ (String database, String importQuery, String splitColumnField) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnField));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} " +
+ "using query {string} for distinct values {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForDistinctValues
+ (String database, String importQuery, String splitColumnField) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnField));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using different join queries {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingDifferentJoinQueries
+ (String database, String importQuery) throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ }
+
+ @When("Sink is GCS")
+ public void sinkIsGCS() {
+ CdfStudioActions.sinkGcs();
+ }
+
+ @Then("Validate OUT record count is equal to IN record count")
+ public void validateOUTRecordCountIsEqualToINRecordCount() {
+ Assert.assertEquals(recordOut(), recordIn());
+ }
+
+ @Then("Enter the GCS Properties")
+ public void enterTheGCSProperties() throws IOException, InterruptedException {
+ CdfGcsActions.gcsProperties();
+ CdfGcsActions.enterReferenceName();
+ CdfGcsActions.enterProjectId();
+ CdfGcsActions.getGcsBucket(E2ETestUtils.pluginProp("cloudPSQLGcsBucketName"));
+ CdfGcsActions.selectFormat("json");
+ CdfGcsActions.clickValidateButton();
+ }
+
+ @Then("Close the GCS Properties")
+ public void closeTheGCSProperties() {
+ CdfGcsActions.closeButton();
+ }
+
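+ /**
+  * Checks the preview against the schema captured from the source properties: first the
+  * column names shown on the preview tab, then the column name to data type mapping on
+  * the sink's input schema.
+  */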
+ @Then("Verify Preview output schema matches the outputSchema captured in properties")
+ public void verifyPreviewOutputSchemaMatchesTheOutputSchemaCapturedInProperties() {
+ List previewSchemaColumnList = new ArrayList<>();
+ for (WebElement element : CloudSqlPostgreSqlLocators.previewInputRecordColumnNames) {
+ previewSchemaColumnList.add(element.getAttribute("title"));
+ }
+ Assert.assertTrue(previewSchemaColumnList.equals(propertiesSchemaColumnList));
+ CloudSqlPostgreSqlActions.clickPreviewPropertiesTab();
+ Map previewSinkInputSchema = new HashMap<>();
+ int index = 0;
+ for (WebElement element : CloudSqlPostgreSqlLocators.inputSchemaColumnNames) {
+ previewSinkInputSchema.put(element.getAttribute("value"),
+ CloudSqlPostgreSqlLocators.inputSchemaDataTypes.get(index).getAttribute("title"));
+ index++;
+ }
+ Assert.assertTrue(previewSinkInputSchema.equals(sourcePropertiesOutputSchema));
+ }
+
+ @Then("Enter the cloudSQLPostgreSQL properties for database {string} using query " +
+ "{string} for max values {string} with bounding query {string} and {string}")
+ public void enterTheCloudSQLPostgreSQLPropertiesForDatabaseUsingQueryForMaxValuesWithBoundingQueryAnd
+ (String database, String importQuery, String splitColumnField, String boundingQuery, String splitValue)
+ throws IOException, InterruptedException {
+ enterTheCloudSQLPostgreSQLPropertiesForDatabase(database);
+ CloudSqlPostgreSqlActions.enterImportQuery(E2ETestUtils.pluginProp(importQuery));
+ CloudSqlPostgreSqlActions.enterSplitColumn(E2ETestUtils.pluginProp(splitColumnField));
+ CloudSqlPostgreSqlActions.enterBoundingQuery(E2ETestUtils.pluginProp(boundingQuery));
+ CloudSqlPostgreSqlActions.replaceSplitValue(E2ETestUtils.pluginProp(splitValue));
+ }
+
+ @Then("Enter Driver Name with Invalid value for Driver name field {string}")
+ public void enterDriverNameWithInvalidValueForDriverNameField(String driverName) {
+ CloudSqlPostgreSqlActions.enterDefaultDriver(E2ETestUtils.pluginProp(driverName));
+ }
+
+ @Then("Verify invalid Driver name error message is displayed for Driver {string}")
+ public void verifyInvalidDriverNameErrorMessageIsDisplayedForDriver(String driverName) {
+ CloudSqlPostgreSqlActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CloudSqlPostgreSqlLocators.validateButton);
+ String expectedErrorMessage = E2ETestUtils.errorProp(E2ETestConstants.ERROR_MSG_INVALID_DRIVER_NAME)
+ .replaceAll("DRIVER_NAME", E2ETestUtils.pluginProp(driverName));
+ String actualErrorMessage = E2ETestUtils.findPropertyErrorElement("jdbcPluginName").getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement("jdbcPluginName"));
+ String expectedColor = E2ETestUtils.errorProp(E2ETestConstants.ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+ }
+
+ @Then("Verify plugin validation fails with error")
+ public void verifyPluginValidationFailsWithError() {
+ CdfStudioActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.pluginValidationErrorMsg, 10L);
+ String expectedErrorMessage = E2ETestUtils.errorProp(E2ETestConstants.ERROR_MSG_ERROR_FOUND_VALIDATION);
+ String actualErrorMessage = CdfStudioLocators.pluginValidationErrorMsg.getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ }
+
+ @Then("Click on PreviewData for BigQuery")
+ public void clickOnPreviewDataForBigQuery() {
+ CdfBigQueryPropertiesActions.clickPreviewData();
+ }
+
+ @Then("Click on PreviewData for GCS")
+ public void clickOnPreviewDataForGCS() {
+ CloudSqlPostgreSqlActions.clickPluginPreviewData("GCS");
+ }
+}
+
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/package-info.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/package-info.java
new file mode 100755
index 000000000..d19ed0b1f
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the step definitions for the CloudSQLPostgreSQL e2e tests.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.stepsdesign;
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/tests/runner/TestRunner.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/tests/runner/TestRunner.java
new file mode 100755
index 000000000..e66f25b6b
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/tests/runner/TestRunner.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.tests.runner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute cases.
+ */
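+// Features are read from src/test/features and bound to step definitions from the two
+// glue packages; the JSON report below is consumed by maven-cucumber-reporting.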
+@RunWith(Cucumber.class)
+@CucumberOptions(
+ features = {"src/test/features"},
+ glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign"},
+ tags = {"@cloudSQLPostgreSQL"},
+ monochrome = true,
+ plugin = {"pretty", "html:target/cucumber-html-report", "json:target/cucumber-reports/cucumber.json",
+ "junit:target/cucumber-reports/cucumber.xml"}
+)
+public class TestRunner {
+}
diff --git a/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/tests/runner/package-info.java b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/tests/runner/package-info.java
new file mode 100755
index 000000000..bef50314d
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/cloudsqlpostgresql/tests/runner/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the Cucumber test runner for the CloudSQLPostgreSQL e2e tests.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.tests.runner;
diff --git a/e2e-test/src/test/java/io/cdap/plugin/utils/E2ETestConstants.java b/e2e-test/src/test/java/io/cdap/plugin/utils/E2ETestConstants.java
new file mode 100644
index 000000000..bb2371ef6
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/utils/E2ETestConstants.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.utils;
+
+/**
+ * E2E test constants.
+ */
+public class E2ETestConstants {
+ public static final String ERROR_MSG_COLOR = "errorMessageColor";
+ public static final String ERROR_MSG_MANDATORY = "errorMessageMandatory";
+ public static final String ERROR_MSG_VALIDATION = "errorMessageValidation";
+ public static final String ERROR_MSG_ERROR_FOUND_VALIDATION = "errorMessageErrorFoundValidation";
+ public static final String ERROR_MSG_INVALID_DRIVER_NAME = "errorMessageDriverName";
+}
diff --git a/e2e-test/src/test/java/io/cdap/plugin/utils/E2ETestUtils.java b/e2e-test/src/test/java/io/cdap/plugin/utils/E2ETestUtils.java
new file mode 100644
index 000000000..7072cd543
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/utils/E2ETestUtils.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.utils;
+
+import io.cdap.e2e.utils.ConstantsUtil;
+import io.cdap.e2e.utils.SeleniumDriver;
+import org.apache.log4j.Logger;
+import org.junit.Assert;
+import org.openqa.selenium.By;
+import org.openqa.selenium.WebElement;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_COLOR;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_MANDATORY;
+
+/**
+ * E2ETestUtils contains the helper functions.
+ */
+public class E2ETestUtils {
+
+ private static final Properties pluginProperties = new Properties();
+ private static final Properties errorProperties = new Properties();
+ private static final Logger logger = Logger.getLogger(E2ETestUtils.class);
+
+ static {
+ try {
+ pluginProperties.load(E2ETestUtils.class.getResourceAsStream("/pluginParameters.properties"));
+ errorProperties.load(E2ETestUtils.class.getResourceAsStream("/errorMessage.properties"));
+ } catch (IOException e) {
+ logger.error("Error while reading properties file" + e);
+ }
+ }
+
+ public static String pluginProp(String property) {
+ return pluginProperties.getProperty(property);
+ }
+
+ public static String errorProp(String property) {
+ return errorProperties.getProperty(property);
+ }
+
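+ /**
+  * Asserts both the text and the color of the inline error for a mandatory property.
+  * The expected message comes from errorMessage.properties with the PROPERTY
+  * placeholder substituted; the color is compared after conversion to hex.
+  */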
+ public static void validateMandatoryPropertyError(String property) {
+ String expectedErrorMessage = errorProp(ERROR_MSG_MANDATORY)
+ .replaceAll("PROPERTY", property);
+ String actualErrorMessage = findPropertyErrorElement(property).getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement(property));
+ String expectedColor = E2ETestUtils.errorProp(ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+ }
+
+ public static WebElement findPropertyErrorElement(String property) {
+ return SeleniumDriver.getDriver().findElement(
+ By.xpath("//*[@data-cy='" + property + "']/following-sibling::div[@data-cy='property-row-error']"));
+ }
+
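+ /**
+  * Converts a CSS color value such as "rgba(164, 4, 3, 1)" into the "#rrggbb" hex
+  * form used by errorMessage.properties (e.g. #a40403).
+  */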
+ public static String getErrorColor(WebElement element) {
+ String color = element.getCssValue(ConstantsUtil.COLOR);
+ String[] rgb = color.replace("rgba(", "").replace("rgb(", "").replace(")", "").split(",");
+ int red = Integer.parseInt(rgb[0].trim());
+ int green = Integer.parseInt(rgb[1].trim());
+ int blue = Integer.parseInt(rgb[2].trim());
+ return String.format("#%02x%02x%02x", red, green, blue);
+ }
+}
diff --git a/e2e-test/src/test/java/io/cdap/plugin/utils/package-info.java b/e2e-test/src/test/java/io/cdap/plugin/utils/package-info.java
new file mode 100644
index 000000000..aa77fdc52
--- /dev/null
+++ b/e2e-test/src/test/java/io/cdap/plugin/utils/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the helper utils.
+ */
+package io.cdap.plugin.utils;
diff --git a/e2e-test/src/test/resources/errorMessage.properties b/e2e-test/src/test/resources/errorMessage.properties
new file mode 100644
index 000000000..abadc9bbc
--- /dev/null
+++ b/e2e-test/src/test/resources/errorMessage.properties
@@ -0,0 +1,6 @@
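+# Expected validation messages. PROPERTY and DRIVER_NAME are placeholders that the
+# step definitions substitute before asserting against the UI text.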
+errorMessageColor=#a40403
+errorMessageValidation=No errors found.
+errorMessageErrorFoundValidation=1 error found
+errorMessageMandatory=Required property 'PROPERTY' has no value.
+errorMessageDriverName=Unable to load JDBC Driver class for plugin name 'DRIVER_NAME'. Ensure that the \
+ plugin 'DRIVER_NAME' of type 'jdbc' containing the driver has been installed correctly.
diff --git a/e2e-test/src/test/resources/pluginParameters.properties b/e2e-test/src/test/resources/pluginParameters.properties
new file mode 100644
index 000000000..1bdd54c6e
--- /dev/null
+++ b/e2e-test/src/test/resources/pluginParameters.properties
@@ -0,0 +1,35 @@
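+# Test data for the CloudSQLPostgreSQL scenarios; values are resolved at runtime via
+# E2ETestUtils.pluginProp(key).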
+projectId=cdf-athena
+dataset=test_automation
+cloudPSQLImportQuery=select * from psql;
+cloudPSQLDbName=postgres
+cloudPSQLTableName=psql
+cloudPSQLReferenceNameValid=TestReference
+cloudPSQLDriverNameInvalid=a#b#c#d#
+cloudPSQLReferenceNameInvalid=#@#@#@#@#@
+cloudPSQLConnectionNameInvalid=10
+cloudPSQLDBImportQueryForAll=select * from postgre_datatype
+cloudPSQLDBBoundingQuery=Select MIN(dayofjoining),MAX(dayofjoining) from EmployeeDetails
+cloudPSQLDBImportQueryForBetween=select * from EmployeeDetails where empid between 621375 and 850279
+cloudPSQLSplitColumnBetweenValue=empid
+cloudPSQLDBImportQueryForIn=select * from EmployeeDetails where firstname in ('Reid','Madge')
+cloudPSQLSplitColumnInValue=firstname
+cloudPSQLDBImportQueryNotIn=select * from EmployeeDetails where firstname not in ('Reid','Madge')
+cloudPSQLSplitColumnNotInValue=firstname
+cloudPSQLDBImportQueryOrderBy=select * from EmployeeDetails order by lastname
+cloudPSQLSplitColumnOrderByValue=lastname
+cloudPSQLDBImportQueryDuplicate=select firstname,count(*) from EmployeeDetails Group By firstname Having count(*)>1;
+cloudPSQLSplitColumnDuplicateValue=firstname
+cloudPSQLDBImportQueryForMax=select max(dayofjoining) from EmployeeDetails
+cloudPSQLSplitColumnMaxValue=dayofjoining
+cloudPSQLDBImportQueryForMin=select min(dayofjoining) from EmployeeDetails
+cloudPSQLSplitColumnMinValue=dayofjoining
+cloudPSQLDBImportQueryDistinct=select distinct firstname,lastname from EmployeeDetails
+cloudPSQLSplitColumnDistinctValue=firstname
+cloudPSQLDBImportQueryInnerJoin=select first_name,last_name from customers inner join orders on customers.customer_id=orders.customer_id
+cloudPSQLDBImportQueryLeftJoin=select first_name,last_name from customers left join orders on customers.customer_id=orders.customer_id
+cloudPSQLDBImportQueryRightJoin=select first_name,last_name from customers right join orders on customers.customer_id=orders.customer_id
+cloudPSQLDBImportQueryOuterJoin=select first_name,last_name from customers full outer join orders on customers.customer_id=orders.customer_id
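+# $CONDITIONS is a required macro in the import query; the plugin replaces it with the
+# WHERE clause generated for each data split.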
+cloudPSQLQuery=select * from EmployeeDetails where $CONDITIONS;
+cloudPsqlNoOfSplits=3
+cloudPsqlBigQuery=psql
+cloudPSQLGcsBucketName=cdf-athena/Employee_work/2021-09-03-21-00
diff --git a/pom.xml b/pom.xml
index f305ecea5..57f3033aa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -469,6 +469,14 @@
**/org/apache/hadoop/**
**/resources/**
+ e2e-test/**/*.properties
+ e2e-test/**/*.feature
+ e2e-test/**/cucumber**/**
+ e2e-test/**/maven-status/**
+ e2e-test/**/failsafe-reports/**
+ e2e-test/**/e2e-debug/**
+ e2e-test/**/*-result.xml
+ e2e-test/e2e-test.iml