forked from data-integrations/google-cloud
-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
14 changed files
with
468 additions
and
2 deletions.
There are no files selected for viewing
189 changes: 189 additions & 0 deletions
189
src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,189 @@ | ||
# Copyright © 2024 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@BigQuery_Sink
Feature: BigQuery sink - Verification of BigQuery to BigQuery successful data transfer

  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink
    Given Open Datafusion Project to configure pipeline
    When Expand Plugin group in the LHS plugins list: "Source"
    When Select plugin: "BigQuery" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "BigQuery" from the plugins list as: "Sink"
    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
    Then Navigate to the properties page of plugin: "BigQuery"
    Then Click plugin property: "switch-useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
    And Replace input plugin property: "dataset" with value: "dataset"
    And Replace input plugin property: "table" with value: "bqSourceTable"
    Then Click on the Get Schema button
    Then Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "BigQuery2"
    Then Click plugin property: "useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
    Then Enter input plugin property: "dataset" with value: "dataset"
    Then Enter input plugin property: "table" with value: "bqTargetTable"
    Then Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    Then Save the pipeline
    Then Preview and run the pipeline
    Then Wait till pipeline preview is in running state
    Then Open and capture pipeline preview logs
    Then Verify the preview run status of pipeline in the logs is "succeeded"
    Then Close the pipeline logs
    Then Close the preview
    Then Deploy the pipeline
    Then Click on "Configure" button
    Then Click on "Transformation Pushdown" button
    Then Click on "Enable Transformation Pushdown" button
    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
    Then Click on "Advanced" button
    Then Click plugin property: "useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Click on "Save" button
    Then Run the Pipeline in Runtime
    Then Wait till pipeline is in running state
    Then Open and capture logs
    Then Close the pipeline logs
    Then Verify the pipeline status is "Succeeded"
    Then Validate The Data From BQ To BQ With Actual And Expected File for: "bqExpectedFile"

  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using group by
    Given Open Datafusion Project to configure pipeline
    When Expand Plugin group in the LHS plugins list: "Source"
    When Select plugin: "BigQuery" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "BigQuery" from the plugins list as: "Sink"
    When Expand Plugin group in the LHS plugins list: "Analytics"
    When Select plugin: "Group By" from the plugins list as: "Analytics"
    Then Navigate to the properties page of plugin: "BigQuery"
    Then Click plugin property: "switch-useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
    And Replace input plugin property: "dataset" with value: "dataset"
    And Replace input plugin property: "table" with value: "bqSourceTable"
    Then Click on the Get Schema button
    Then Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    Then Connect plugins: "BigQuery" and "Group By" to establish connection
    Then Connect plugins: "Group By" and "BigQuery2" to establish connection
    Then Navigate to the properties page of plugin: "Group By"
    Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidFirstField"
    Then Press Escape Key
    Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidSecondField"
    Then Press Escape Key
    Then Enter GroupBy plugin Fields to be Aggregate "groupByGcsAggregateFields"
    Then Click on the Get Schema button
    Then Click on the Validate button
    Then Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "BigQuery2"
    Then Click plugin property: "useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
    Then Enter input plugin property: "dataset" with value: "dataset"
    Then Enter input plugin property: "table" with value: "bqTargetTable"
    Then Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    Then Save the pipeline
    Then Preview and run the pipeline
    Then Wait till pipeline preview is in running state
    Then Open and capture pipeline preview logs
    Then Verify the preview run status of pipeline in the logs is "succeeded"
    Then Close the pipeline logs
    Then Close the preview
    Then Deploy the pipeline
    Then Click on "Configure" button
    Then Click on "Transformation Pushdown" button
    Then Click on "Enable Transformation Pushdown" button
    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
    Then Click on "Advanced" button
    Then Click plugin property: "useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Click on "Save" button
    Then Run the Pipeline in Runtime
    Then Wait till pipeline is in running state
    Then Open and capture logs
    Then Close the pipeline logs
    Then Verify the pipeline status is "Succeeded"
    Then Validate The Data From BQ To BQ With Actual And Expected File for: "groupByTestOutputFile"

  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using deduplicate
    Given Open Datafusion Project to configure pipeline
    When Expand Plugin group in the LHS plugins list: "Source"
    When Select plugin: "BigQuery" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "BigQuery" from the plugins list as: "Sink"
    When Expand Plugin group in the LHS plugins list: "Analytics"
    When Select plugin: "Deduplicate" from the plugins list as: "Analytics"
    Then Navigate to the properties page of plugin: "BigQuery"
    Then Click plugin property: "switch-useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
    And Replace input plugin property: "dataset" with value: "dataset"
    And Replace input plugin property: "table" with value: "bqSourceTable"
    Then Click on the Get Schema button
    Then Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    Then Connect plugins: "BigQuery" and "Deduplicate" to establish connection
    Then Connect plugins: "Deduplicate" and "BigQuery2" to establish connection
    Then Navigate to the properties page of plugin: "Deduplicate"
    Then Select dropdown plugin property: "uniqueFields" with option value: "DeduplicateValidFirstField"
    Then Press Escape Key
    Then Click on the Validate button
    Then Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "BigQuery2"
    Then Click plugin property: "useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
    Then Enter input plugin property: "dataset" with value: "dataset"
    Then Enter input plugin property: "table" with value: "bqTargetTable"
    Then Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    Then Save the pipeline
    Then Preview and run the pipeline
    Then Wait till pipeline preview is in running state
    Then Open and capture pipeline preview logs
    Then Verify the preview run status of pipeline in the logs is "succeeded"
    Then Close the pipeline logs
    Then Close the preview
    Then Deploy the pipeline
    Then Click on "Configure" button
    Then Click on "Transformation Pushdown" button
    Then Click on "Enable Transformation Pushdown" button
    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
    Then Click on "Advanced" button
    Then Click plugin property: "useConnection"
    Then Click on the Browse Connections button
    Then Select connection: "bqConnectionName"
    Then Click on "Save" button
    Then Run the Pipeline in Runtime
    Then Wait till pipeline is in running state
    Then Open and capture logs
    Then Close the pipeline logs
    Then Verify the pipeline status is "Succeeded"
    Then Validate The Data From BQ To BQ With Actual And Expected File for: "deduplicateTestOutputFile"
70 changes: 70 additions & 0 deletions
70
src/e2e-test/java/io/cdap/plugin/bigquery/actions/GroupByActions.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
/*
 * Copyright © 2024 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.cdap.plugin.bigquery.actions;

import io.cdap.e2e.pages.locators.CdfPluginPropertiesLocators;
import io.cdap.e2e.utils.ElementHelper;
import io.cdap.e2e.utils.JsonUtils;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.e2e.utils.SeleniumDriver;
import io.cdap.e2e.utils.SeleniumHelper;
import io.cdap.plugin.bigquery.locators.GroupByLocators;
import io.cucumber.core.logging.Logger;
import io.cucumber.core.logging.LoggerFactory;
import org.openqa.selenium.ElementClickInterceptedException;

import java.util.Map;

/**
 * GroupBy related step actions.
 */
public class GroupByActions {
  // Cucumber's LoggerFactory.getLogger already returns io.cucumber.core.logging.Logger;
  // the original cast to Logger was redundant and has been removed.
  private static final Logger logger = LoggerFactory.getLogger(GroupByActions.class);
  // Dropdown clicks can be intercepted while the UI settles; retry up to this many times.
  private static final int MAX_CLICK_ATTEMPTS = 5;

  static {
    SeleniumHelper.getPropertiesLocators(GroupByLocators.class);
  }

  /**
   * Fills the GroupBy plugin's "aggregates" rows from a configured JSON key/value mapping.
   *
   * <p>Each map key is expected in the form {@code "<field>#<function>[#<condition>]"} and each
   * value is the alias for that aggregate row — TODO confirm the key format against the test
   * resource referenced by the property name.
   *
   * @param jsonAggregateFields name of the plugin property whose value is a key/value JSON array
   *                            describing the aggregate fields to enter
   */
  public static void enterAggregates(String jsonAggregateFields) {
    Map<String, String> fieldsMapping =
      JsonUtils.convertKeyValueJsonArrayToMap(PluginPropertyUtils.pluginProp(jsonAggregateFields));
    int index = 0;
    for (Map.Entry<String, String> entry : fieldsMapping.entrySet()) {
      // Hoist the '#'-split; the original re-split the same key up to four times per row.
      String[] keyParts = entry.getKey().split("#");
      ElementHelper.sendKeys(GroupByLocators.field(index), keyParts[0]);
      ElementHelper.clickOnElement(GroupByLocators.fieldFunction(index));
      // Retry the dropdown-item click: another element may briefly intercept it.
      for (int attempts = 0; attempts < MAX_CLICK_ATTEMPTS; attempts++) {
        try {
          ElementHelper.clickOnElement(SeleniumDriver.getDriver()
            .findElement(CdfPluginPropertiesLocators.locateDropdownListItem(keyParts[1])));
          break;
        } catch (ElementClickInterceptedException e) {
          if (attempts == MAX_CLICK_ATTEMPTS - 1) {
            throw e; // out of retries — surface the original failure with its cause intact
          }
        }
      }
      // Functions whose names contain "If" take an extra condition input (third key segment).
      if (keyParts[1].contains("If")) {
        ElementHelper.sendKeys(GroupByLocators.fieldFunctionCondition(index), keyParts[2]);
      }
      ElementHelper.sendKeys(GroupByLocators.fieldFunctionAlias(index), entry.getValue());
      ElementHelper.clickOnElement(GroupByLocators.fieldAddRowButton(index));
      index++;
    }
  }
}
4 changes: 4 additions & 0 deletions
4
src/e2e-test/java/io/cdap/plugin/bigquery/actions/package-info.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
/**
 * Package contains the step actions for the GroupBy features.
 */
package io.cdap.plugin.bigquery.actions;
51 changes: 51 additions & 0 deletions
51
src/e2e-test/java/io/cdap/plugin/bigquery/locators/GroupByLocators.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
/* | ||
* Copyright © 2024 Cask Data, Inc. | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not | ||
* use this file except in compliance with the License. You may obtain a copy of | ||
* the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | ||
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | ||
* License for the specific language governing permissions and limitations under | ||
* the License. | ||
*/ | ||
package io.cdap.plugin.bigquery.locators; | ||
|
||
import io.cdap.e2e.utils.SeleniumDriver; | ||
import org.openqa.selenium.By; | ||
import org.openqa.selenium.WebElement; | ||
|
||
/** | ||
* GroupBy Related Locators. | ||
*/ | ||
public class GroupByLocators { | ||
|
||
public static WebElement field(int row) { | ||
String xpath = "//div[@data-cy='aggregates']//div[@data-cy= '" + row + "']//input[@placeholder='field']"; | ||
return SeleniumDriver.getDriver().findElement(By.xpath(xpath)); | ||
} | ||
|
||
public static WebElement fieldFunction(int row) { | ||
String xpath = "//div[@data-cy='aggregates']//div[@data-cy= '" + row + "']"; | ||
return SeleniumDriver.getDriver().findElement(By.xpath(xpath)); | ||
} | ||
|
||
public static WebElement fieldFunctionAlias(int row) { | ||
String xpath = "//div[@data-cy='aggregates']//div[@data-cy= '" + row + "']//input[@placeholder='alias']"; | ||
return SeleniumDriver.getDriver().findElement(By.xpath(xpath)); | ||
} | ||
|
||
public static WebElement fieldAddRowButton(int row) { | ||
String xpath = "//*[@data-cy='aggregates']//*[@data-cy='" + row + "']//button[@data-cy='add-row']"; | ||
return SeleniumDriver.getDriver().findElement(By.xpath(xpath)); | ||
} | ||
|
||
public static WebElement fieldFunctionCondition(int row) { | ||
String xpath = "//div[@data-cy='aggregates']//div[@data-cy= '" + row + "']//input[@placeholder='condition']"; | ||
return SeleniumDriver.getDriver().findElement(By.xpath(xpath)); | ||
} | ||
} |
4 changes: 4 additions & 0 deletions
4
src/e2e-test/java/io/cdap/plugin/bigquery/locators/package-info.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
/**
 * Package contains the locators for the GroupBy features.
 */
package io.cdap.plugin.bigquery.locators;
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.