ITN coverage for SQL engine
AnkitCLI committed Apr 4, 2024
1 parent 3ecc0ed commit 8b780c3
Showing 14 changed files with 468 additions and 2 deletions.
189 changes: 189 additions & 0 deletions src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
@@ -0,0 +1,189 @@
# Copyright © 2024 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@BigQuery_Sink
Feature: BigQuery SQL Engine - Verification of successful BigQuery to BigQuery data transfer with Transformation Pushdown

@BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
Scenario: Validate successful records transfer from BigQuery source to BigQuery sink
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "BigQuery" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Click plugin property: "switch-useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
And Replace input plugin property: "dataset" with value: "dataset"
And Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Validate "BigQuery" plugin properties
And Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqTargetTable"
Then Validate "BigQuery" plugin properties
And Close the Plugin Properties page
Then Save the pipeline
Then Preview and run the pipeline
Then Wait till pipeline preview is in running state
Then Open and capture pipeline preview logs
Then Verify the preview run status of pipeline in the logs is "succeeded"
Then Close the pipeline logs
Then Close the preview
Then Deploy the pipeline
Then Click on "Configure" button
Then Click on "Transformation Pushdown" button
Then Click on "Enable Transformation Pushdown" button
Then Enter input plugin property: "dataset" with value: "test_sqlengine"
Then Click on "Advanced" button
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Click on "Save" button
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Close the pipeline logs
Then Verify the pipeline status is "Succeeded"
Then Validate The Data From BQ To BQ With Actual And Expected File for: "bqExpectedFile"

@BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using group by
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "BigQuery" from the plugins list as: "Sink"
When Expand Plugin group in the LHS plugins list: "Analytics"
When Select plugin: "Group By" from the plugins list as: "Analytics"
Then Navigate to the properties page of plugin: "BigQuery"
Then Click plugin property: "switch-useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
And Replace input plugin property: "dataset" with value: "dataset"
And Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Validate "BigQuery" plugin properties
And Close the Plugin Properties page
Then Connect plugins: "BigQuery" and "Group By" to establish connection
Then Connect plugins: "Group By" and "BigQuery2" to establish connection
Then Navigate to the properties page of plugin: "Group By"
Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidFirstField"
Then Press Escape Key
Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidSecondField"
Then Press Escape Key
Then Enter GroupBy plugin Fields to be Aggregate "groupByGcsAggregateFields"
Then Click on the Get Schema button
Then Click on the Validate button
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqTargetTable"
Then Validate "BigQuery" plugin properties
And Close the Plugin Properties page
Then Save the pipeline
Then Preview and run the pipeline
Then Wait till pipeline preview is in running state
Then Open and capture pipeline preview logs
Then Verify the preview run status of pipeline in the logs is "succeeded"
Then Close the pipeline logs
Then Close the preview
Then Deploy the pipeline
Then Click on "Configure" button
Then Click on "Transformation Pushdown" button
Then Click on "Enable Transformation Pushdown" button
Then Enter input plugin property: "dataset" with value: "test_sqlengine"
Then Click on "Advanced" button
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Click on "Save" button
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Close the pipeline logs
Then Verify the pipeline status is "Succeeded"
Then Validate The Data From BQ To BQ With Actual And Expected File for: "groupByTestOutputFile"

@BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using deduplicate
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "BigQuery" from the plugins list as: "Sink"
When Expand Plugin group in the LHS plugins list: "Analytics"
When Select plugin: "Deduplicate" from the plugins list as: "Analytics"
Then Navigate to the properties page of plugin: "BigQuery"
Then Click plugin property: "switch-useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
And Replace input plugin property: "dataset" with value: "dataset"
And Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Validate "BigQuery" plugin properties
And Close the Plugin Properties page
Then Connect plugins: "BigQuery" and "Deduplicate" to establish connection
Then Connect plugins: "Deduplicate" and "BigQuery2" to establish connection
Then Navigate to the properties page of plugin: "Deduplicate"
Then Select dropdown plugin property: "uniqueFields" with option value: "DeduplicateValidFirstField"
Then Press Escape Key
Then Click on the Validate button
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqTargetTable"
Then Validate "BigQuery" plugin properties
And Close the Plugin Properties page
Then Save the pipeline
Then Preview and run the pipeline
Then Wait till pipeline preview is in running state
Then Open and capture pipeline preview logs
Then Verify the preview run status of pipeline in the logs is "succeeded"
Then Close the pipeline logs
Then Close the preview
Then Deploy the pipeline
Then Click on "Configure" button
Then Click on "Transformation Pushdown" button
Then Click on "Enable Transformation Pushdown" button
Then Enter input plugin property: "dataset" with value: "test_sqlengine"
Then Click on "Advanced" button
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "bqConnectionName"
Then Click on "Save" button
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Close the pipeline logs
Then Verify the pipeline status is "Succeeded"
Then Validate The Data From BQ To BQ With Actual And Expected File for: "deduplicateTestOutputFile"
70 changes: 70 additions & 0 deletions src/e2e-test/java/io/cdap/plugin/bigquery/actions/GroupByActions.java
@@ -0,0 +1,70 @@
/*
* Copyright © 2024 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.bigquery.actions;

import io.cdap.e2e.pages.locators.CdfPluginPropertiesLocators;
import io.cdap.e2e.utils.ElementHelper;
import io.cdap.e2e.utils.JsonUtils;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.e2e.utils.SeleniumDriver;
import io.cdap.e2e.utils.SeleniumHelper;
import io.cdap.plugin.bigquery.locators.GroupByLocators;
import io.cucumber.core.logging.Logger;
import io.cucumber.core.logging.LoggerFactory;
import org.openqa.selenium.ElementClickInterceptedException;

import java.util.Map;

/**
 * GroupBy related actions.
 */
public class GroupByActions {
  private static final Logger logger = LoggerFactory.getLogger(GroupByActions.class);

  static {
    SeleniumHelper.getPropertiesLocators(GroupByLocators.class);
  }

  /**
   * Fills the Group By plugin's "Aggregates" widget, one row per entry of the key/value JSON
   * array referenced by the given plugin property. Keys follow the form
   * "field#function[#condition]"; values are the output aliases.
   */
  public static void enterAggregates(String jsonAggregatesFields) {
    Map<String, String> fieldsMapping =
      JsonUtils.convertKeyValueJsonArrayToMap(PluginPropertyUtils.pluginProp(jsonAggregatesFields));
    int index = 0;
    for (Map.Entry<String, String> entry : fieldsMapping.entrySet()) {
      String[] keyParts = entry.getKey().split("#");
      ElementHelper.sendKeys(GroupByLocators.field(index), keyParts[0]);
      ElementHelper.clickOnElement(GroupByLocators.fieldFunction(index));
      // The function dropdown item can be briefly covered by another element;
      // retry the click up to five times before rethrowing.
      int attempts = 0;
      while (attempts < 5) {
        try {
          ElementHelper.clickOnElement(SeleniumDriver.getDriver()
            .findElement(CdfPluginPropertiesLocators.locateDropdownListItem(keyParts[1])));
          break;
        } catch (ElementClickInterceptedException e) {
          if (attempts == 4) {
            throw e;
          }
        }
        attempts++;
      }
      // Conditional aggregate functions (e.g. "SumIf") take an extra condition argument.
      if (keyParts[1].contains("If")) {
        ElementHelper.sendKeys(GroupByLocators.fieldFunctionCondition(index), keyParts[2]);
      }
      ElementHelper.sendKeys(GroupByLocators.fieldFunctionAlias(index), entry.getValue());
      ElementHelper.clickOnElement(GroupByLocators.fieldAddRowButton(index));
      index++;
    }
  }
}
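
For reference, a minimal sketch of how this action might be driven, assuming the key/value JSON array format consumed by JsonUtils.convertKeyValueJsonArrayToMap; the field names, function, and condition below are illustrative, not taken from this commit:

// Hypothetical entry in the test's plugin-parameters properties file:
// groupByGcsAggregateFields=[{"key":"price#MaxIf#price>100","value":"maxPrice"},{"key":"id#Count","value":"countOfId"}]

// The Gherkin step "Enter GroupBy plugin Fields to be Aggregate" resolves the
// property name and fills one aggregate row per map entry:
GroupByActions.enterAggregates("groupByGcsAggregateFields");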
4 changes: 4 additions & 0 deletions src/e2e-test/java/io/cdap/plugin/bigquery/actions/package-info.java
@@ -0,0 +1,4 @@
/**
* Package contains the step actions for the groupby features.
*/
package io.cdap.plugin.bigquery.actions;
51 changes: 51 additions & 0 deletions src/e2e-test/java/io/cdap/plugin/bigquery/locators/GroupByLocators.java
@@ -0,0 +1,51 @@
/*
* Copyright © 2024 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.bigquery.locators;

import io.cdap.e2e.utils.SeleniumDriver;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;

/**
 * GroupBy related locators.
 */
public class GroupByLocators {

  public static WebElement field(int row) {
    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='field']";
    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
  }

  public static WebElement fieldFunction(int row) {
    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']";
    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
  }

  public static WebElement fieldFunctionAlias(int row) {
    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='alias']";
    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
  }

  public static WebElement fieldAddRowButton(int row) {
    String xpath = "//*[@data-cy='aggregates']//*[@data-cy='" + row + "']//button[@data-cy='add-row']";
    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
  }

  public static WebElement fieldFunctionCondition(int row) {
    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='condition']";
    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
  }
}
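
As a usage sketch (the row index and values are illustrative): each locator addresses the n-th aggregate row of the Group By widget through its data-cy attribute, so filling the first row could look like:

// Fill the first aggregate row (index 0) of the Group By "Aggregates" widget:
ElementHelper.sendKeys(GroupByLocators.field(0), "price");                  // field to aggregate
ElementHelper.clickOnElement(GroupByLocators.fieldFunction(0));             // open the function dropdown
ElementHelper.sendKeys(GroupByLocators.fieldFunctionAlias(0), "maxPrice");  // alias of the output column
ElementHelper.clickOnElement(GroupByLocators.fieldAddRowButton(0));         // add a row for the next aggregate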
4 changes: 4 additions & 0 deletions src/e2e-test/java/io/cdap/plugin/bigquery/locators/package-info.java
@@ -0,0 +1,4 @@
/**
* Package contains the locators for the groupby features.
*/
package io.cdap.plugin.bigquery.locators;
@@ -19,12 +19,14 @@
import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
import io.cdap.e2e.utils.BigQueryClient;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.plugin.bigquery.actions.GroupByActions;
import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
import io.cucumber.java.en.Then;
import org.junit.Assert;
import stepsdesign.BeforeActions;

import java.io.IOException;
import java.net.URISyntaxException;

/**
* BigQuery Plugin validation common step design.
@@ -44,4 +46,18 @@ public void validateTheValuesOfRecordsTransferredToBQsinkIsEqualToTheValuesFromS
    Assert.assertTrue("Value of records transferred to the BQ sink should be equal to the value " +
                        "of the records in the source table", recordsMatched);
  }

@Then("Enter GroupBy plugin Fields to be Aggregate {string}")
public void enterGroupByPluginFieldsToBeAggregate(String jsonAggregateField) {
GroupByActions.enterAggregates(jsonAggregateField);
}

@Then("Validate The Data From BQ To BQ With Actual And Expected File for: {string}")
public void validateTheDataFromBQToBQWithActualAndExpectedFileFor(String expectedFile) throws IOException,
InterruptedException, URISyntaxException {
boolean recordsMatched = ValidationHelper.validateActualDataToExpectedData(
PluginPropertyUtils.pluginProp("bqTargetTable"),
PluginPropertyUtils.pluginProp(expectedFile));
Assert.assertTrue("Value of records in actual and expected file is equal", recordsMatched);
}
}
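
ValidationHelper itself is not shown in this excerpt. As a minimal, self-contained sketch only (assuming newline-delimited record files; not the committed implementation), an order-insensitive comparison could look like:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

// Hypothetical helper: counts each record of the actual file, then decrements per
// expected record; the two multisets match iff every count returns to zero.
final class RecordComparison {
  static boolean recordsMatch(String actualFile, String expectedFile) throws IOException {
    Map<String, Integer> counts = new HashMap<>();
    for (String line : Files.readAllLines(Paths.get(actualFile))) {
      counts.merge(line, 1, Integer::sum);
    }
    for (String line : Files.readAllLines(Paths.get(expectedFile))) {
      if (counts.merge(line, -1, Integer::sum) < 0) {
        return false; // expected record missing or under-represented in actual
      }
    }
    return counts.values().stream().allMatch(c -> c == 0);
  }
}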