diff --git a/src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature b/src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
new file mode 100644
index 0000000000..02179b4175
--- /dev/null
+++ b/src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
@@ -0,0 +1,189 @@
+# Copyright © 2024 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BigQuery_Sink
+Feature: BigQuery sink - Verification of BigQuery to BigQuery successful data transfer using BigQuery SQL engine
+
+  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
+  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
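+    # Configure the deployed pipeline to run with Transformation Pushdown enabled,
+    # staging pushed-down transformations in the BigQuery dataset "test_sqlengine".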
+    Then Click on "Configure" button
+    Then Click on "Transformation Pushdown" button
+    Then Click on "Enable Transformation Pushdown" button
+    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
+    Then Click on "Advanced" button
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on "Save" button
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate The Data From BQ To BQ With Actual And Expected File for: "bqExpectedFile"
+
+  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
+  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using group by
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    When Expand Plugin group in the LHS plugins list: "Analytics"
+    When Select plugin: "Group By" from the plugins list as: "Analytics"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Connect plugins: "BigQuery" and "Group By" to establish connection
+    Then Connect plugins: "Group By" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "Group By"
+    Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidFirstField"
+    Then Press Escape Key
+    Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidSecondField"
+    Then Press Escape Key
+    Then Enter GroupBy plugin Fields to be Aggregate "groupByGcsAggregateFields"
+    Then Click on the Get Schema button
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Click on "Configure" button
+    Then Click on "Transformation Pushdown" button
+    Then Click on "Enable Transformation Pushdown" button
+    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
+    Then Click on "Advanced" button
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on "Save" button
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate The Data From BQ To BQ With Actual And Expected File for: "groupByTestOutputFile"
+
+  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
+  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using deduplicate
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    When Expand Plugin group in the LHS plugins list: "Analytics"
+    When Select plugin: "Deduplicate" from the plugins list as: "Analytics"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Connect plugins: "BigQuery" and "Deduplicate" to establish connection
+    Then Connect plugins: "Deduplicate" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "Deduplicate"
+    Then Select dropdown plugin property: "uniqueFields" with option value: "DeduplicateValidFirstField"
+    Then Press Escape Key
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Click on "Configure" button
+    Then Click on "Transformation Pushdown" button
+    Then Click on "Enable Transformation Pushdown" button
+    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
+    Then Click on "Advanced" button
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on "Save" button
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate The Data From BQ To BQ With Actual And Expected File for: "deduplicateTestOutputFile"
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/actions/GroupByActions.java b/src/e2e-test/java/io/cdap/plugin/bigquery/actions/GroupByActions.java
new file mode 100644
index 0000000000..4f24f27d61
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/actions/GroupByActions.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.bigquery.actions;
+
+import io.cdap.e2e.pages.locators.CdfPluginPropertiesLocators;
+import io.cdap.e2e.utils.ElementHelper;
+import io.cdap.e2e.utils.JsonUtils;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.e2e.utils.SeleniumDriver;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.bigquery.locators.GroupByLocators;
+import io.cucumber.core.logging.Logger;
+import io.cucumber.core.logging.LoggerFactory;
+import org.openqa.selenium.ElementClickInterceptedException;
+
+import java.util.Map;
+
+/**
+ * GroupBy related actions.
+ */
+public class GroupByActions {
+  private static final Logger logger = LoggerFactory.getLogger(GroupByActions.class);
+
+  static {
+    SeleniumHelper.getPropertiesLocators(GroupByLocators.class);
+  }
+
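+  /**
+   * Adds aggregate rows on the Group By plugin properties page.
+   * The property referenced by jsonAggregatesFields holds a key-value JSON array, e.g.
+   * [{"key":"name#Count","value":"namecount"}], where the key is "field#function"
+   * (with an extra "#condition" part for conditional functions such as CountIf)
+   * and the value is the alias for the aggregate.
+   */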
+  public static void enterAggregates(String jsonAggregatesFields) {
+    Map<String, String> fieldsMapping =
+      JsonUtils.convertKeyValueJsonArrayToMap(PluginPropertyUtils.pluginProp(jsonAggregatesFields));
+    int index = 0;
+    for (Map.Entry<String, String> entry : fieldsMapping.entrySet()) {
+      ElementHelper.sendKeys(GroupByLocators.field(index), entry.getKey().split("#")[0]);
+      ElementHelper.clickOnElement(GroupByLocators.fieldFunction(index));
+      // The function dropdown item is occasionally covered by another element, so retry
+      // the click a few times before rethrowing.
+      int attempts = 0;
+      while (attempts < 5) {
+        try {
+          ElementHelper.clickOnElement(SeleniumDriver.getDriver().findElement(
+            CdfPluginPropertiesLocators.locateDropdownListItem(entry.getKey().split("#")[1])));
+          break;
+        } catch (ElementClickInterceptedException e) {
+          if (attempts == 4) {
+            throw e;
+          }
+        }
+        attempts++;
+      }
+      if (entry.getKey().split("#")[1].contains("If")) {
+        ElementHelper.sendKeys(GroupByLocators.fieldFunctionCondition(index), entry.getKey().split("#")[2]);
+      }
+      ElementHelper.sendKeys(GroupByLocators.fieldFunctionAlias(index), entry.getValue());
+      ElementHelper.clickOnElement(GroupByLocators.fieldAddRowButton(index));
+      index++;
+    }
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/actions/package-info.java b/src/e2e-test/java/io/cdap/plugin/bigquery/actions/package-info.java
new file mode 100644
index 0000000000..2f38fe92d4
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/actions/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the step actions for the GroupBy features.
+ */
+package io.cdap.plugin.bigquery.actions;
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/locators/GroupByLocators.java b/src/e2e-test/java/io/cdap/plugin/bigquery/locators/GroupByLocators.java
new file mode 100644
index 0000000000..a966e8066f
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/locators/GroupByLocators.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.bigquery.locators;
+
+import io.cdap.e2e.utils.SeleniumDriver;
+import org.openqa.selenium.By;
+import org.openqa.selenium.WebElement;
+
+/**
+ * GroupBy related locators.
+ */
+public class GroupByLocators {
+
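+  // Each aggregate row in the plugin UI is addressed by its zero-based row index
+  // through the data-cy attribute.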
+  public static WebElement field(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='field']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldFunction(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldFunctionAlias(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='alias']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldAddRowButton(int row) {
+    String xpath = "//*[@data-cy='aggregates']//*[@data-cy='" + row + "']//button[@data-cy='add-row']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldFunctionCondition(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='condition']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/locators/package-info.java b/src/e2e-test/java/io/cdap/plugin/bigquery/locators/package-info.java
new file mode 100644
index 0000000000..aa306ce075
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/locators/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the locators for the GroupBy features.
+ */
+package io.cdap.plugin.bigquery.locators;
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java
index 215886662d..6b4eb1323e 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java
@@ -19,12 +19,14 @@
 import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
 import io.cdap.e2e.utils.BigQueryClient;
 import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.bigquery.actions.GroupByActions;
 import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
 import io.cucumber.java.en.Then;
 import org.junit.Assert;
 import stepsdesign.BeforeActions;
 
 import java.io.IOException;
+import java.net.URISyntaxException;
 
 /**
  * BigQuery Plugin validation common step design.
@@ -44,4 +46,18 @@ public void validateTheValuesOfRecordsTransferredToBQsinkIsEqualToTheValuesFromS
     Assert.assertTrue("Value of records transferred to the BQ sink should be equal to the value " +
       "of the records in the source table", recordsMatched);
   }
+
+  @Then("Enter GroupBy plugin Fields to be Aggregate {string}")
+  public void enterGroupByPluginFieldsToBeAggregate(String jsonAggregateField) {
+    GroupByActions.enterAggregates(jsonAggregateField);
+  }
+
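+  // Compares the BQ sink table contents with a checked-in expected file (see ValidationHelper).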
+  @Then("Validate The Data From BQ To BQ With Actual And Expected File for: {string}")
+  public void validateTheDataFromBQToBQWithActualAndExpectedFileFor(String expectedFile) throws IOException,
+    InterruptedException, URISyntaxException {
+    boolean recordsMatched = ValidationHelper.validateActualDataToExpectedData(
+      PluginPropertyUtils.pluginProp("bqTargetTable"),
+      PluginPropertyUtils.pluginProp(expectedFile));
+    Assert.assertTrue("Value of records in actual and expected file should be equal", recordsMatched);
+  }
 }
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelper.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelper.java
new file mode 100644
index 0000000000..77be715ab4
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelper.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.bigquery.stepsdesign;
+
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.TableResult;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cucumber.core.logging.Logger;
+import io.cucumber.core.logging.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Validation helper for comparing BigQuery table data with expected files.
+ */
+public class ValidationHelper {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ValidationHelper.class);
+  private static final Gson gson = new Gson();
+
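+  // Rows from both the BigQuery table and the expected file are keyed by their "id" field,
+  // so the comparison does not depend on row order.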
+  public static boolean validateActualDataToExpectedData(String table, String fileName) throws IOException,
+    InterruptedException, URISyntaxException {
+    Map<String, JsonObject> bigQueryMap = new HashMap<>();
+    Map<String, JsonObject> fileMap = new HashMap<>();
+    Path importExpectedFile = Paths.get(ValidationHelper.class.getResource("/" + fileName).toURI());
+
+    getBigQueryTableData(table, bigQueryMap);
+    getFileData(importExpectedFile.toString(), fileMap);
+
+    return bigQueryMap.equals(fileMap);
+  }
+
+  public static void getFileData(String fileName, Map<String, JsonObject> fileMap) {
+    try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+      String line;
+      while ((line = br.readLine()) != null) {
+        JsonObject json = gson.fromJson(line, JsonObject.class);
+        if (json.has("id")) { // Check if the JSON object has the "id" key
+          JsonElement idElement = json.get("id");
+          if (idElement.isJsonPrimitive()) {
+            String idKey = idElement.getAsString();
+            fileMap.put(idKey, json);
+          } else {
+            LOG.error("ID key is not a primitive in expected file record: " + json);
+          }
+        } else {
+          LOG.error("ID key not found in expected file record: " + json);
+        }
+      }
+    } catch (IOException e) {
+      LOG.error("Error reading the expected file: " + e.getMessage());
+    }
+  }
+
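+  // Fetches every row of the target table as a JSON object using BigQuery's TO_JSON function.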
+  private static void getBigQueryTableData(String targetTable, Map<String, JsonObject> bigQueryMap)
+    throws IOException, InterruptedException {
+    String dataset = PluginPropertyUtils.pluginProp("dataset");
+    String projectId = PluginPropertyUtils.pluginProp("projectId");
+    String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + targetTable + "` AS t";
+    TableResult result = BigQueryClient.getQueryResult(selectQuery);
+
+    for (FieldValueList row : result.iterateAll()) {
+      JsonObject json = gson.fromJson(row.get(0).getStringValue(), JsonObject.class);
+      if (json.has("id")) { // Check if the JSON object has the "id" key
+        JsonElement idElement = json.get("id");
+        if (idElement.isJsonPrimitive()) {
+          String idKey = idElement.getAsString();
+          bigQueryMap.put(idKey, json);
+        } else {
+          LOG.error("ID key is not a primitive in BigQuery record: " + json);
+        }
+      } else {
+        LOG.error("ID Key not found in JSON object");
+      }
+    }
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
index fc92c2e6eb..7182d33a54 100644
--- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
+++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -272,7 +272,7 @@ public static void createTempSourceBQTable() throws IOException, InterruptedExce
 
   @After(order = 1, value = "@BQ_SOURCE_TEST or @BQ_PARTITIONED_SOURCE_TEST or @BQ_SOURCE_DATATYPE_TEST or " +
     "@BQ_INSERT_SOURCE_TEST or @BQ_UPDATE_SINK_TEST or @BQ_EXISTING_SOURCE_TEST or @BQ_EXISTING_SINK_TEST or " +
-    "@BQ_EXISTING_SOURCE_DATATYPE_TEST or @BQ_EXISTING_SINK_DATATYPE_TEST")
+    "@BQ_EXISTING_SOURCE_DATATYPE_TEST or @BQ_EXISTING_SINK_DATATYPE_TEST or @BQ_SOURCE_SQLENGINE_TEST")
   public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
     BigQueryClient.dropBqQuery(bqSourceTable);
     PluginPropertyUtils.removePluginProp("bqSourceTable");
@@ -1362,4 +1362,10 @@ public static void makeExistingTargetSpannerDBAndTableName() {
     }
   }
 
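+  // Creates the BigQuery source table for @BQ_SOURCE_SQLENGINE_TEST scenarios from the
+  // checked-in create/insert SQL files.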
+  @Before(order = 1, value = "@BQ_SOURCE_SQLENGINE_TEST")
+  public static void createSourceBQTableForSqlEngine() throws IOException, InterruptedException {
+    createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("bqCreateTableQueryFileSQL"),
+                                   PluginPropertyUtils.pluginProp("bqInsertDataQueryFileSQL"));
+  }
+
 }
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index 5f37646dc0..e9372f919b 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -1,6 +1,6 @@
 projectId=cdf-athena
 datasetprojectId=cdf-athena
-dataset=bq_automation
+dataset=test_sqlengine
 wrongSourcePath=gs://00000000-e2e-0014a44f-81be-4501-8360-0ddca192492
 serviceAccountType=filePath
 serviceAccount=auto-detect
@@ -335,3 +335,15 @@ bqExecuteCountDMLUpsertInsert=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENAME` WHERE Id=101 AND Value=5000 AND UID='UPDATED RECORD'
 bqExecuteCountDMLUpsertUpdate=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENAME` WHERE Id=101 AND Value=5000 AND UID='UPDATED RECORD'
 bqExecuteInsertFile=testdata/BQExecute/BQExecuteInsertFile
 ## BQEXECUTE-PLUGIN-PROPERTIES-END
+
+## SQLENGINE-PLUGIN-PROPERTIES-START
+bqCreateTableQueryFileSQL=testdata/BigQuery/BigQueryCreateTableSql.txt
+bqInsertDataQueryFileSQL=testdata/BigQuery/BigQueryInsertTableSql.txt
+groupByValidFirstField=name
+groupByValidSecondField=id
+DeduplicateValidFirstField=name
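+# Aggregate key format: "field#function" (optionally "#condition"); value is the output alias.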
+groupByGcsAggregateFields=[{"key":"name#Count","value":"namecount"}]
+deduplicateTestOutputFile=testdata/BQValidationExpectedFiles/deduplicate_Test
+groupByTestOutputFile=testdata/BQValidationExpectedFiles/groupby_Test
+bqExpectedFile=testdata/BQValidationExpectedFiles/bqexpected_Test
+## SQLENGINE-PLUGIN-PROPERTIES-END
diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test
new file mode 100644
index 0000000000..d87c278725
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test
@@ -0,0 +1,3 @@
+{"address":"ppu","id":13,"name":"root"}
+{"address":"ggn","id":12,"name":"joe"}
+{"address":"ggn","id":12,"name":"joe"}
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test
new file mode 100644
index 0000000000..2e48460aab
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test
@@ -0,0 +1,2 @@
+{"address":"ppu","id":13,"name":"root"}
+{"address":"ggn","id":12,"name":"joe"}
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test
new file mode 100644
index 0000000000..580ed7cec0
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test
@@ -0,0 +1,2 @@
+{"id":12,"name":"joe","namecount":2}
+{"id":13,"name":"root","namecount":1}
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt b/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt
new file mode 100644
index 0000000000..1ca9c2e140
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt
@@ -0,0 +1 @@
+create table `DATASET.TABLE_NAME` (name STRING, address STRING, id INTEGER)
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt b/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt
new file mode 100644
index 0000000000..ea5e5b0a20
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt
@@ -0,0 +1,5 @@
+INSERT INTO DATASET.TABLE_NAME (name, address, id)
+VALUES
+('joe', 'ggn', 12),
+('root', 'ppu', 13),
+('joe', 'ggn', 12);
\ No newline at end of file