From 06ac469243e8b9e08e29e8822017a4f0f330943a Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Thu, 19 Dec 2024 06:28:15 +0000
Subject: [PATCH] Rework BigQuery source partition date and reference name
 error scenarios to assert in-line validation messages

---
 .../source/BigQuerySourceError.feature        |  19 +-
 .../source/BigQueryToGCS_WithMacro.feature    |  75 +-------
 .../bigquery/stepsdesign/BigQueryBase.java    | 176 ++++--------
 .../utils/CdfPluginPropertyLocator.java       |   4 +-
 .../resources/errorMessage.properties         |   4 +-
 .../resources/pluginParameters.properties     |  12 +-
 6 files changed, 54 insertions(+), 236 deletions(-)

diff --git a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature
index e80adc0cd..71626aff0 100644
--- a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature
+++ b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature
@@ -57,22 +57,19 @@ Feature: BigQuery source - Validate BigQuery source plugin error scenarios
     Then Verify the BigQuery validation error message for invalid property "bucket"

   @BQ_SOURCE_TEST
-  Scenario Outline:To verify error message when unsupported format is provided in Partition Start date and Partition end Date
+  Scenario:To verify error message when unsupported format is provided in Partition Start Date and Partition End Date
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "BigQuery" from the plugins list as: "Source"
     Then Navigate to the properties page of plugin: "BigQuery"
     Then Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "referenceName" with value: "bqInvalidReferenceName"
     Then Replace input plugin property: "dataset" with value: "dataset"
     Then Replace input plugin property: "table" with value: "bqSourceTable"
+    And Enter input plugin property: "partitionFrom" with value: "bqIncorrectFormatStartDate"
+    And Enter input plugin property: "partitionTo" with value: "bqIncorrectFormatEndDate"
     Then Click on the Get Schema button
-    Then Enter BigQuery source properties partitionFrom and partitionTo
-    Then Validate BigQuery source incorrect property error for Partition Start date "" value ""
-    Then Validate BigQuery source incorrect property error for Partition End date "" value ""
-    And Enter input plugin property: "referenceName" with value: "bqIncorrectReferenceName"
-    Then Validate BigQuery source incorrect property error for reference name"" value ""
-    Examples:
-    | property | value |
-    | partitionFrom | bqIncorrectFormatStartDate |
-    | partitionTo | bqIncorrectFormatEndDate |
-    | referenceName | bqIncorrectReferenceName |
+    And Click on the Validate button
+    Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageIncorrectReferenceName"
+    Then Verify that the Plugin Property: "partitionFrom" is displaying an in-line error message: "errorMessageIncorrectPartitionStartDate"
+    Then Verify that the Plugin Property: "partitionTo" is displaying an in-line error message: "errorMessageIncorrectPartitionEndDate"
diff --git a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature
index ae5bcba21..3cd7f3196 100644
--- a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature
+++ b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature
@@ -146,7 +146,7 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans
Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST - Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for filter and outputschema + Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for filter and Output Schema Given Open Datafusion Project to configure pipeline When Source is BigQuery When Sink is GCS @@ -160,6 +160,7 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" Then Enter BigQuery property "dataset" as macro argument "bqDataset" Then Enter BigQuery property "table" as macro argument "bqSourceTable" + Then Select Macro action of output schema property: "Output Schema-macro-input" and set the value to "bqOutputSchema" Then Validate "BigQuery" plugin properties Then Close the BigQuery properties Then Open GCS sink properties @@ -184,77 +185,6 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Enter runtime argument value "serviceAccount" for key "serviceAccount" Then Enter runtime argument value "dataset" for key "bqDataset" Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" - Then Enter runtime argument value "projectId" for key "gcsProjectId" - Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" - Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" - Then Enter runtime argument value "csvFormat" for key "gcsFormat" - Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled - Then Run the preview of pipeline with runtime arguments - Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs - Then Click on preview data for GCS sink - Then Close the preview data - Then Deploy the pipeline - Then Run the Pipeline in Runtime - Then Enter runtime argument value "projectId" for key "bqProjectId" - Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" - Then Enter runtime argument value "filter" for key "bqFilter" - Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" - Then Enter runtime argument value "serviceAccount" for key "serviceAccount" - Then Enter runtime argument value "dataset" for key "bqDataset" - Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" - Then Enter runtime argument value "projectId" for key "gcsProjectId" - Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" - Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" - Then Enter runtime argument value "csvFormat" for key "gcsFormat" - Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled - Then Run the Pipeline in Runtime with runtime arguments - Then Wait till pipeline is in running state - Then Open and capture logs - Then Verify the pipeline status is "Succeeded" - Then Verify data is transferred to target GCS bucket - Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled - - @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST - Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for output schema - Given Open Datafusion 
Project to configure pipeline - When Source is BigQuery - When Sink is GCS - Then Open BigQuery source properties - Then Enter BigQuery property reference name - Then Enter BigQuery property "projectId" as macro argument "bqProjectId" - Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" - Then Enter BigQuery property "serviceAccountType" as macro argument "serviceAccountType" - Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" - Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" - Then Enter BigQuery property "dataset" as macro argument "bqDataset" - Then Enter BigQuery property "table" as macro argument "bqSourceTable" - Then Enter BigQuery source property output schema "outputSchema" as macro argument "bqOutputSchema" - Then Validate "BigQuery" plugin properties - Then Close the BigQuery properties - Then Open GCS sink properties - Then Enter GCS property reference name - Then Enter GCS property "projectId" as macro argument "gcsProjectId" - Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" - Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" - Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" - Then Enter GCS property "path" as macro argument "gcsSinkPath" - Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix" - Then Enter GCS property "format" as macro argument "gcsFormat" - Then Enter GCS sink cmek property "encryptionKeyName" as macro argument "cmekGCS" if cmek is enabled - Then Validate "GCS" plugin properties - Then Close the GCS properties - Then Connect source as "BigQuery" and sink as "GCS" to establish connection - Then Save the pipeline - Then Preview and run the pipeline - Then Enter runtime argument value "projectId" for key "bqProjectId" - Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" - Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" - Then Enter runtime argument value "serviceAccount" for key "serviceAccount" - Then Enter runtime argument value "dataset" for key "bqDataset" - Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" Then Enter runtime argument value "OutputSchema" for key "bqOutputSchema" Then Enter runtime argument value "projectId" for key "gcsProjectId" Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" @@ -272,6 +202,7 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Run the Pipeline in Runtime Then Enter runtime argument value "projectId" for key "bqProjectId" Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "filter" for key "bqFilter" Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" Then Enter runtime argument value "serviceAccount" for key "serviceAccount" Then Enter runtime argument value "dataset" for key "bqDataset" diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java index 1dc05c7ef..390fd7801 100644 --- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java +++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java @@ -16,6 +16,7 @@ package io.cdap.plugin.bigquery.stepsdesign; import 
io.cdap.e2e.pages.actions.CdfBigQueryPropertiesActions; +import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions; import io.cdap.e2e.pages.actions.CdfStudioActions; import io.cdap.e2e.pages.locators.CdfBigQueryPropertiesLocators; import io.cdap.e2e.pages.locators.CdfStudioLocators; @@ -39,6 +40,7 @@ import java.text.SimpleDateFormat; import java.util.Date; import java.util.Optional; +import java.util.Properties; import java.util.UUID; /** @@ -94,18 +96,18 @@ public void getCountOfNoOfRecordsTransferredToTargetBigQueryTable() throws IOExc int countRecords = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetTable); BeforeActions.scenario.write("**********No of Records Transferred******************:" + countRecords); Assert.assertEquals("Number of records transferred should be equal to records out ", - countRecords, recordOut()); + countRecords, recordOut()); } @Then("Validate records transferred to target table is equal to number of records from source table " + - "with filter {string}") + "with filter {string}") public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromSourceTableWithFilter(String filter) - throws IOException, InterruptedException { + throws IOException, InterruptedException { String projectId = (PluginPropertyUtils.pluginProp("projectId")); String datasetName = (PluginPropertyUtils.pluginProp("dataset")); int countRecordsTarget = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetTable); String selectQuery = "SELECT count(*) FROM `" + projectId + "." + datasetName + "." + - TestSetupHooks.bqTargetTable + "` WHERE " + PluginPropertyUtils.pluginProp(filter); + TestSetupHooks.bqTargetTable + "` WHERE " + PluginPropertyUtils.pluginProp(filter); Optional result = BigQueryClient.getSoleQueryResult(selectQuery); int count = result.map(Integer::parseInt).orElse(0); BeforeActions.scenario.write("Number of records transferred with respect to filter:" + count); @@ -114,13 +116,13 @@ public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromS @Then("Validate partition date in output partitioned table") public void validatePartitionDateInOutputPartitionedTable() - throws IOException, InterruptedException { + throws IOException, InterruptedException { Optional result = BigQueryClient - .getSoleQueryResult("SELECT distinct _PARTITIONDATE as pt FROM `" + - (PluginPropertyUtils.pluginProp("projectId")) + "." + - (PluginPropertyUtils.pluginProp("dataset")) + "." + - TestSetupHooks.bqTargetTable + - "` WHERE _PARTITION_LOAD_TIME IS Not NULL ORDER BY _PARTITIONDATE DESC "); + .getSoleQueryResult("SELECT distinct _PARTITIONDATE as pt FROM `" + + (PluginPropertyUtils.pluginProp("projectId")) + "." + + (PluginPropertyUtils.pluginProp("dataset")) + "." + + TestSetupHooks.bqTargetTable + + "` WHERE _PARTITION_LOAD_TIME IS Not NULL ORDER BY _PARTITIONDATE DESC "); String outputDate = StringUtils.EMPTY; if (result.isPresent()) { outputDate = result.get(); @@ -140,10 +142,10 @@ public void validateTheRecordsAreNotCreatedInOutputTable() throws IOException, I public void validatePartitioningIsNotDoneOnTheOutputTable() { try { BigQueryClient.getSoleQueryResult("SELECT distinct _PARTITIONDATE as pt FROM `" + - (PluginPropertyUtils.pluginProp("projectId")) - + "." + (PluginPropertyUtils.pluginProp("dataset")) + "." + - TestSetupHooks.bqTargetTable - + "` WHERE _PARTITION_LOAD_TIME IS Not NULL "); + (PluginPropertyUtils.pluginProp("projectId")) + + "." + (PluginPropertyUtils.pluginProp("dataset")) + "." 
+ + TestSetupHooks.bqTargetTable + + "` WHERE _PARTITION_LOAD_TIME IS Not NULL "); } catch (Exception e) { String partitionException = e.toString(); Assert.assertTrue(partitionException.contains("Unrecognized name: _PARTITION_LOAD_TIME")); @@ -172,8 +174,8 @@ public void validateTheCmekKeyOfTargetBigQueryTableIfCmekIsEnabled(String cmek) String cmekBQ = PluginPropertyUtils.pluginProp(cmek); if (cmekBQ != null) { Assert.assertTrue("Cmek key of target BigQuery table should be equal to " + - "cmek key provided in config file", - BigQueryClient.verifyCmekKey(TestSetupHooks.bqTargetTable, cmekBQ)); + "cmek key provided in config file", + BigQueryClient.verifyCmekKey(TestSetupHooks.bqTargetTable, cmekBQ)); return; } BeforeActions.scenario.write("CMEK not enabled"); @@ -208,13 +210,13 @@ public void enterRuntimeArgumentValueForBigQueryCmekPropertyKeyIfBQCmekIsEnabled @Then("Verify the partition table is created with partitioned on field {string}") public void verifyThePartitionTableIsCreatedWithPartitionedOnField(String partitioningField) throws IOException, - InterruptedException { + InterruptedException { Optional result = BigQueryClient - .getSoleQueryResult("SELECT IS_PARTITIONING_COLUMN FROM `" + - (PluginPropertyUtils.pluginProp("projectId")) + "." - + (PluginPropertyUtils.pluginProp("dataset")) + ".INFORMATION_SCHEMA.COLUMNS` " + - "WHERE table_name = '" + TestSetupHooks.bqTargetTable - + "' and column_name = '" + PluginPropertyUtils.pluginProp(partitioningField) + "' "); + .getSoleQueryResult("SELECT IS_PARTITIONING_COLUMN FROM `" + + (PluginPropertyUtils.pluginProp("projectId")) + "." + + (PluginPropertyUtils.pluginProp("dataset")) + ".INFORMATION_SCHEMA.COLUMNS` " + + "WHERE table_name = '" + TestSetupHooks.bqTargetTable + + "' and column_name = '" + PluginPropertyUtils.pluginProp(partitioningField) + "' "); String isPartitioningDoneOnField = StringUtils.EMPTY; if (result.isPresent()) { isPartitioningDoneOnField = result.get(); @@ -234,16 +236,16 @@ public void verifyTheBigQueryValidationErrorMessageForInvalidProperty(String pro String expectedErrorMessage; if (property.equalsIgnoreCase("gcsChunkSize")) { expectedErrorMessage = PluginPropertyUtils - .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_CHUNKSIZE); + .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_CHUNKSIZE); } else if (property.equalsIgnoreCase("bucket")) { expectedErrorMessage = PluginPropertyUtils - .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET); + .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET); } else if (property.equalsIgnoreCase("table")) { expectedErrorMessage = PluginPropertyUtils - .errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_TABLE_NAME); + .errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_TABLE_NAME); } else { expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_PROPERTY). 
- replaceAll("PROPERTY", property.substring(0, 1).toUpperCase() + property.substring(1)); + replaceAll("PROPERTY", property.substring(0, 1).toUpperCase() + property.substring(1)); } String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement(property).getText(); Assert.assertEquals(expectedErrorMessage, actualErrorMessage); @@ -254,134 +256,20 @@ public void verifyTheBigQueryValidationErrorMessageForInvalidProperty(String pro @Then("Validate records transferred to target table is equal to number of records from source table") public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromSourceTable() - throws IOException, InterruptedException { + throws IOException, InterruptedException { int countRecordsTarget = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetTable); Optional result = BigQueryClient.getSoleQueryResult("SELECT count(*) FROM `" + - (PluginPropertyUtils.pluginProp("projectId")) - + "." + (PluginPropertyUtils.pluginProp - ("dataset")) + "." + TestSetupHooks.bqTargetTable + "` "); + (PluginPropertyUtils.pluginProp("projectId")) + + "." + (PluginPropertyUtils.pluginProp + ("dataset")) + "." + TestSetupHooks.bqTargetTable + "` "); int count = result.map(Integer::parseInt).orElse(0); BeforeActions.scenario.write("Number of records transferred from source table to target table:" + count); Assert.assertEquals(count, countRecordsTarget); } - @Then("Enter BigQuery source properties partitionFrom and partitionTo") - public void enterBigQuerySourcePropertiespartitionFromandpartitionTo() throws IOException { - CdfBigQueryPropertiesActions.enterPartitionStartDate(new SimpleDateFormat("dd-MM-yyyy").format(new Date())); - CdfBigQueryPropertiesActions.enterPartitionEndDate(new SimpleDateFormat("dd-MM-yyyy") - .format(DateUtils.addDays(new Date(), 1))); - } - @Then("Validate BigQuery source incorrect property error for Partition Start date {string} value {string}") - public void validateBigQuerySourceIncorrectErrorFor(String property, String value) { - CdfBigQueryPropertiesActions.getSchema(); - - - SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); - String tableFullName = StringUtils.EMPTY; - if (property.equalsIgnoreCase("dataset")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) - + "." + TestSetupHooks.bqSourceTable; - } else if (property.equalsIgnoreCase("table")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" - + PluginPropertyUtils.pluginProp("dataset") - + "." + PluginPropertyUtils.pluginProp(value); - } else if (property.equalsIgnoreCase("datasetProject")) { - tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") - + "." + TestSetupHooks.bqSourceTable; - - } else if (property.equalsIgnoreCase("partitionFrom")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" - + PluginPropertyUtils.pluginProp("dataset") - + "." 
+ PluginPropertyUtils.pluginProp(value); - } - - String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONSTARTDATE) - .replaceAll("TABLENAME", tableFullName); - String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionFrom").getText(); - System.out.println(actualErrorMessage); - Assert.assertEquals("Error message mismatch for Partition Start Date", expectedErrorMessage, actualErrorMessage); - String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement - ("partitionFrom")); - String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; - Assert.assertEquals(expectedColor, actualColor); - } - - @Then("Validate BigQuery source incorrect property error for Partition End date {string} value {string}") - public void validateBigQuerySourceIncorrectPartitionenddateErrorFor(String property, String value) { - CdfBigQueryPropertiesActions.getSchema(); - SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); - String tableFullName = StringUtils.EMPTY; - if (property.equalsIgnoreCase("dataset")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) - + "." + TestSetupHooks.bqSourceTable; - } else if (property.equalsIgnoreCase("table")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" - + PluginPropertyUtils.pluginProp("dataset") - + "." + PluginPropertyUtils.pluginProp(value); - } else if (property.equalsIgnoreCase("datasetProjectId")) { - tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") - + "." + TestSetupHooks.bqSourceTable; - } else if (property.equalsIgnoreCase("partitionEndDate")) { - tableFullName = PluginPropertyUtils.pluginProp(value) + ":" - + PluginPropertyUtils.pluginProp("partitionTo") - + "." + TestSetupHooks.bqSourceTable; - } - - String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONENDDATE) - .replaceAll("TABLENAME", tableFullName); - String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionTo").getText(); - System.out.println(actualErrorMessage); - Assert.assertEquals("Error message mismatch for Partition End Date", expectedErrorMessage, actualErrorMessage); - String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionTo")); - String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; - Assert.assertEquals(expectedColor, actualColor); - } - - @Then("Validate BigQuery source incorrect property error for reference name{string} value {string}") - public void validateBigQuerySourceIncorrectPropertyErrorForreferncename(String property, String value) { - CdfBigQueryPropertiesActions.getSchema(); - SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); - String tableFullName = StringUtils.EMPTY; - if (property.equalsIgnoreCase("dataset")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) - + "." + TestSetupHooks.bqSourceTable; - } else if (property.equalsIgnoreCase("table")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" - + PluginPropertyUtils.pluginProp("dataset") - + "." + PluginPropertyUtils.pluginProp(value); - } else if (property.equalsIgnoreCase("datasetProject")) { - tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") - + "." 
+ TestSetupHooks.bqSourceTable; - } else if (property.equalsIgnoreCase("referenceName")) { - tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("reference") - + "." + TestSetupHooks.bqSourceTable; - } - String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_REFERENCENAME) - .replaceAll("TABLENAME", tableFullName); - String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText(); - - Assert.assertEquals(expectedErrorMessage, actualErrorMessage); - String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement - ("referenceName")); - String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; - Assert.assertEquals(expectedColor, actualColor); - - } - @Then("Enter BigQuery source properties filter") public void enterBigQuerysourcePropertiesfilter() throws IOException { CdfBigQueryPropertiesActions.enterFilter("%%%%"); } - @Then("Enter BigQuery source property output schema {string} as macro argument {string}") - public void enterBigQueryPropertyAsMacroArgumentoutputschema(String pluginProperty, String macroArgument) { - SCHEMA_LOCATORS.schemaActions.click(); - SCHEMA_LOCATORS.schemaActionType("macro").click(); - WaitHelper.waitForElementToBeHidden(SCHEMA_LOCATORS.schemaActionType("macro"), 5); - try { - enterMacro(CdfPluginPropertyLocator.fromPropertyString(pluginProperty).pluginProperty, macroArgument); - } catch (NullPointerException e) { - Assert.fail("CDF_PLUGIN_PROPERTY_MAPPING for '" + pluginProperty + "' not present in CdfPluginPropertyLocator."); - } - } } diff --git a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java index 8d3199b7d..10a848a9b 100644 --- a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java +++ b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java @@ -78,10 +78,10 @@ public enum CdfPluginPropertyLocator { .put("createFailIfObjectExists", CdfPluginPropertyLocator.GCS_CREATE_FAIL_IF_OBJECT_EXISTS) .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH) .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH) - .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE) - .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE) .put("filter", CdfPluginPropertyLocator.FILTER) .put("Output Schema-macro-input", CdfPluginPropertyLocator.OUTPUT_SCHEMA) + .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE) + .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE) .build(); } diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties index f474d65fe..19050e28b 100644 --- a/src/e2e-test/resources/errorMessage.properties +++ b/src/e2e-test/resources/errorMessage.properties @@ -33,7 +33,7 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be -errorMessageIncorrectPartitionStartDate=16-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd -errorMessageIncorrectPartitionEndDate=17-12-2024 is not in a valid format. 
Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionStartDate=17-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionEndDate=18-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
 errorMessageIncorrectReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
 errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index 694e62812..0b3825c1a 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -354,15 +354,17 @@
 bqTargetTable=dummy
 bqTargetTable2=dummy
 bqmtTargetTable=tabA
 bqmtTargetTable2=tabB
-bqStartDate=2024-12-16
-bqEndDate=2024-12-17
-partitionFrom=2024-12-16
-partitionTo=2024-12-17
+bqStartDate=2024-12-17
+bqEndDate=2024-12-18
+partitionFrom=2024-12-17
+partitionTo=2024-12-18
 filter=Id=20
-bqIncorrectReferenceName=invalidRef&^*&&*
+bqInvalidReferenceName=invalidRef&^*&&*
 OutputSchema={ "type": "record", "name": "text", "fields": [{ "name": "Id", "type": "long" }, { "name": "Value", "type": "long" }, \
 { "name": "UID", "type": "string" } ] }
 incorrectFilter=%%%%
+bqIncorrectFormatStartDate=17-12-2024
+bqIncorrectFormatEndDate=18-12-2024
 ## BQMT-PLUGIN-PROPERTIES-END
 ##CLOUDBIGTABLE-PLUGIN-PROPERTIES-START
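
Note on the date format under test: the in-line errors asserted above come
from the plugin rejecting partition dates that are not strict yyyy-MM-dd
values, which is why bqIncorrectFormatStartDate=17-12-2024 must fail
validation. Below is a minimal, self-contained Java sketch of an equivalent
strict check, for illustration only; the class and method names are assumed
and are not part of this patch or of the plugin code.

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeParseException;
    import java.time.format.ResolverStyle;

    public class PartitionDateFormatCheck {

      // 'uuuu' (proleptic year) is required with ResolverStyle.STRICT;
      // 'yyyy' (year-of-era) would additionally demand an era field.
      private static final DateTimeFormatter PARTITION_DATE =
          DateTimeFormatter.ofPattern("uuuu-MM-dd").withResolverStyle(ResolverStyle.STRICT);

      /** Returns true only for values in strict yyyy-MM-dd form, e.g. 2024-12-17. */
      static boolean isValidPartitionDate(String value) {
        try {
          LocalDate.parse(value, PARTITION_DATE);
          return true;
        } catch (DateTimeParseException e) {
          return false;
        }
      }

      public static void main(String[] args) {
        System.out.println(isValidPartitionDate("2024-12-17")); // true
        System.out.println(isValidPartitionDate("17-12-2024")); // false: day-first, as in bqIncorrectFormatStartDate
        System.out.println(isValidPartitionDate("2024-02-30")); // false: STRICT rejects impossible dates
      }
    }

STRICT resolution also rejects impossible dates such as 2024-02-30, which the
default SMART style would silently adjust to the last valid day of the month,
so this check matches the "Enter valid date in format: yyyy-MM-dd" wording
asserted via errorMessage.properties.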