diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 619b83d76f..1db5015306 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -40,7 +40,7 @@ jobs:
)
strategy:
matrix:
- tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute]
+ tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy]
fail-fast: false
steps:
# Pinned 1.0.0 version
diff --git a/pom.xml b/pom.xml
index 92ff32c99c..d9a1db5fae 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1157,6 +1157,7 @@
integration-test
+ verify
diff --git a/src/e2e-test/features/gcscopy/GCSCopy.feature b/src/e2e-test/features/gcscopy/GCSCopy.feature
new file mode 100644
index 0000000000..cc81c7f875
--- /dev/null
+++ b/src/e2e-test/features/gcscopy/GCSCopy.feature
@@ -0,0 +1,151 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@GCSCopy
+Feature: GCSCopy - Verification of successful objects copy from one bucket to another
+
+  @CMEK @GCS_CSV_TEST @GCS_SINK_TEST
+  Scenario: Validate successful copy object from one bucket to another new bucket along with data validation with default subdirectory and overwrite toggle button as false.
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+    When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+    When Navigate to the properties page of plugin: "GCS Copy"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter GCSCopy property source path "gcsCsvFile"
+    And Enter GCSCopy property destination path
+    Then Override Service account details if set in environment variables
+    Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+    Then Validate "GCS Copy" plugin properties
+    Then Close the Plugin Properties page
+    Then Save and Deploy Pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate GCSCopy successfully copies object "gcsCsvFile" to destination bucket
+    Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"
+
+ @GCS_READ_RECURSIVE_TEST @GCS_SINK_TEST @GCSCopy_Required
+ Scenario: Validate successful copy objects from one bucket to another with Copy All Subdirectories set to true along with data validation.
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter GCSCopy property source path "gcsCopyReadRecursivePath"
+ And Enter GCSCopy property destination path
+ Then Override Service account details if set in environment variables
+ Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+ Then Select radio button plugin property: "recursive" with value: "true"
+ Then Validate "GCS Copy" plugin properties
+ Then Close the Plugin Properties page
+ Then Save and Deploy Pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate GCSCopy copies subdirectories along with its files to the destination bucket
+
+ @GCS_READ_RECURSIVE_TEST @GCS_SINK_TEST @GCSCopy_Required
+ Scenario: Validate successful copy objects from one bucket to another with Copy All Subdirectories set to false along with data validation.
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter GCSCopy property source path "gcsCopyReadRecursivePath"
+ And Enter GCSCopy property destination path
+ Then Override Service account details if set in environment variables
+ Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+ Then Select radio button plugin property: "recursive" with value: "false"
+ Then Validate "GCS Copy" plugin properties
+ Then Close the Plugin Properties page
+ Then Save and Deploy Pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate GCSCopy did not copy subdirectories along with its files to the destination bucket
+
+ @GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST @GCSCopy_Required
+ Scenario: Validate successful copy objects from one bucket to another existing bucket with Overwrite Existing Files set to true along with data validation.
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter GCSCopy property source path "gcsCsvFile"
+ Then Enter GCSCopy property destination path "gcsCopyReadRecursivePath"
+ Then Override Service account details if set in environment variables
+ Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+ Then Select radio button plugin property: "overwrite" with value: "true"
+ Then Validate "GCS Copy" plugin properties
+ Then Close the Plugin Properties page
+ Then Save and Deploy Pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate GCSCopy successfully copies object "gcsCsvFile" to destination bucket
+ Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"
+
+ @GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST
+ Scenario: Validate successful copy objects from one bucket to another existing bucket with Overwrite Existing Files set to false along with data validation.
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter GCSCopy property source path "gcsCsvFile"
+ Then Enter GCSCopy property destination path "gcsCopyReadRecursivePath"
+ Then Override Service account details if set in environment variables
+ Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+ Then Select radio button plugin property: "overwrite" with value: "false"
+ Then Validate "GCS Copy" plugin properties
+ Then Close the Plugin Properties page
+ Then Save and Deploy Pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ And Verify the pipeline status is "Failed"
+ Then Close the pipeline logs
+ Then Validate GCSCopy failed to copy object "gcsCsvFile" to destination bucket
+ Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"
+
+ @GCS_CSV_TEST @GCS_SINK_TEST
+  Scenario: Validate successful Copy object from one bucket to another new bucket with location set to non-default value
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter GCSCopy property source path "gcsCsvFile"
+ And Enter GCSCopy property destination path
+ Then Override Service account details if set in environment variables
+ Then Replace input plugin property: "location" with value: "locationEU"
+ Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+ Then Validate "GCS Copy" plugin properties
+ Then Close the Plugin Properties page
+ Then Save and Deploy Pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate GCSCopy successfully copied object "gcsCsvFile" to destination bucket in location "locationEU"
+ Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"
\ No newline at end of file
diff --git a/src/e2e-test/features/gcscopy/GCSCopyErrorScenarios.feature b/src/e2e-test/features/gcscopy/GCSCopyErrorScenarios.feature
new file mode 100644
index 0000000000..5ce468b81f
--- /dev/null
+++ b/src/e2e-test/features/gcscopy/GCSCopyErrorScenarios.feature
@@ -0,0 +1,60 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@GCSCopy
+Feature: GCSCopy - Validate GCSCopy plugin error scenarios
+
+  Scenario: Verify GCSCopy plugin properties validation errors for mandatory fields
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ Then Click on the Validate button
+ Then Verify mandatory property error for below listed properties:
+ | sourcePath |
+ | destPath |
+
+ @GCS_SINK_TEST
+  Scenario: Verify GCSCopy plugin error message for invalid bucket name in Source Path
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Enter input plugin property: "sourcePath" with value: "invalidsourcePath"
+ And Enter input plugin property: "destPath" with value: "gcsTargetBucketName"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "sourcePath" is displaying an in-line error message: "errorMessageInvalidSourcePath"
+
+ @GCS_CSV_TEST
+  Scenario: Verify GCSCopy plugin error message for invalid bucket name in Destination Path
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Enter input plugin property: "sourcePath" with value: "gcsCsvFile"
+ And Enter input plugin property: "destPath" with value: "invaliddestPath"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "destPath" is displaying an in-line error message: "errorMessageInvalidDestPath"
+
+ @GCS_CSV_TEST @GCS_SINK_TEST
+  Scenario: Verify GCSCopy plugin error message for invalid Encryption Key Name
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ And Enter input plugin property: "sourcePath" with value: "gcsCsvFile"
+ And Enter input plugin property: "destPath" with value: "gcsTargetBucketName"
+ And Enter input plugin property: "cmekKey" with value: "invalidEncryptionKey"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "cmekKey" is displaying an in-line error message: "errorMessageInvalidEncryptionKey"
diff --git a/src/e2e-test/features/gcscopy/GCSCopy_WithMacro.feature b/src/e2e-test/features/gcscopy/GCSCopy_WithMacro.feature
new file mode 100644
index 0000000000..e33faa48c5
--- /dev/null
+++ b/src/e2e-test/features/gcscopy/GCSCopy_WithMacro.feature
@@ -0,0 +1,41 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@GCSCopy
+Feature: GCSCopy - Verification of successful objects copy from one bucket to another with macro arguments
+
+ @CMEK @GCS_CSV_TEST @GCS_SINK_TEST
+  Scenario: Validate successful copy object from one bucket to another new bucket with macro arguments
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
+ When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
+ When Navigate to the properties page of plugin: "GCS Copy"
+ Then Click on the Macro button of Property: "project" and set the value to: "projectId"
+ Then Click on the Macro button of Property: "sourcePath" and set the value to: "SourcePath"
+ Then Click on the Macro button of Property: "destPath" and set the value to: "DestPath"
+ Then Override Service account details if set in environment variables
+ Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
+ Then Validate "GCS Copy" plugin properties
+ Then Close the Plugin Properties page
+ Then Save and Deploy Pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "projectId" for key "projectId"
+ Then Enter runtime argument value "gcsCsvFile" for GCSCopy property sourcePath key "SourcePath"
+ Then Enter runtime argument value for GCSCopy property destination path key "DestPath"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate GCSCopy successfully copies object "gcsCsvFile" to destination bucket
diff --git a/src/e2e-test/features/gcsmove/GCSMove.feature b/src/e2e-test/features/gcsmove/GCSMove.feature
index 57e2152795..e79e572483 100644
--- a/src/e2e-test/features/gcsmove/GCSMove.feature
+++ b/src/e2e-test/features/gcsmove/GCSMove.feature
@@ -82,7 +82,7 @@ Feature:GCSMove - Verification of successful objects move from one bucket to ano
Then Verify the pipeline status is "Succeeded"
Then Validate GCSMove successfully moved object "gcsCsvFile" to destination bucket
- @GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST @PLUGIN-1134
+ @GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST
Scenario:Validate successful move objects from one bucket to another existing bucket with Overwrite Existing Files set to false
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
@@ -99,7 +99,7 @@ Feature:GCSMove - Verification of successful objects move from one bucket to ano
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
- Then Verify the pipeline status is "failed"
+ And Verify the pipeline status is "Failed"
Then Validate GCSMove failed to move object "gcsCsvFile" to destination bucket
@GCS_CSV_TEST @GCS_SINK_TEST
diff --git a/src/e2e-test/java/io/cdap/plugin/gcs/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/gcs/stepsdesign/package-info.java
index e4b6b888e1..a886730d38 100644
--- a/src/e2e-test/java/io/cdap/plugin/gcs/stepsdesign/package-info.java
+++ b/src/e2e-test/java/io/cdap/plugin/gcs/stepsdesign/package-info.java
@@ -1,4 +1,4 @@
/**
- * Package contains the stepDesign for the GCS features.
+ * Package contains the step definitions for the GCS plugin.
*/
package io.cdap.plugin.gcs.stepsdesign;
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/actions/GCSCopyActions.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/actions/GCSCopyActions.java
new file mode 100644
index 0000000000..e216e582c8
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/actions/GCSCopyActions.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcscopy.actions;
+
+import io.cdap.e2e.utils.ElementHelper;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.gcscopy.locators.GCSCopyLocators;
+
+/**
+ * GCS Copy plugin related actions.
+ */
+
+public class GCSCopyActions {
+ static {
+ SeleniumHelper.getPropertiesLocators(GCSCopyLocators.class);
+ }
+ public static void enterSourcePath(String sourcePath) {
+ ElementHelper.replaceElementValue(GCSCopyLocators.gcsCopySourcePath, sourcePath);
+ }
+
+ public static void enterDestinationPath(String destinationPath) {
+ ElementHelper.replaceElementValue(GCSCopyLocators.gcsCopyDestinationPath, destinationPath);
+ }
+
+ public static void enterEncryptionKeyName(String cmek) {
+ GCSCopyLocators.gcsCopyEncryptionKey.sendKeys(cmek);
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/actions/package-info.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/actions/package-info.java
new file mode 100644
index 0000000000..96c6070452
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/actions/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the actions for the GCS Copy plugin.
+ */
+package io.cdap.plugin.gcscopy.actions;
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/locators/GCSCopyLocators.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/locators/GCSCopyLocators.java
new file mode 100644
index 0000000000..79b87cf173
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/locators/GCSCopyLocators.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcscopy.locators;
+
+import org.openqa.selenium.WebElement;
+import org.openqa.selenium.support.FindBy;
+import org.openqa.selenium.support.How;
+
+/**
+ * GCS Copy plugin related locators.
+ */
+public class GCSCopyLocators {
+
+ @FindBy(how = How.XPATH, using = "//input[@data-testid='sourcePath']")
+ public static WebElement gcsCopySourcePath;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-testid='destPath']")
+ public static WebElement gcsCopyDestinationPath;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-testid='cmekKey']")
+ public static WebElement gcsCopyEncryptionKey;
+
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/locators/package-info.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/locators/package-info.java
new file mode 100644
index 0000000000..c8cb778988
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/locators/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the locators for the GCS Copy plugin.
+ */
+package io.cdap.plugin.gcscopy.locators;
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java
new file mode 100644
index 0000000000..91ae566df9
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcscopy.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute GCSCopy test cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.gcscopy.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+  tags = {"@GCSCopy"},
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/gcscopy-action",
+    "json:target/cucumber-reports/cucumber-gcscopy-action.json",
+    "junit:target/cucumber-reports/cucumber-gcscopy-action.xml"}
+)
+public class TestRunner {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunnerRequired.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunnerRequired.java
new file mode 100644
index 0000000000..d0a44c1f7e
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunnerRequired.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcscopy.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only GCSCopy test cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+ features = {"src/e2e-test/features"},
+ glue = {"io.cdap.plugin.gcscopy.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+ tags = {"@GCSCopy_Required"},
+ monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/gcscopy-action-required",
+    "json:target/cucumber-reports/cucumber-gcscopy-action-required.json",
+    "junit:target/cucumber-reports/cucumber-gcscopy-action-required.xml"}
+)
+public class TestRunnerRequired {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/package-info.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/package-info.java
new file mode 100644
index 0000000000..6a95f2a483
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the test runner for the GCS Copy plugin.
+ */
+package io.cdap.plugin.gcscopy.runners;
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/GCSCopy.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/GCSCopy.java
new file mode 100644
index 0000000000..89f2f57e94
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/GCSCopy.java
@@ -0,0 +1,234 @@
+
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcscopy.stepsdesign;
+
+import com.google.cloud.storage.Blob;
+import com.google.cloud.storage.Bucket;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageException;
+import com.google.cloud.storage.StorageOptions;
+import io.cdap.e2e.pages.locators.CdfStudioLocators;
+import io.cdap.e2e.utils.ConstantsUtil;
+import io.cdap.e2e.utils.ElementHelper;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.e2e.utils.StorageClient;
+import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
+import io.cdap.plugin.gcscopy.actions.GCSCopyActions;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+
+/**
+ * GCS Copy plugin related stepsdesign.
+ */
+
+public class GCSCopy {
+
+ @Then("Validate GCSCopy copies subdirectories along with its files to the destination bucket")
+ public static boolean compareBuckets() throws IOException {
+ // Initialize the GCS client
+ Boolean bucketMatched = false;
+ Storage storage = StorageOptions.newBuilder().setProjectId(
+ PluginPropertyUtils.pluginProp(ConstantsUtil.PROJECT_ID)).build().getService();
+ // Get references to the source and destination buckets
+ String sourceGCSBucket = TestSetupHooks.gcsSourceBucketName;
+ String targetGCSBucket = TestSetupHooks.gcsTargetBucketName;
+ Bucket sourceBucket = storage.get(sourceGCSBucket);
+ Bucket destinationBucket = storage.get(targetGCSBucket);
+ // List objects in the source bucket
+ Set sourceObjectNames = new HashSet<>();
+ for (Blob blob : sourceBucket.list(Storage.BlobListOption.prefix(PluginPropertyUtils.pluginProp
+ ("gcsCopyReadRecursivePath"))).iterateAll()) {
+ sourceObjectNames.add(blob.getName());
+ }
+ // List objects in the destination bucket
+ Set destinationObjectNames = new HashSet<>();
+ for (Blob blob : destinationBucket.list(Storage.BlobListOption.prefix(PluginPropertyUtils.pluginProp
+ ("gcsCopyReadRecursivePath"))).iterateAll()) {
+ destinationObjectNames.add(blob.getName());
+ }
+ try {
+ if (sourceObjectNames.equals(destinationObjectNames)) {
+ BeforeActions.scenario.write("Subdirectory along with its files is copied " + targetGCSBucket +
+ " successfully");
+ return bucketMatched;
+ }
+ Assert.fail("Object not copied to target gcs bucket " + targetGCSBucket);
+ } catch (StorageException e) {
+ if (e.getMessage().equals("The specified bucket does not exist")) {
+ Assert.fail("Target gcs bucket " + targetGCSBucket + " not created - " + e.getMessage());
+ } else {
+ Assert.fail(e.getMessage());
+ }
+ }
+ return false;
+ }
+
+
+ @Then("Validate GCSCopy did not copy subdirectories along with its files to the destination bucket")
+ public static void compareBucketsTarget() throws IOException {
+ // Initialize the GCS client
+ Storage storage = StorageOptions.newBuilder().setProjectId(
+ PluginPropertyUtils.pluginProp(ConstantsUtil.PROJECT_ID)).build().getService();
+ // Get references to the source and destination buckets
+ String sourceGCSBucket = TestSetupHooks.gcsSourceBucketName;
+ String targetGCSBucket = TestSetupHooks.gcsTargetBucketName;
+ Bucket sourceBucket = storage.get(sourceGCSBucket);
+ Bucket destinationBucket = storage.get(targetGCSBucket);
+ // List objects in the source bucket
+ Set sourceObjectNames = new HashSet<>();
+ for (Blob blob : sourceBucket.list(Storage.BlobListOption.prefix(PluginPropertyUtils.pluginProp
+ ("gcsCopyReadRecursivePath"))).iterateAll()) {
+ sourceObjectNames.add(blob.getName());
+ }
+ // List objects in the destination bucket
+ Set destinationObjectNames = new HashSet<>();
+ for (Blob blob : destinationBucket.list(Storage.BlobListOption.prefix(PluginPropertyUtils.pluginProp
+ ("gcsCopyReadRecursivePath"))).iterateAll()) {
+ destinationObjectNames.add(blob.getName());
+ }
+ try {
+ if (destinationObjectNames.isEmpty()) {
+ BeforeActions.scenario.write("Target bucket is empty , no files copied from subdirectory "
+ + targetGCSBucket + " successfully");
+ return;
+ }
+ Assert.fail("Object copied to target gcs bucket along with its files " + targetGCSBucket);
+ } catch (StorageException e) {
+ if (e.getMessage().equals("The specified bucket does not exist")) {
+ Assert.fail("Target gcs bucket " + targetGCSBucket + " not created - " + e.getMessage());
+ } else {
+ Assert.fail(e.getMessage());
+ }
+ }
+ }
+
+ @Then("Validate GCSCopy successfully copies object {string} to destination bucket")
+ public void validateGCSCopySuccessfullyCopiedObjectToDestinationBucket(String path) throws IOException {
+ String sourceGCSBucket = TestSetupHooks.gcsSourceBucketName;
+ String gcsObject = PluginPropertyUtils.pluginProp(path);
+ boolean isPresentAtSource = false;
+ for (Blob blob : StorageClient.listObjects(sourceGCSBucket).iterateAll()) {
+ if (blob.getName().equals(gcsObject)) {
+ isPresentAtSource = true;
+ break;
+ }
+ }
+ if (!isPresentAtSource) {
+
+ Assert.fail("Object is not present in source bucket" + sourceGCSBucket);
+ }
+ BeforeActions.scenario.write("Object is copied from source GCS Bucket " + sourceGCSBucket + " successfully");
+ String targetGCSBucket = TestSetupHooks.gcsTargetBucketName;
+ try {
+ for (Blob blob : StorageClient.listObjects(targetGCSBucket).iterateAll()) {
+ if (blob.getName().equals(gcsObject)) {
+ BeforeActions.scenario.write("Object copied to gcs bucket " + targetGCSBucket + " successfully");
+ return;
+ }
+ }
+ Assert.fail("Object not copied to target gcs bucket " + targetGCSBucket);
+ } catch (StorageException | IOException e) {
+ if (e.getMessage().equals("The specified bucket does not exist")) {
+ Assert.fail("Target gcs bucket " + targetGCSBucket + " not created - " + e.getMessage());
+ } else {
+ Assert.fail(e.getMessage());
+ }
+ }
+ }
+
+ @Then("Enter GCSCopy property encryption key name {string} if cmek is enabled")
+ public void enterGCSCopyPropertyEncryptionKeyNameIfCmekIsEnabled(String cmek) {
+ String cmekGCS = PluginPropertyUtils.pluginProp(cmek);
+ if (cmekGCS != null) {
+ GCSCopyActions.enterEncryptionKeyName(cmekGCS);
+ BeforeActions.scenario.write("Entered encryption key name - " + cmekGCS);
+ }
+ }
+
+ @Then("Enter GCSCopy property source path {string}")
+ public void enterGCSCopyPropertySourcePath(String path) {
+ GCSCopyActions.enterSourcePath("gs://" + TestSetupHooks.gcsSourceBucketName
+ + "/" + PluginPropertyUtils.pluginProp(path));
+ }
+
+ @Then("Enter GCSCopy property destination path")
+ public void enterGCSCopyPropertyDestinationPath() {
+ GCSCopyActions.enterDestinationPath("gs://" + TestSetupHooks.gcsTargetBucketName);
+ }
+
+ @Then("Enter GCSCopy property destination path {string}")
+ public void enterGCSCopyPropertyDestinationPath(String path) {
+ GCSCopyActions.enterDestinationPath("gs://" + TestSetupHooks.gcsTargetBucketName
+ + "/" + PluginPropertyUtils.pluginProp(path));
+ }
+
+ @Then("Validate GCSCopy successfully copied object {string} to destination bucket in location {string}")
+ public void validateGCSCopySuccessfullyCopiedObjectToDestinationBucketInLocation(String path, String location) {
+ String targetGCSBucket = TestSetupHooks.gcsTargetBucketName;
+ try {
+ if (StorageClient.getBucketMetadata(targetGCSBucket).getLocation().equalsIgnoreCase(PluginPropertyUtils.pluginProp
+ (location))) {
+ validateGCSCopySuccessfullyCopiedObjectToDestinationBucket(path);
+ return;
+ }
+ Assert.fail("Target gcs bucket " + targetGCSBucket + " is not created in location " + location);
+ } catch (StorageException | IOException e) {
+ if (e.getMessage().equals("The specified bucket does not exist")) {
+ Assert.fail("Target gcs bucket " + targetGCSBucket + " not created - " + e.getMessage());
+ } else {
+ Assert.fail(e.getMessage());
+ }
+ }
+ }
+
+ @Then("Enter runtime argument value {string} for GCSCopy property sourcePath key {string}")
+ public void enterRuntimeArgumentValueForGCSCopyPropertySourcePathKey(String value, String runtimeArgumentKey) {
+ ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey),
+ "gs://" + TestSetupHooks.gcsSourceBucketName + "/" + PluginPropertyUtils.pluginProp(value));
+ }
+
+ @Then("Enter runtime argument value for GCSCopy property destination path key {string}")
+ public void enterRuntimeArgumentValueForGCSCopyPropertyDestinationPathKey(String runtimeArgumentKey) {
+ ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey),
+ "gs://" + TestSetupHooks.gcsTargetBucketName);
+ }
+ @Then("Validate GCSCopy failed to copy object {string} to destination bucket")
+ public void validateGCSCopyFailedToCopyObjectToDestinationBucket(String path) throws IOException {
+ String sourceGCSBucket = TestSetupHooks.gcsSourceBucketName;
+ String gcsObject = PluginPropertyUtils.pluginProp(path);
+ for (Blob blob : StorageClient.listObjects(sourceGCSBucket).iterateAll()) {
+ if (blob.getName().equals(gcsObject)) {
+ BeforeActions.scenario.write("Object is not copied from source bucket" + sourceGCSBucket);
+ return;
+ }
+ }
+ Assert.fail("Object is deleted from source GCS Bucket " + sourceGCSBucket);
+ }
+
+ @Then("Validate the data of GCS Copy source bucket and destination bucket {string}")
+ public void validateTheDataFromGCSSourceToGCSSinkWithExpectedCsvFileAndTargetGCSBucket(String path) {
+ GCSCopyValidation.validateGCSSourceToGCSSinkWithCSVFormat(TestSetupHooks.gcsTargetBucketName,
+ PluginPropertyUtils.pluginProp(path));
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/GCSCopyValidation.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/GCSCopyValidation.java
new file mode 100644
index 0000000000..fce2f0dd0b
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/GCSCopyValidation.java
@@ -0,0 +1,99 @@
+package io.cdap.plugin.gcscopy.stepsdesign;
+
+import au.com.bytecode.opencsv.CSVReader;
+import com.google.api.gax.paging.Page;
+import com.google.cloud.storage.Blob;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+/**
+ * GCSCopy plugin validation: compares the CSV test data with the objects
+ * the pipeline copied into the destination GCS bucket.
+ */
+public class GCSCopyValidation {
+
+  private static final String projectId = PluginPropertyUtils.pluginProp("projectId");
+  public static boolean validateGCSSourceToGCSSinkWithCSVFormat(String bucketName, String filepath) {
+    Map<String, JsonObject> expectedCSVData = readCsvAndConvertToJson(filepath);
+    Map<String, JsonObject> actualGcsCsvData = listBucketObjects(bucketName);
+
+    boolean isMatched = actualGcsCsvData.equals(expectedCSVData);
+
+    return isMatched;
+  }
+
+  public static Map<String, JsonObject> readCsvAndConvertToJson(String filepath) {
+    Map<String, JsonObject> csvDataMap = new HashMap<>();
+    try (CSVReader csvReader = new CSVReader(new java.io.FileReader(filepath))) {
+      // Read the header line to get column names
+      String[] headers = csvReader.readNext();
+      String[] line;
+      while ((line = csvReader.readNext()) != null) {
+        JsonObject jsonObject = new JsonObject();
+
+        for (int j = 0; j < headers.length; j++) {
+          jsonObject.addProperty(headers[j], line[j]);
+        }
+        String id = line[0];
+        csvDataMap.put(id, jsonObject);
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    return csvDataMap;
+  }
+
+  public static Map<String, JsonObject> listBucketObjects(String bucketName) {
+    Map<String, JsonObject> bucketObjectData = new HashMap<>();
+    Storage storage = StorageOptions.newBuilder().setProjectId(projectId).build().getService();
+    Page<Blob> blobs = storage.list(bucketName);
+
+    List<Blob> bucketObjects = StreamSupport.stream(blobs.iterateAll().spliterator(), true)
+      .filter(blob -> blob.getSize() != 0)
+      .collect(Collectors.toList());
+
+    Stream<String> objectNamesWithData = bucketObjects.stream().map(blob -> blob.getName());
+    List<String> bucketObjectNames = objectNamesWithData.collect(Collectors.toList());
+
+    // Aggregate every part file emitted by the pipeline, not just the first object.
+    for (String objectName : bucketObjectNames) {
+      if (objectName.contains("part-r")) {
+        Map<String, JsonObject> partData = fetchObjectData(projectId, bucketName, objectName);
+        bucketObjectData.putAll(partData);
+      }
+    }
+
+    return bucketObjectData;
+  }
+
+  public static Map<String, JsonObject> fetchObjectData(String projectId, String bucketName, String objectName) {
+    Map<String, JsonObject> dataMap = new HashMap<>();
+    Storage storage = StorageOptions.newBuilder().setProjectId(projectId).build().getService();
+    byte[] objectData = storage.readAllBytes(bucketName, objectName);
+    String objectDataAsString = new String(objectData, StandardCharsets.UTF_8);
+    String[] lines = objectDataAsString.split("\n");
+    String[] headers = lines[0].split(",");
+
+    for (int i = 1; i < lines.length; i++) {
+      String[] values = lines[i].split(",");
+      JsonObject jsonObject = new JsonObject();
+      for (int j = 0; j < headers.length; j++) {
+        jsonObject.addProperty(headers[j], values[j]);
+      }
+      String id = values[0];
+      dataMap.put(id, jsonObject);
+    }
+    return dataMap;
+  }
+
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/package-info.java
new file mode 100644
index 0000000000..8085a97fbf
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/stepsdesign/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the step definitions for the GCS Copy plugin.
+ */
+package io.cdap.plugin.gcscopy.stepsdesign;
diff --git a/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunner.java
index 2088ccebad..0cfc443c58 100644
--- a/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunner.java
+++ b/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunner.java
@@ -27,8 +27,7 @@
features = {"src/e2e-test/features"},
glue = {"io.cdap.plugin.gcsmove.stepsdesign", "io.cdap.plugin.gcs.stepsdesign",
"stepsdesign", "io.cdap.plugin.common.stepsdesign"},
- tags = {"@GCSMove and not @PLUGIN-1134"},
- //TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/PLUGIN-1134
+ tags = {"@GCSMove"},
monochrome = true,
plugin = {"pretty", "html:target/cucumber-html-report/gcsmove-action",
"json:target/cucumber-reports/cucumber-gcsmove-action.json",
diff --git a/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunnerRequired.java b/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunnerRequired.java
index 6d3989ac7f..05180a5ce4 100644
--- a/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunnerRequired.java
+++ b/src/e2e-test/java/io/cdap/plugin/gcsmove/runners/TestRunnerRequired.java
@@ -28,7 +28,6 @@
glue = {"io.cdap.plugin.gcsmove.stepsdesign", "io.cdap.plugin.gcs.stepsdesign",
"stepsdesign", "io.cdap.plugin.common.stepsdesign"},
tags = {"@GCSMove_Required"},
- //TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/PLUGIN-1134
monochrome = true,
plugin = {"pretty", "html:target/cucumber-html-report/gcsmove-action-required",
"json:target/cucumber-reports/cucumber-gcsmove-action-required.json",
diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties
index a6908592aa..6bf51faf6d 100644
--- a/src/e2e-test/resources/errorMessage.properties
+++ b/src/e2e-test/resources/errorMessage.properties
@@ -30,4 +30,7 @@ errorMessageInvalidFormat=Input has multi-level structure that cannot be represe
errorMessageMultipleFileWithFirstRowAsHeaderDisabled=Spark program 'phase-1' failed with error: Found a row with 6 fields when the schema only contains 4 fields. Check that the schema contains the right number of fields.. Please check the system logs for more details.
errorMessageMultipleFileWithFirstRowAsHeaderEnabled=Spark program 'phase-1' failed with error: For input string:
errorMessageMultipleFileWithoutClearDefaultSchema=Spark program 'phase-1' failed with error: Found a row with 4 fields when the schema only contains 2 fields.
+errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should
+errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. Bucket name should
+errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index eda2da060f..8227a939d4 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -265,13 +265,24 @@ spannerSourceBasicSchema=[{"key":"EmployeeDepartment","value":"string"},{"key":"
## GCSMove-PLUGIN-PROPERTIES-START
gcsMoveMandatoryProperties=sourcePath,destPath
gcsMoveValidGcsPath=gs://valid-bucket-format
-gcsMoveReadRecursivePath=testdata
+gcsMoveReadRecursivePath=testdata/
gcsMoveReadRecursiveSubDirectory=testdata/GCS_RECURSIVE_TEST
locationEU=eu
gcsDoneSuccessFile=__SUCCESS
gcsDoneFailedFile=__FAILED
## GCSMove-PLUGIN-PROPERTIES-END
+## GCSCopy-PLUGIN-PROPERTIES-START
+gcsCopyMandatoryProperties=sourcePath,destPath
+invalidsourcePath=abc@
+invaliddestPath=abc@
+invalidEncryptionKey=abc@
+gcsCopyReadRecursivePath=testdata/
+gcsCopyReadRecursiveSubDirectory=testdata/GCS_RECURSIVE_TEST
+gcsCopyCsvExpectedFilePath=src/e2e-test/resources/testdata/GCSCOPY_CSV_TEST.csv
+gcsCopyRecursivePath=src/e2e-test/resources/testdata/GCS_RECURSIVE_TEST
+## GCSCopy-PLUGIN-PROPERTIES-END
+
## BQEXECUTE-PLUGIN-PROPERTIES-START
bqExecuteQuery=SELECT * FROM `PROJECT_NAME.DATASET.TABLENAME`
bqExecuteRowAsArgQuery=SELECT DISTINCT ProjectId as bqExecuteArgProjectID, Dataset as bqExecuteArgDataset FROM `PROJECT_NAME.DATASET.TABLENAME` limit 1
diff --git a/src/e2e-test/resources/testdata/GCSCOPY_CSV_TEST.csv b/src/e2e-test/resources/testdata/GCSCOPY_CSV_TEST.csv
new file mode 100644
index 0000000000..24a37bf289
--- /dev/null
+++ b/src/e2e-test/resources/testdata/GCSCOPY_CSV_TEST.csv
@@ -0,0 +1,11 @@
+id,EmployeeDepartment,Employeename,Salary,wotkhours
+1,Plumber,Surya,10000,3
+2,Plumber,Sahil,70000,5
+3,Plumber,Waugh,10000,6
+4,Plumber,Steve,10000,7
+5,Plumber,Smith,20000,8
+6,Electrician,Ragini,30000,2
+7,Electrician,Williamson,10000,9
+8,Electrician,James,80000,10
+9,Electrician,Steve Smith,10000,23
+10,Electrician,Mark Waugh,10000,32
\ No newline at end of file