Skip to content

Commit

Permalink
gcscopy e2e tests
Browse files Browse the repository at this point in the history
  • Loading branch information
priyabhatnagar25 committed Sep 11, 2023
1 parent dc04dc6 commit fef2089
Show file tree
Hide file tree
Showing 22 changed files with 720 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ jobs:
)
strategy:
matrix:
tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute]
tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy]
fail-fast: false
steps:
# Pinned 1.0.0 version
Expand Down
1 change: 1 addition & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -1157,6 +1157,7 @@
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
Expand Down
153 changes: 153 additions & 0 deletions src/e2e-test/features/gcscopy/GCSCopy.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@GCSCopy
Feature: GCSCopy - Verification of successful objects copy from one bucket to another

@CMEK @GCS_CSV_TEST @GCS_SINK_TEST
Scenario:Validate successful copy object from one bucket to another new bucket along with data validation.
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Replace input plugin property: "project" with value: "projectId"
And Enter GCSCopy property source path "gcsCsvFile"
And Enter GCSCopy property destination path
Then Override Service account details if set in environment variables
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate GCSCopy successfully copies object "gcsCsvFile" to destination bucket
Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"

@GCS_READ_RECURSIVE_TEST @GCS_SINK_TEST @GCSCopy_Required
Scenario: Validate successful copy objects from one bucket to another with Copy All Subdirectories set to true along with data validation.
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Replace input plugin property: "project" with value: "projectId"
And Enter GCSCopy property source path "gcsCopyReadRecursivePath"
And Enter GCSCopy property destination path
Then Override Service account details if set in environment variables
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Select radio button plugin property: "recursive" with value: "true"
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate GCSCopy successfully copies object "gcsCopyReadRecursivePath" to destination bucket
Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyRecursivePath"

@GCS_READ_RECURSIVE_TEST @GCS_SINK_TEST @GCSCopy_Required
Scenario: Validate successful copy objects from one bucket to another with Copy All Subdirectories set to false along with data validation.
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Replace input plugin property: "project" with value: "projectId"
And Enter GCSCopy property source path "gcsCopyReadRecursivePath"
And Enter GCSCopy property destination path
Then Override Service account details if set in environment variables
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Select radio button plugin property: "recursive" with value: "false"
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate GCSCopy did not copy subdirectory "gcsCopyReadRecursiveSubDirectory" to destination bucket
Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyRecursivePath"

@GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST @GCSCopy_Required
Scenario: Validate successful copy objects from one bucket to another existing bucket with Overwrite Existing Files set to true along with data validation.
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Replace input plugin property: "project" with value: "projectId"
And Enter GCSCopy property source path "gcsCsvFile"
Then Enter GCSCopy property destination path "gcsCopyReadRecursivePath"
Then Override Service account details if set in environment variables
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Select radio button plugin property: "overwrite" with value: "true"
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate GCSCopy successfully copies object "gcsCsvFile" to destination bucket
Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"

@GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST
Scenario: Validate successful copy objects from one bucket to another existing bucket with Overwrite Existing Files set to false along with data validation.
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Replace input plugin property: "project" with value: "projectId"
And Enter GCSCopy property source path "gcsCsvFile"
Then Enter GCSCopy property destination path "gcsCopyReadRecursivePath"
Then Override Service account details if set in environment variables
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Select radio button plugin property: "overwrite" with value: "false"
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
And Verify the pipeline status is "Failed"
Then Close the pipeline logs
Then Validate GCSCopy failed to copy object "gcsCsvFile" to destination bucket
Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"

@GCS_CSV_TEST @GCS_SINK_TEST
Scenario:Validate successful Copy object from one bucket to another new bucket with location set to non-default value
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Replace input plugin property: "project" with value: "projectId"
And Enter GCSCopy property source path "gcsCsvFile"
And Enter GCSCopy property destination path
Then Override Service account details if set in environment variables
Then Replace input plugin property: "location" with value: "locationEU"
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate GCSCopy successfully copied object "gcsCsvFile" to destination bucket in location "locationEU"
Then Validate the data of GCS Copy source bucket and destination bucket "gcsCopyCsvExpectedFilePath"
60 changes: 60 additions & 0 deletions src/e2e-test/features/gcscopy/GCSCopyErrorScenarios.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@GCSCopy
Feature: GCSCopy - Validate GCSCopy plugin error scenarios

Scenario:Verify GCSCopy plugin properties validation errors for mandatory fields
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
Then Click on the Validate button
Then Verify mandatory property error for below listed properties:
| sourcePath |
| destPath |

@GCS_SINK_TEST
Scenario:Verify GCSCopy plugin error message for invalid bucket name in Source Path
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Enter input plugin property: "sourcePath" with value: "invalidsourcePath"
And Enter input plugin property: "destPath" with value: "gcsTargetBucketName"
Then Click on the Validate button
Then Verify that the Plugin Property: "sourcePath" is displaying an in-line error message: "errorMessageInvalidSourcePath"

@GCS_CSV_TEST
Scenario:Verify GCSCopy plugin error message for invalid bucket name in Destination Path
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Enter input plugin property: "sourcePath" with value: "gcsCsvFile"
And Enter input plugin property: "destPath" with value: "invaliddestPath"
Then Click on the Validate button
Then Verify that the Plugin Property: "destPath" is displaying an in-line error message: "errorMessageInvalidDestPath"

@GCS_CSV_TEST @GCS_SINK_TEST
Scenario:Verify GCSCopy plugin error message for invalid Encryption Key Name
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
And Enter input plugin property: "sourcePath" with value: "gcsCsvFile"
And Enter input plugin property: "destPath" with value: "gcsTargetBucketName"
And Enter input plugin property: "cmekKey" with value: "invalidEncryptionKey"
Then Click on the Validate button
Then Verify that the Plugin Property: "cmekKey" is displaying an in-line error message: "errorMessageInvalidEncryptionKey"
41 changes: 41 additions & 0 deletions src/e2e-test/features/gcscopy/GCSCopy_WithMacro.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@GCSCopy
Feature: GCSCopy - Verification of successful objects copy from one bucket to another with macro arguments

@CMEK @GCS_CSV_TEST @GCS_SINK_TEST
Scenario:Validate successful copy object from one bucket to another new bucket with macro arguments
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
When Navigate to the properties page of plugin: "GCS Copy"
Then Click on the Macro button of Property: "project" and set the value to: "projectId"
Then Click on the Macro button of Property: "sourcePath" and set the value to: "SourcePath"
Then Click on the Macro button of Property: "destPath" and set the value to: "DestPath"
Then Override Service account details if set in environment variables
Then Enter GCSCopy property encryption key name "cmekGCS" if cmek is enabled
Then Validate "GCS Copy" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Enter runtime argument value "projectId" for key "projectId"
Then Enter runtime argument value "gcsCsvFile" for GCSCopy property sourcePath key "SourcePath"
Then Enter runtime argument value for GCSCopy property destination path key "DestPath"
Then Run the Pipeline in Runtime with runtime arguments
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate GCSCopy successfully copies object "gcsCsvFile" to destination bucket
4 changes: 2 additions & 2 deletions src/e2e-test/features/gcsmove/GCSMove.feature
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ Feature:GCSMove - Verification of successful objects move from one bucket to ano
Then Verify the pipeline status is "Succeeded"
Then Validate GCSMove successfully moved object "gcsCsvFile" to destination bucket

@GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST @PLUGIN-1134
@GCS_CSV_TEST @GCS_SINK_EXISTING_BUCKET_TEST
Scenario:Validate successful move objects from one bucket to another existing bucket with Overwrite Existing Files set to false
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
Expand All @@ -99,7 +99,7 @@ Feature:GCSMove - Verification of successful objects move from one bucket to ano
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "failed"
And Verify the pipeline status is "Failed"
Then Validate GCSMove failed to move object "gcsCsvFile" to destination bucket

@GCS_CSV_TEST @GCS_SINK_TEST
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/**
* Package contains the stepDesign for the GCS features.
* Package contains the step definitions for the GCS plugin.
*/
package io.cdap.plugin.gcs.stepsdesign;
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.gcscopy.actions;

import io.cdap.e2e.utils.ElementHelper;
import io.cdap.e2e.utils.SeleniumHelper;
import io.cdap.plugin.gcscopy.locators.GCSCopyLocators;

/**
 * GCS Copy plugin related actions.
 *
 * <p>Thin UI-automation helpers used by the GCS Copy e2e step definitions to fill in
 * the plugin's property fields via the web elements declared in {@link GCSCopyLocators}.
 */

public class GCSCopyActions {
  // Initialize the @FindBy-annotated WebElement fields of GCSCopyLocators once,
  // before any action method touches them.
  static {
    SeleniumHelper.getPropertiesLocators(GCSCopyLocators.class);
  }

  /**
   * Sets the "Source Path" property field, replacing any existing value.
   *
   * @param sourcePath GCS path of the objects to copy
   */
  public static void enterSourcePath(String sourcePath) {
    ElementHelper.replaceElementValue(GCSCopyLocators.gcsCopySourcePath, sourcePath);
  }

  /**
   * Sets the "Destination Path" property field, replacing any existing value.
   *
   * @param destinationPath GCS path the objects are copied to
   */
  public static void enterDestinationPath(String destinationPath) {
    ElementHelper.replaceElementValue(GCSCopyLocators.gcsCopyDestinationPath, destinationPath);
  }

  /**
   * Types the CMEK key name into the "Encryption Key Name" property field.
   *
   * <p>NOTE(review): unlike the two methods above this uses {@code sendKeys}, which
   * appends to any existing value rather than replacing it — presumably the field is
   * always empty when this runs; confirm with the calling step definitions.
   *
   * @param cmek customer-managed encryption key resource name
   */
  public static void enterEncryptionKeyName(String cmek) {
    GCSCopyLocators.gcsCopyEncryptionKey.sendKeys(cmek);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
/**
* Package contains the actions for the GCS Copy plugin.
*/
package io.cdap.plugin.gcscopy.actions;
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.gcscopy.locators;

import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.How;

/**
 * Locators for the GCS Copy plugin properties page.
 *
 * <p>Each input field is located through the {@code data-testid} attribute rendered
 * on the corresponding plugin property. Fields are populated by the Selenium page
 * factory (see the static initializer in the actions class).
 */
public class GCSCopyLocators {

  // "Source Path" property input.
  @FindBy(how = How.XPATH, using = "//input[@data-testid='sourcePath']")
  public static WebElement gcsCopySourcePath;

  // "Destination Path" property input.
  @FindBy(how = How.XPATH, using = "//input[@data-testid='destPath']")
  public static WebElement gcsCopyDestinationPath;

  // "Encryption Key Name" (CMEK) property input.
  @FindBy(how = How.XPATH, using = "//input[@data-testid='cmekKey']")
  public static WebElement gcsCopyEncryptionKey;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
/**
* Package contains the locators for the GCS Copy plugin.
*/
package io.cdap.plugin.gcscopy.locators;
Loading

0 comments on commit fef2089

Please sign in to comment.