Skip to content

Commit

Permalink
E2E cloud datastore
Browse files Browse the repository at this point in the history
  • Loading branch information
AnkitCLI committed Mar 7, 2024
1 parent 32e7f1b commit 85efbc2
Show file tree
Hide file tree
Showing 8 changed files with 355 additions and 0 deletions.
145 changes: 145 additions & 0 deletions src/e2e-test/features/datastore/runtime.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
# Copyright © 2024 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@DataStore
Feature: DataStore - Verification of Datastore to Datastore successful data transfer

@DATASTORE_SOURCE_ENTITY @DATASTORE_TARGET_ENTITY
Scenario: To verify data is getting transferred from Datastore to Datastore successfully using filter and custom index
Given Open Datafusion Project to configure pipeline
Then Select plugin: "Datastore" from the plugins list as: "Source"
And Navigate to the properties page of plugin: "Datastore"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter key value pairs for plugin property: "filters" with values from json: "filterOptions"
Then Enter kind for datastore plugin
Then Select dropdown plugin property: "keyType" with option value: "None"
Then Click on the Get Schema button
Then Validate "Datastore" plugin properties
Then Close the Plugin Properties page
And Select Sink plugin: "Datastore" from the plugins list
Then Connect plugins: "Datastore" and "Datastore2" to establish connection
Then Navigate to the properties page of plugin: "Datastore2"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "refName"
Then Enter input plugin property: "kind" with value: "targetKind"
Then Enter Ancestor for the datastore plugin
Then Select dropdown plugin property: "indexStrategy" with option value: "Custom"
Then Enter Value for plugin property table key : "indexedProperties" with values: "fieldName"
Then Validate "Datastore2" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Validate OUT record count is equal to IN record count
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs

@DATASTORE_SOURCE_ENTITY
Scenario: To verify data is getting transferred from Datastore to Datastore using Urlsafekey
Given Open Datafusion Project to configure pipeline
Then Select plugin: "Datastore" from the plugins list as: "Source"
And Navigate to the properties page of plugin: "Datastore"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter key value pairs for plugin property: "filters" with values from json: "filterOptions"
Then Enter kind for datastore plugin
Then Select dropdown plugin property: "keyType" with option value: "URL-safe key"
Then Enter input plugin property: "keyAlias" with value: "fieldName"
Then Click on the Get Schema button
Then Validate "Datastore" plugin properties
Then Close the Plugin Properties page
And Select Sink plugin: "Datastore" from the plugins list
Then Connect plugins: "Datastore" and "Datastore2" to establish connection
Then Navigate to the properties page of plugin: "Datastore2"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "refName"
Then Select dropdown plugin property: "keyType" with option value: "URL-safe key"
Then Enter input plugin property: "keyAlias" with value: "fieldName"
Then Enter kind for datastore plugin
Then Validate "Datastore2" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Validate OUT record count is equal to IN record count
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs

@DATASTORE_SOURCE_ENTITY
Scenario: To verify data is getting transferred from Datastore to Datastore using Ancestor and Key Literal
Given Open Datafusion Project to configure pipeline
Then Select plugin: "Datastore" from the plugins list as: "Source"
And Navigate to the properties page of plugin: "Datastore"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter kind for datastore plugin
Then Enter Ancestor for the datastore plugin
Then Select dropdown plugin property: "keyType" with option value: "Key literal"
Then Enter input plugin property: "keyAlias" with value: "fieldName"
Then Click on the Get Schema button
Then Validate "Datastore" plugin properties
Then Close the Plugin Properties page
And Select Sink plugin: "Datastore" from the plugins list
Then Connect plugins: "Datastore" and "Datastore2" to establish connection
Then Navigate to the properties page of plugin: "Datastore2"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "refName"
Then Select dropdown plugin property: "keyType" with option value: "Key literal"
Then Enter input plugin property: "keyAlias" with value: "fieldName"
Then Enter kind for datastore plugin
Then Enter Ancestor for the datastore plugin
Then Validate "Datastore2" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Validate OUT record count is equal to IN record count
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs

@DATASTORE_SOURCE_ENTITY
Scenario: To verify data is getting transferred from Datastore to Datastore using Ancestor and Custom Key
Given Open Datafusion Project to configure pipeline
Then Select plugin: "Datastore" from the plugins list as: "Source"
And Navigate to the properties page of plugin: "Datastore"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter kind for datastore plugin
Then Enter Ancestor for the datastore plugin
Then Select dropdown plugin property: "keyType" with option value: "Key literal"
Then Enter input plugin property: "keyAlias" with value: "fieldName"
Then Click on the Get Schema button
Then Validate "Datastore" plugin properties
Then Close the Plugin Properties page
And Select Sink plugin: "Datastore" from the plugins list
Then Connect plugins: "Datastore" and "Datastore2" to establish connection
Then Navigate to the properties page of plugin: "Datastore2"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "refName"
Then Select dropdown plugin property: "keyType" with option value: "Custom name"
Then Enter input plugin property: "keyAlias" with value: "fieldName"
Then Enter kind for datastore plugin
Then Validate "Datastore2" plugin properties
Then Close the Plugin Properties page
Then Save and Deploy Pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Validate OUT record count is equal to IN record count
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import io.cdap.e2e.utils.BigQueryClient;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.e2e.utils.StorageClient;
import io.cdap.plugin.utils.DataStoreClient;
import io.cdap.plugin.utils.PubSubClient;
import io.cdap.plugin.utils.SpannerClient;
import io.cucumber.java.After;
Expand Down Expand Up @@ -69,6 +70,8 @@ public class TestSetupHooks {
public static String spannerTargetTable = StringUtils.EMPTY;
public static boolean firstSpannerTestFlag = true;
public static String datasetName = PluginPropertyUtils.pluginProp("dataset");
public static String kindName = StringUtils.EMPTY;
public static String targetKind = StringUtils.EMPTY;

@Before(order = 1)
public static void overrideServiceAccountFilePathIfProvided() {
Expand Down Expand Up @@ -1297,6 +1300,33 @@ public static void createBucketWithLifeCycle() throws IOException, URISyntaxExce
gcsTargetBucketName = createGCSBucketLifeCycle();
BeforeActions.scenario.write("GCS target bucket name - " + gcsTargetBucketName); }

@Before(order = 1, value = "@DATASTORE_SOURCE_ENTITY")
public static void createEntityInCloudDataStore() throws IOException, URISyntaxException {
  // Unique kind name per run so parallel/repeated test executions cannot collide.
  kindName = "cdf-test-" + UUID.randomUUID().toString().substring(0, 8);
  String entityName = DataStoreClient.createKind(kindName);
  // Register the generated kind as a plugin property so feature steps can resolve it.
  // BUG FIX: the key was previously registered as " kindName" (leading space), which a
  // lookup for "kindName" would never match.
  PluginPropertyUtils.addPluginProp("kindName", entityName);
  BeforeActions.scenario.write("Kind name - " + entityName + " created successfully");
}

@After(order = 1, value = "@DATASTORE_SOURCE_ENTITY")
public static void deleteEntityInCloudDataStore() throws IOException, URISyntaxException {
  // Remove every entity of the kind created for this scenario (best-effort cleanup).
  DataStoreClient.deleteEntity(kindName);
  // Fixed missing space before "deleted" in the scenario log line.
  BeforeActions.scenario.write("Kind name - " + kindName + " deleted successfully");
}

@Before(order = 2, value = "@DATASTORE_TARGET_ENTITY")
public static void setTempTargetKindName() {
  // Derive a unique sink kind name for this scenario and expose it as a plugin property.
  String uniqueSuffix = UUID.randomUUID().toString().substring(0, 8);
  targetKind = "cdf-target-test-" + uniqueSuffix;
  PluginPropertyUtils.addPluginProp("targetKind", targetKind);
  BeforeActions.scenario.write("Target kind name - " + targetKind);
}

@After(order = 1, value = "@DATASTORE_TARGET_ENTITY")
public static void deleteTargetEntityInCloudDataStore() throws IOException, URISyntaxException {
  // Remove every entity written to the temporary target kind (best-effort cleanup).
  DataStoreClient.deleteEntity(targetKind);
  // Fixed missing space before "deleted" in the scenario log line.
  BeforeActions.scenario.write("Target Kind name - " + targetKind + " deleted successfully");
}

@Before(order = 1, value = "@BQEXECUTE_SOURCE_TEST")
public static void createBQEcxecuteSourceBQTable() throws IOException, InterruptedException {
bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
Expand Down
38 changes: 38 additions & 0 deletions src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
 * Copyright © 2024 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.datastore.runner;


import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute Datastore cases.
 *
 * <p>Runs all feature files under {@code src/e2e-test/features} that are tagged
 * {@code @DataStore}, wiring in the Datastore, GCS, BigQuery and common step
 * definitions, and writes HTML/JSON/JUnit reports under {@code target/}.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
  features = {"src/e2e-test/features"},
  glue = {"io.cdap.plugin.datastore.stepsdesign", "io.cdap.plugin.gcs.stepsdesign",
    "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.bigquery.stepsdesign", "stepsdesign"},
  tags = {"@DataStore"},
  monochrome = true,
  plugin = {"pretty", "html:target/cucumber-html-report/datastore",
    "json:target/cucumber-reports/cucumber-datastore.json",
    "junit:target/cucumber-reports/cucumber-datastore.xml"}
)
public class TestRunner {
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package io.cdap.plugin.datastore.stepsdesign;

import io.cdap.e2e.utils.ElementHelper;
import io.cdap.e2e.utils.SeleniumHelper;
import io.cdap.plugin.utils.DataStoreClient;

/**
 * UI actions for the Datastore plugin properties page.
 */
public class DataStoreActions {

  static {
    // Initialize the @FindBy locators declared on DataStoreLocators.
    SeleniumHelper.getPropertiesLocators(DataStoreLocators.class);
  }

  /**
   * Types the given kind name into the Datastore plugin's "kind" input field.
   *
   * @param kindName kind name to enter
   */
  public static void enterKind(String kindName) {
    ElementHelper.sendKeys(DataStoreLocators.kind, kindName);
  }

  /**
   * Types the key literal of the most recently created test entity
   * (from {@link DataStoreClient#getKeyLiteral()}) into the "ancestor" input field.
   */
  public static void enterAncestor() {
    ElementHelper.sendKeys(DataStoreLocators.ancestor, DataStoreClient.getKeyLiteral());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
package io.cdap.plugin.datastore.stepsdesign;

import io.cdap.e2e.utils.SeleniumDriver;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.How;

/**
 * Selenium locators for the Datastore plugin properties page.
 */
public class DataStoreLocators {
  // "Kind" input field on the plugin properties page.
  @FindBy(how = How.XPATH, using = "//input[@data-testid='kind']")
  public static WebElement kind;

  // "Ancestor" input field on the plugin properties page.
  @FindBy(how = How.XPATH, using = "//input[@data-testid='ancestor']")
  public static WebElement ancestor;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
package io.cdap.plugin.datastore.stepsdesign;
import io.cdap.e2e.utils.ElementHelper;
import io.cdap.plugin.common.stepsdesign.TestSetupHooks;

import io.cucumber.java.en.Then;

/**
 * Step definitions for Datastore plugin related Gherkin steps.
 */
public class MyStepdefs {

  /**
   * Enters the kind name generated in test setup into the plugin's kind field.
   */
  @Then("Enter kind for datastore plugin")
  public void enterKindForDatastorePlugin() {
    DataStoreActions.enterKind(TestSetupHooks.kindName);
  }

  /**
   * Enters the created entity's key literal into the plugin's ancestor field.
   */
  @Then("Enter Ancestor for the datastore plugin")
  public void enterAncestorForTheDatastorePlugin() {
    DataStoreActions.enterAncestor();
  }
}
84 changes: 84 additions & 0 deletions src/e2e-test/java/io/cdap/plugin/utils/DataStoreClient.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
package io.cdap.plugin.utils;

import com.google.cloud.datastore.Datastore;
import com.google.cloud.datastore.DatastoreOptions;
import com.google.cloud.datastore.Entity;
import com.google.cloud.datastore.FullEntity;
import com.google.cloud.datastore.IncompleteKey;
import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.KeyFactory;
import com.google.cloud.datastore.Query;
import com.google.cloud.datastore.QueryResults;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Date;

/**
 * Helper client around Google Cloud Datastore used by e2e tests to create
 * test entities, build key literals, and clean up after scenarios.
 */
public class DataStoreClient {
  // BUG FIX: was LoggerFactory.getLogger(PubSubClient.class), which attributed
  // all Datastore log lines to the wrong class.
  private static final Logger logger = LoggerFactory.getLogger(DataStoreClient.class);
  static Datastore datastore = DatastoreOptions.getDefaultInstance().getService();
  // Key of the most recently created entity; read by getKeyLiteral().
  static Key key;

  /**
   * Creates a new entity of the specified kind in Google Cloud Datastore.
   *
   * @param kindName the kind name for the entity to be created
   * @return the kind name of the created entity
   */
  public static String createKind(String kindName) {
    KeyFactory keyFactory = datastore.newKeyFactory().setKind(kindName);
    // Incomplete key: the numeric ID is auto-generated when the entity is saved.
    IncompleteKey incompleteKey = keyFactory.newKey();
    // Fixed test payload; the "done" and "firstName" properties are referenced by
    // the feature file's filterOptions/fieldName plugin parameters.
    FullEntity<IncompleteKey> entity = Entity.newBuilder(incompleteKey)
      .set("firstName", "Antonio")
      .set("age", 23)
      .set("done", true)
      .set("hireDate", String.valueOf(new Date()))
      .build();

    // Save the entity and remember its generated key for getKeyLiteral().
    Entity savedEntity = datastore.put(entity);
    key = savedEntity.getKey();
    logger.info("Entity saved with key: {}", key);

    return kindName;
  }

  /**
   * Deletes all entities of the specified kind from Google Cloud Datastore.
   *
   * @param kindName the kind name of the entities to be deleted
   */
  public static void deleteEntity(String kindName) {
    Query<Entity> query = Query.newEntityQueryBuilder()
      .setKind(kindName)
      .build();
    // Execute the query and delete every matching entity one by one.
    QueryResults<Entity> queryResults = datastore.run(query);
    while (queryResults.hasNext()) {
      Entity entity = queryResults.next();
      Key entityKey = entity.getKey();
      datastore.delete(entityKey);
      logger.info("Entity deleted: {}", entityKey);
    }

    logger.info("All entities of kind '{}' deleted successfully.", kindName);
  }

  /**
   * Returns the key-literal representation (e.g. {@code Key(kind, 123)}) of the
   * key of the entity most recently created by {@link #createKind(String)}.
   *
   * @return the key-literal representation of the current entity key
   * @throws IllegalStateException if no entity has been created yet
   */
  public static String getKeyLiteral() {
    if (key == null) {
      // Previously this failed with an opaque NullPointerException.
      throw new IllegalStateException("No entity created yet; call createKind() first.");
    }
    // Keys created here are auto-generated, so they carry a numeric ID
    // (getId() would be null only for name-based keys).
    return String.format("Key(%s, %d)", key.getKind(), key.getId());
  }
}
5 changes: 5 additions & 0 deletions src/e2e-test/resources/pluginParameters.properties
Original file line number Diff line number Diff line change
Expand Up @@ -333,3 +333,8 @@ bqExecuteCountDMLUpsertInsert=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENA
bqExecuteCountDMLUpsertUpdate=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENAME` WHERE Id=101 AND Value=5000 AND UID='UPDATED RECORD'
bqExecuteInsertFile=testdata/BQExecute/BQExecuteInsertFile
## BQEXECUTE-PLUGIN-PROPERTIES-END

## DATASTORE-PLUGIN-PROPERTIES-START
fieldName=firstName
filterOptions=[{"key":"done","value":"true"}]
## DATASTORE-PLUGIN-PROPERTIES-END

0 comments on commit 85efbc2

Please sign in to comment.