diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index e10d21f28f..1fc2003ce5 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -40,7 +40,7 @@ jobs:
       )
     strategy:
       matrix:
-        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy]
+        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy, datastore]
       fail-fast: false
     steps:
       # Pinned 1.0.0 version
diff --git a/src/e2e-test/features/datastore/runtime.feature b/src/e2e-test/features/datastore/runtime.feature
new file mode 100644
index 0000000000..90a3cd2b1d
--- /dev/null
+++ b/src/e2e-test/features/datastore/runtime.feature
@@ -0,0 +1,149 @@
+# Copyright © 2024 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@DataStore
+Feature: DataStore - Verification of Datastore to Datastore Successful Data Transfer
+
+  @DATASTORE_SOURCE_ENTITY @datastore_Required
+  Scenario: To verify data is getting transferred from Datastore to Datastore successfully using filter and custom index
+    Given Open Datafusion Project to configure pipeline
+    Then Select plugin: "Datastore" from the plugins list as: "Source"
+    And Navigate to the properties page of plugin: "Datastore"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "ReferenceName"
+    Then Enter key value pairs for plugin property: "filters" with values from json: "filterOptions"
+    Then Enter kind for datastore plugin
+    Then Select dropdown plugin property: "keyType" with option value: "None"
+    Then Click on the Get Schema button
+    Then Validate "Datastore" plugin properties
+    Then Close the Plugin Properties page
+    And Select Sink plugin: "Datastore" from the plugins list
+    Then Connect plugins: "Datastore" and "Datastore2" to establish connection
+    Then Navigate to the properties page of plugin: "Datastore2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "refName"
+    Then Enter kind for datastore plugin
+    Then Select dropdown plugin property: "indexStrategy" with option value: "Custom"
+    Then Enter Value for plugin property table key : "indexedProperties" with values: "propertyName"
+    Then Validate "datastore2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save and Deploy Pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Validate OUT record count is equal to IN record count
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate The Data From Datastore To Datastore With Actual And Expected File for: "dsExpectedFile"
+
+  @DATASTORE_SOURCE_ENTITY @datastore_Required
+  Scenario: To verify data is getting transferred from Datastore to Datastore using Urlsafekey
+    Given Open Datafusion Project to configure pipeline
+    Then Select plugin: "Datastore" from the plugins list as: "Source"
+    And Navigate to the properties page of plugin: "Datastore"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "ReferenceName"
+    Then Enter key value pairs for plugin property: "filters" with values from json: "filterOptions"
+    Then Enter kind for datastore plugin
+    Then Select dropdown plugin property: "keyType" with option value: "URL-safe key"
+    Then Enter input plugin property: "keyAlias" with value: "fieldName"
+    Then Click on the Get Schema button
+    Then Validate "Datastore" plugin properties
+    Then Close the Plugin Properties page
+    And Select Sink plugin: "Datastore" from the plugins list
+    Then Connect plugins: "Datastore" and "Datastore2" to establish connection
+    Then Navigate to the properties page of plugin: "Datastore2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "refName"
+    Then Select dropdown plugin property: "keyType" with option value: "URL-safe key"
+    Then Enter input plugin property: "keyAlias" with value: "fieldName"
+    Then Enter kind for datastore plugin
+    Then Enter Ancestor for the datastore plugin
+    Then Validate "datastore2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save and Deploy Pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Validate OUT record count is equal to IN record count
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate The Data From Datastore To Datastore With Actual And Expected File for: "dsExpectedFile"
+
+  @DATASTORE_SOURCE_ENTITY @datastore_Required
+  Scenario: To verify data is getting transferred from Datastore to Datastore using Ancestor and Key Literal
+    Given Open Datafusion Project to configure pipeline
+    Then Select plugin: "Datastore" from the plugins list as: "Source"
+    And Navigate to the properties page of plugin: "Datastore"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "ReferenceName"
+    Then Enter kind for datastore plugin
+    Then Enter Ancestor for the datastore plugin
+    Then Select dropdown plugin property: "keyType" with option value: "Key literal"
+    Then Enter input plugin property: "keyAlias" with value: "fieldName"
+    Then Click on the Get Schema button
+    Then Validate "Datastore" plugin properties
+    Then Close the Plugin Properties page
+    And Select Sink plugin: "Datastore" from the plugins list
+    Then Connect plugins: "Datastore" and "Datastore2" to establish connection
+    Then Navigate to the properties page of plugin: "Datastore2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "refName"
+    Then Select dropdown plugin property: "keyType" with option value: "Key literal"
+    Then Enter input plugin property: "keyAlias" with value: "fieldName"
+    Then Enter kind for datastore plugin
+    Then Enter Ancestor for the datastore plugin
+    Then Validate "datastore2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save and Deploy Pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Validate OUT record count is equal to IN record count
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate The Data From Datastore To Datastore With Actual And Expected File for: "dsExpectedFile"
+
+  @DATASTORE_SOURCE_ENTITY @datastore_Required
+  Scenario: To verify data is getting transferred from Datastore to Datastore using Ancestor and Custom Key
+    Given Open Datafusion Project to configure pipeline
+    Then Select plugin: "Datastore" from the plugins list as: "Source"
+    And Navigate to the properties page of plugin: "Datastore"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "ReferenceName"
+    Then Enter kind for datastore plugin
+    Then Enter Ancestor for the datastore plugin
+    Then Select dropdown plugin property: "keyType" with option value: "Key literal"
+    Then Enter input plugin property: "keyAlias" with value: "fieldName"
+    Then Click on the Get Schema button
+    Then Validate "Datastore" plugin properties
+    Then Close the Plugin Properties page
+    And Select Sink plugin: "Datastore" from the plugins list
+    Then Connect plugins: "Datastore" and "Datastore2" to establish connection
+    Then Navigate to the properties page of plugin: "Datastore2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "refName"
+    Then Select dropdown plugin property: "keyType" with option value: "Custom name"
+    Then Enter input plugin property: "keyAlias" with value: "fieldName"
+    Then Enter kind for datastore plugin
+    Then Validate "datastore2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save and Deploy Pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Validate OUT record count is equal to IN record count
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate The Data From Datastore To Datastore With Actual And Expected File for: "dsExpectedFile"
diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
index fc92c2e6eb..52badd9a16 100644
--- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
+++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -23,6 +23,7 @@
 import io.cdap.e2e.utils.BigQueryClient;
 import io.cdap.e2e.utils.PluginPropertyUtils;
 import io.cdap.e2e.utils.StorageClient;
+import io.cdap.plugin.utils.DataStoreClient;
 import io.cdap.plugin.utils.PubSubClient;
 import io.cdap.plugin.utils.SpannerClient;
 import io.cucumber.java.After;
@@ -69,6 +70,8 @@ public class TestSetupHooks {
   public static String spannerTargetTable = StringUtils.EMPTY;
   public static boolean firstSpannerTestFlag = true;
   public static String datasetName = PluginPropertyUtils.pluginProp("dataset");
+  public static String kindName = StringUtils.EMPTY;
+  public static String targetKind = StringUtils.EMPTY;
   public static String spannerExistingTargetTable = StringUtils.EMPTY;

   @Before(order = 1)
@@ -1298,6 +1301,33 @@ public static void createBucketWithLifeCycle() throws IOException, URISyntaxExce
     gcsTargetBucketName = createGCSBucketLifeCycle();
     BeforeActions.scenario.write("GCS target bucket name - " + gcsTargetBucketName);
   }
+  @Before(order = 1, value = "@DATASTORE_SOURCE_ENTITY")
+  public static void createEntityInCloudDataStore() throws IOException, URISyntaxException {
+    kindName = "cdf-test-" + UUID.randomUUID().toString().substring(0, 8);
+    String entityName = DataStoreClient.createKind(kindName);
+    PluginPropertyUtils.addPluginProp("kindName", entityName);
+    BeforeActions.scenario.write("Kind name - " + entityName + " created successfully");
+  }
+
+  @After(order = 1, value = "@DATASTORE_SOURCE_ENTITY")
+  public static void deleteEntityInCloudDataStore() throws IOException, URISyntaxException {
+    DataStoreClient.deleteEntity(kindName);
+    BeforeActions.scenario.write("Kind name - " + kindName + " deleted successfully");
+  }
+
+  @Before(order = 2, value = "@DATASTORE_TARGET_ENTITY")
+  public static void setTempTargetKindName() {
+    targetKind = "cdf-target-test-" + UUID.randomUUID().toString().substring(0, 8);
+    PluginPropertyUtils.addPluginProp("targetKind", targetKind);
+    BeforeActions.scenario.write("Target kind name - " + targetKind);
+  }
+
+  @After(order = 1, value = "@DATASTORE_TARGET_ENTITY")
+  public static void deleteTargetEntityInCloudDataStore() throws IOException, URISyntaxException {
+    DataStoreClient.deleteEntity(targetKind);
+    BeforeActions.scenario.write("Target Kind name - " + targetKind + " deleted successfully");
+  }
+
   @Before(order = 1, value = "@BQEXECUTE_SOURCE_TEST")
   public static void createBQEcxecuteSourceBQTable() throws IOException, InterruptedException {
     bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
@@ -1361,5 +1391,4 @@ public static void makeExistingTargetSpannerDBAndTableName() {
       e.printStackTrace();
     }
   }
-
 }
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/actions/DataStoreActions.java b/src/e2e-test/java/io/cdap/plugin/datastore/actions/DataStoreActions.java
new file mode 100644
index 0000000000..84c5fa4b0e
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/actions/DataStoreActions.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.datastore.actions;
+
+import io.cdap.e2e.utils.ElementHelper;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.datastore.locators.DataStoreLocators;
+import io.cdap.plugin.utils.DataStoreClient;
+
+/**
+ * DataStore Plugin related actions.
+ */
+public class DataStoreActions {
+  static {
+    SeleniumHelper.getPropertiesLocators(DataStoreLocators.class);
+  }
+
+  /**
+   * Enters the specified kind name into the appropriate field in the user interface.
+   *
+   * @param kindName the name of the kind to be entered
+   */
+  public static void enterKind(String kindName) {
+    ElementHelper.sendKeys(DataStoreLocators.kind, kindName);
+  }
+
+  /**
+   * Enters the key literal of the current entity into the appropriate field in the user interface
+   * as the ancestor.
+   */
+  public static void enterAncestor() {
+    ElementHelper.sendKeys(DataStoreLocators.ancestor, DataStoreClient.getKeyLiteral());
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/actions/package-info.java b/src/e2e-test/java/io/cdap/plugin/datastore/actions/package-info.java
new file mode 100644
index 0000000000..11414a5a3c
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/actions/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the actions for the DataStore features.
+ */
+package io.cdap.plugin.datastore.actions;
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/locators/DataStoreLocators.java b/src/e2e-test/java/io/cdap/plugin/datastore/locators/DataStoreLocators.java
new file mode 100644
index 0000000000..6e2cf575c1
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/locators/DataStoreLocators.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.datastore.locators;
+
+import org.openqa.selenium.WebElement;
+import org.openqa.selenium.support.FindBy;
+import org.openqa.selenium.support.How;
+
+/**
+ * DataStore Plugin related locators.
+ */
+public class DataStoreLocators {
+  @FindBy(how = How.XPATH, using = "//input[@data-testid='kind']")
+  public static WebElement kind;
+
+  @FindBy(how = How.XPATH, using = "//input[@data-testid='ancestor']")
+  public static WebElement ancestor;
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/locators/package-info.java b/src/e2e-test/java/io/cdap/plugin/datastore/locators/package-info.java
new file mode 100644
index 0000000000..491056b21c
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/locators/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the locators for the DataStore features.
+ */
+package io.cdap.plugin.datastore.locators;
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java
new file mode 100644
index 0000000000..1057d0c1d7
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.datastore.runner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute Datastore cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.datastore.stepsdesign", "io.cdap.plugin.common.stepsdesign",
+    "stepsdesign"},
+  tags = {"@DataStore"},
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/datastore",
+    "json:target/cucumber-reports/cucumber-datastore.json",
+    "junit:target/cucumber-reports/cucumber-datastore.xml"}
+)
+public class TestRunner {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunnerRequired.java b/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunnerRequired.java
new file mode 100644
index 0000000000..84ec0eb2c7
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunnerRequired.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.datastore.runner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only required DataStore cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.datastore.stepsdesign", "io.cdap.plugin.common.stepsdesign",
+    "stepsdesign"},
+  tags = {"@datastore_Required"},
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/datastore-required",
+    "json:target/cucumber-reports/cucumber-datastore-required.json",
+    "junit:target/cucumber-reports/cucumber-datastore-required.xml"}
+)
+
+public class TestRunnerRequired {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/runner/package-info.java b/src/e2e-test/java/io/cdap/plugin/datastore/runner/package-info.java
new file mode 100644
index 0000000000..5eb95c857f
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/runner/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the DataStore runners.
+ */
+package io.cdap.plugin.datastore.runner;
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/stepsdesign/StepDesign.java b/src/e2e-test/java/io/cdap/plugin/datastore/stepsdesign/StepDesign.java
new file mode 100644
index 0000000000..abca3e77a1
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/stepsdesign/StepDesign.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.datastore.stepsdesign;
+
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
+import io.cdap.plugin.datastore.actions.DataStoreActions;
+import io.cdap.plugin.utils.DataStoreClient;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * DataStore Plugin related step design.
+ */
+public class StepDesign {
+  @Then("Enter kind for datastore plugin")
+  public void enterKindForDatastorePlugin() {
+    DataStoreActions.enterKind(TestSetupHooks.kindName);
+  }
+
+  @Then("Enter Ancestor for the datastore plugin")
+  public void enterAncestorForTheDatastorePlugin() {
+    DataStoreActions.enterAncestor();
+  }
+
+  @Then("Validate The Data From Datastore To Datastore With Actual And Expected File for: {string}")
+  public void validateTheDataFromDatastoreToDatastoreWithActualAndExpectedFileFor(String expectedFile)
+    throws URISyntaxException {
+    boolean recordsMatched = DataStoreClient.validateActualDataToExpectedData(
+      PluginPropertyUtils.pluginProp("kindName"),
+      PluginPropertyUtils.pluginProp(expectedFile));
+    Assert.assertTrue("Records in Datastore do not match the records in the expected file", recordsMatched);
+  }
+}
+
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/datastore/stepsdesign/package-info.java
new file mode 100644
index 0000000000..d675a3942b
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/stepsdesign/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the stepDesign for the DataStore features.
+ */
+package io.cdap.plugin.datastore.stepsdesign;
diff --git a/src/e2e-test/java/io/cdap/plugin/utils/DataStoreClient.java b/src/e2e-test/java/io/cdap/plugin/utils/DataStoreClient.java
new file mode 100644
index 0000000000..927f88ec2b
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/utils/DataStoreClient.java
@@ -0,0 +1,154 @@
+package io.cdap.plugin.utils;
+
+import com.google.cloud.datastore.Datastore;
+import com.google.cloud.datastore.DatastoreOptions;
+import com.google.cloud.datastore.Entity;
+import com.google.cloud.datastore.FullEntity;
+import com.google.cloud.datastore.IncompleteKey;
+import com.google.cloud.datastore.Key;
+import com.google.cloud.datastore.KeyFactory;
+import com.google.cloud.datastore.Query;
+import com.google.cloud.datastore.QueryResults;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Represents DataStore client.
+ */
+public class DataStoreClient {
+  static Gson gson = new Gson();
+  private static final Logger logger = LoggerFactory.getLogger(DataStoreClient.class);
+  static Datastore datastore = DatastoreOptions.newBuilder()
+    .setProjectId(PluginPropertyUtils.pluginProp("projectId")).build().getService();
+  static Key key;
+
+  /**
+   * Creates a new entity of the specified kind in Google Cloud Datastore.
+   *
+   * @param kindName the kind name for the entity to be created
+   * @return the kind name of the created entity
+   */
+  public static String createKind(String kindName) {
+    KeyFactory keyFactory = datastore.newKeyFactory().setKind(kindName);
+    // Create an incomplete key (it will be auto-generated when saved)
+    IncompleteKey incompleteKey = keyFactory.newKey();
+    // Build the entity
+    FullEntity<IncompleteKey> entity = FullEntity.newBuilder(incompleteKey)
+      .set("firstName", PluginPropertyUtils.pluginProp("name"))
+      .set("age", Integer.parseInt(PluginPropertyUtils.pluginProp("age")))
+      .set("isValid", PluginPropertyUtils.pluginProp("result").isEmpty())
+      .set("postalAdd", Float.parseFloat(PluginPropertyUtils.pluginProp("address")))
+      // Add other properties as needed
+      .build();
+
+    // Save the entity
+    Entity savedEntity = datastore.put(entity);
+    key = savedEntity.getKey();
+    logger.info("Entity saved with key: " + key);
+
+    return kindName;
+  }
+
+  /**
+   * Deletes all entities of the specified kind from Google Cloud Datastore.
+   *
+   * @param kindName the kind name of the entities to be deleted
+   */
+  public static void deleteEntity(String kindName) {
+    Query<Entity> query = Query.newEntityQueryBuilder()
+      .setKind(kindName)
+      .build();
+    // Execute the query
+    QueryResults<Entity> queryResults = datastore.run(query);
+    // Delete each entity
+    while (queryResults.hasNext()) {
+      Entity entity = queryResults.next();
+      Key entityKey = entity.getKey();
+      datastore.delete(entityKey);
+      logger.info("Entity deleted: " + entityKey);
+    }
+
+    logger.info("All entities of kind '" + kindName + "' deleted successfully.");
+  }
+
+  /**
+   * Returns the key-literal representation of the current entity key.
+   *
+   * @return the key-literal representation of the current entity key
+   */
+  public static String getKeyLiteral() {
+    String kind = key.getKind(); // Get the kind of the entity
+    long id = key.getId(); // Get the ID of the entity
+    String keyLiteral = String.format("Key(%s, %d)", kind, id);
+
+    return keyLiteral;
+  }
+
+  /**
+   * Compares the entities of the given kind in Datastore with the records in the expected file.
+   *
+   * @param kindName the kind whose entities are validated
+   * @param fileName the expected file on the test classpath
+   * @return true if the Datastore records match the expected file records
+   */
+  public static boolean validateActualDataToExpectedData(String kindName, String fileName) throws URISyntaxException {
+    Map<String, JsonObject> datastoreMap;
+    Map<String, JsonObject> fileMap = new HashMap<>();
+    Path importExpectedFile = Paths.get(DataStoreClient.class.getResource("/" + fileName).toURI());
+    datastoreMap = fetchEntities(kindName);
+    getFileData(importExpectedFile.toString(), fileMap);
+
+    boolean isMatched = datastoreMap.equals(fileMap);
+
+    return isMatched;
+  }
+
+  /**
+   * Fetches all entities of the given kind and maps them by their "age" value.
+   *
+   * @param kindName the kind to query
+   * @return a map of entity records keyed by age
+   */
+  public static Map<String, JsonObject> fetchEntities(String kindName) {
+    Map<String, JsonObject> entityMap = new HashMap<>();
+
+    Query<Entity> query = Query.newEntityQueryBuilder().setKind(kindName).build();
+    QueryResults<Entity> results = datastore.run(query);
+
+    while (results.hasNext()) {
+      Entity entity = results.next();
+      JsonObject jsonObject = new JsonObject();
+      jsonObject.addProperty("firstName", entity.getString("firstName"));
+      jsonObject.addProperty("age", entity.getLong("age"));
+      jsonObject.addProperty("isValid", entity.getBoolean("isValid"));
+      jsonObject.addProperty("postalAdd", entity.getDouble("postalAdd"));
+
+      // Store the entity JSON in the map keyed by its age value
+      String ageKey = String.valueOf(entity.getLong("age"));
+      entityMap.put(ageKey, jsonObject);
+    }
+
+    return entityMap;
+  }
+
+  /**
+   * Reads the expected file line by line and maps each JSON record by its "age" value.
+   *
+   * @param fileName the path of the expected file
+   * @param fileMap  the map to populate with the expected records
+   */
+  public static void getFileData(String fileName, Map<String, JsonObject> fileMap) {
+    try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+      String line;
+      while ((line = br.readLine()) != null) {
+        JsonObject json = gson.fromJson(line, JsonObject.class);
+        if (json.has("age")) { // Check if the JSON object has the "age" key
the "id" key + JsonElement idElement = json.get("age"); + if (idElement.isJsonPrimitive()) { + String idKey = idElement.getAsString(); + fileMap.put(idKey, json); + } else { + Log.error("age key not found"); + } + } + } + } catch (IOException e) { + System.err.println("Error reading the file: " + e.getMessage()); + } + } +} diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties index 5f37646dc0..ffa99ee9a0 100644 --- a/src/e2e-test/resources/pluginParameters.properties +++ b/src/e2e-test/resources/pluginParameters.properties @@ -335,3 +335,14 @@ bqExecuteCountDMLUpsertInsert=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENA bqExecuteCountDMLUpsertUpdate=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENAME` WHERE Id=101 AND Value=5000 AND UID='UPDATED RECORD' bqExecuteInsertFile=testdata/BQExecute/BQExecuteInsertFile ## BQEXECUTE-PLUGIN-PROPERTIES-END + +## DATASTORE-PLUGIN-PROPERTIES-START +fieldName=_key_ +name=Antonio +age=23 +result= +address=123.2 +filterOptions=[{"key":"isValid","value":"true"}] +propertyName=firstName +dsExpectedFile=testdata/BQValidationExpectedFiles/datastoreexpectedfile +## DATASTORE-PLUGIN-PROPERTIES-END diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/datastoreexpectedfile b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/datastoreexpectedfile new file mode 100644 index 0000000000..0d718a7977 --- /dev/null +++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/datastoreexpectedfile @@ -0,0 +1 @@ +{"firstName":"Antonio","age":23,"isValid":true,"postalAdd":123.19999694824219} \ No newline at end of file