diff --git a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
index 876d3ec19c07b..d5fbcbd2e9fd8 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
@@ -8,6 +8,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for the previous images of deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 * Added options to fine-tune settings for bulk operations. - [PR 43509](https://github.com/Azure/azure-sdk-for-java/pull/43509)
diff --git a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
index afd38f10d2d08..0646078bc2258 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
@@ -8,6 +8,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for the previous images of deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 * Added options to fine-tune settings for bulk operations. - [PR 43509](https://github.com/Azure/azure-sdk-for-java/pull/43509)
diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
index c683bcc589362..d7bb958624c42 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
@@ -8,6 +8,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for the previous images of deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 * Added options to fine-tune settings for bulk operations. - [PR 43509](https://github.com/Azure/azure-sdk-for-java/pull/43509)
diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
index 608d3e2497a36..040ab80a908f9 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
@@ -8,6 +8,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for the previous images of deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 * Added options to fine-tune settings for bulk operations. - [PR 43509](https://github.com/Azure/azure-sdk-for-java/pull/43509)
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
index 1baaf8697927f..232eafd885a31 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
@@ -8,6 +8,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for the previous images of deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 * Added options to fine-tune settings for bulk operations. - [PR 43509](https://github.com/Azure/azure-sdk-for-java/pull/43509)
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
index cd49d40fe6483..3bf04f0a51fb4 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
@@ -705,28 +705,41 @@
 
   private def parseId(objectNode: ObjectNode): String = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
-    currentNode.get(IdAttributeName) match {
-      case valueNode: JsonNode =>
-        Option(valueNode).fold(null: String)(v => v.asText(null))
-      case _ => null
+    // Deletes in full fidelity change feed may carry no previous image, so the resolved node can be null.
+    if (currentNode != null) {
+      currentNode.get(IdAttributeName) match {
+        case valueNode: JsonNode =>
+          Option(valueNode).fold(null: String)(v => v.asText(null))
+        case _ => null
+      }
+    } else {
+      null
     }
   }
 
   private def parseTimestamp(objectNode: ObjectNode): Long = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
-    currentNode.get(TimestampAttributeName) match {
-      case valueNode: JsonNode =>
-        Option(valueNode).fold(-1L)(v => v.asLong(-1))
-      case _ => -1L
+    if (currentNode != null) {
+      currentNode.get(TimestampAttributeName) match {
+        case valueNode: JsonNode =>
+          Option(valueNode).fold(-1L)(v => v.asLong(-1))
+        case _ => -1L
+      }
+    } else {
+      -1L
     }
   }
 
   private def parseETag(objectNode: ObjectNode): String = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
-    currentNode.get(ETagAttributeName) match {
-      case valueNode: JsonNode =>
-        Option(valueNode).fold(null: String)(v => v.asText(null))
-      case _ => null
+    if (currentNode != null) {
+      currentNode.get(ETagAttributeName) match {
+        case valueNode: JsonNode =>
+          Option(valueNode).fold(null: String)(v => v.asText(null))
+        case _ => null
+      }
+    } else {
+      null
     }
   }
 
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
index 90cdd508bcce3..f6144f78f0aaf 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
@@ -5,9 +5,11 @@ package com.azure.cosmos.spark
 import com.azure.cosmos.SparkBridgeInternal
 import com.azure.cosmos.implementation.changefeed.common.ChangeFeedState
 import com.azure.cosmos.implementation.{TestConfigurations, Utils}
+import com.azure.cosmos.models.PartitionKey
 import com.azure.cosmos.spark.diagnostics.BasicLoggingTrait
 import com.azure.cosmos.spark.udf.{CreateChangeFeedOffsetFromSpark2, CreateSpark2ContinuationsFromChangeFeedOffset, GetFeedRangeForPartitionKeyValue}
 import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.spark.sql.functions
 import org.apache.spark.sql.types._
 
 import java.io.{BufferedReader, InputStreamReader}
@@ -442,104 +444,104 @@ class SparkE2EChangeFeedITest
     rowsArray2 should have size 50 - initialCount
   }
 
-//  "spark change feed query (full fidelity)" should "honor checkpoint location" in {
-//    val cosmosEndpoint = TestConfigurations.HOST
-//    val cosmosMasterKey = TestConfigurations.MASTER_KEY
-//
-//    val checkpointLocation = s"/tmp/checkpoints/${UUID.randomUUID().toString}"
-//    val cfg = Map(
-//      "spark.cosmos.accountEndpoint" -> cosmosEndpoint,
-//      "spark.cosmos.accountKey" -> cosmosMasterKey,
-//      "spark.cosmos.database" -> cosmosDatabase,
-//      "spark.cosmos.container" -> cosmosContainer,
-//      "spark.cosmos.read.inferSchema.enabled" -> "false",
-//      "spark.cosmos.changeFeed.mode" -> "FullFidelity",
-//      "spark.cosmos.changeFeed.startFrom" -> "NOW",
-//      "spark.cosmos.read.partitioning.strategy" -> "Restrictive",
-//      "spark.cosmos.changeFeed.batchCheckpointLocation" -> checkpointLocation
-//    )
-//
-//    val df1 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
-//    val rowsArray1 = df1.collect()
-//    rowsArray1.length == 0 shouldEqual true
-//
-//    df1.schema.equals(
-//      ChangeFeedTable.defaultFullFidelityChangeFeedSchemaForInferenceDisabled) shouldEqual true
-//
-//    val hdfs = org.apache.hadoop.fs.FileSystem.get(spark.sparkContext.hadoopConfiguration)
-//
-//    val startOffsetFolderLocation = Paths.get(checkpointLocation, "startOffset").toString
-//    val startOffsetFileLocation = Paths.get(startOffsetFolderLocation, "0").toString
-//    hdfs.exists(new Path(startOffsetFolderLocation)) shouldEqual true
-//    hdfs.exists(new Path(startOffsetFileLocation)) shouldEqual false
-//
-//    val latestOffsetFolderLocation = Paths.get(checkpointLocation, "latestOffset").toString
-//    val latestOffsetFileLocation = Paths.get(latestOffsetFolderLocation, "0").toString
-//    hdfs.exists(new Path(latestOffsetFolderLocation)) shouldEqual true
-//    hdfs.exists(new Path(latestOffsetFileLocation)) shouldEqual true
-//
-//    // TODO - check for the offset structure to make sure it looks like the new lease format.
-//
-//    hdfs.copyToLocalFile(true, new Path(latestOffsetFileLocation), new Path(startOffsetFileLocation))
-//    assert(!hdfs.exists(new Path(latestOffsetFileLocation)))
-//
-//    val container = cosmosClient.getDatabase(cosmosDatabase).getContainer(cosmosContainer)
-//
-//    val createdObjectIds = new ArrayBuffer[String]()
-//    val replacedObjectIds = new ArrayBuffer[String]()
-//    val deletedObjectIds = new ArrayBuffer[String]()
-//    for (sequenceNumber <- 1 to 5) {
-//      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
-//      objectNode.put("name", "Shrodigner's cat")
-//      objectNode.put("type", "cat")
-//      objectNode.put("age", 20)
-//      objectNode.put("sequenceNumber", sequenceNumber)
-//      val id = UUID.randomUUID().toString
-//      objectNode.put("id", id)
-//      createdObjectIds += id
-//      if (sequenceNumber % 2 == 0) {
-//        replacedObjectIds += id
-//      }
-//      if (sequenceNumber % 3 == 0) {
-//        deletedObjectIds += id
-//      }
-//      container.createItem(objectNode).block()
-//    }
-//
-//    for (id <- replacedObjectIds) {
-//      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
-//      objectNode.put("name", "Shrodigner's cat")
-//      objectNode.put("type", "dog")
-//      objectNode.put("age", 25)
-//      objectNode.put("id", id)
-//      container.replaceItem(objectNode, id, new PartitionKey(id)).block()
-//    }
-//
-//    for (id <- deletedObjectIds) {
-//      container.deleteItem(id, new PartitionKey(id)).block()
-//    }
-//
-//    // wait for the log store to get these changes
-//    Thread.sleep(2000)
-//
-//    val df2 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
-//    val groupedFrame = df2.groupBy(CosmosTableSchemaInferrer.OperationTypeAttributeName)
-//      .agg(functions.collect_list("id").as("ids"))
-//
-//    val collectedFrame = groupedFrame.collect()
-//    collectedFrame.foreach(row => {
-//      val wrappedArray = row.get(1).asInstanceOf[mutable.WrappedArray[String]]
-//      val array = wrappedArray.array
-//      row.get(0) match {
-//        case "create" =>
-//          validateArraysUnordered(createdObjectIds, array)
-//        case "replace" =>
-//          validateArraysUnordered(replacedObjectIds, array)
-//        case "delete" =>
-//          validateArraysUnordered(deletedObjectIds, array)
-//      }
-//    })
-//  }
+  "spark change feed query (full fidelity)" should "honor checkpoint location" in {
+    val cosmosEndpoint = TestConfigurations.HOST
+    val cosmosMasterKey = TestConfigurations.MASTER_KEY
+
+    val checkpointLocation = s"/tmp/checkpoints/${UUID.randomUUID().toString}"
+    val cfg = Map(
+      "spark.cosmos.accountEndpoint" -> cosmosEndpoint,
+      "spark.cosmos.accountKey" -> cosmosMasterKey,
+      "spark.cosmos.database" -> cosmosDatabase,
+      "spark.cosmos.container" -> cosmosContainer,
+      "spark.cosmos.read.inferSchema.enabled" -> "false",
+      "spark.cosmos.changeFeed.mode" -> "FullFidelity",
+      "spark.cosmos.changeFeed.startFrom" -> "NOW",
+      "spark.cosmos.read.partitioning.strategy" -> "Restrictive",
+      "spark.cosmos.changeFeed.batchCheckpointLocation" -> checkpointLocation
+    )
+
+    val df1 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
+    val rowsArray1 = df1.collect()
+    rowsArray1.length == 0 shouldEqual true
+
+    df1.schema.equals(
+      ChangeFeedTable.defaultFullFidelityChangeFeedSchemaForInferenceDisabled) shouldEqual true
+
+    val hdfs = org.apache.hadoop.fs.FileSystem.get(spark.sparkContext.hadoopConfiguration)
+
+    val startOffsetFolderLocation = Paths.get(checkpointLocation, "startOffset").toString
+    val startOffsetFileLocation = Paths.get(startOffsetFolderLocation, "0").toString
+    hdfs.exists(new Path(startOffsetFolderLocation)) shouldEqual true
+    hdfs.exists(new Path(startOffsetFileLocation)) shouldEqual false
+
+    val latestOffsetFolderLocation = Paths.get(checkpointLocation, "latestOffset").toString
+    val latestOffsetFileLocation = Paths.get(latestOffsetFolderLocation, "0").toString
+    hdfs.exists(new Path(latestOffsetFolderLocation)) shouldEqual true
+    hdfs.exists(new Path(latestOffsetFileLocation)) shouldEqual true
+
+    // TODO - check for the offset structure to make sure it looks like the new lease format.
+
+    hdfs.copyToLocalFile(true, new Path(latestOffsetFileLocation), new Path(startOffsetFileLocation))
+    assert(!hdfs.exists(new Path(latestOffsetFileLocation)))
+
+    val container = cosmosClient.getDatabase(cosmosDatabase).getContainer(cosmosContainer)
+
+    val createdObjectIds = new ArrayBuffer[String]()
+    val replacedObjectIds = new ArrayBuffer[String]()
+    val deletedObjectIds = new ArrayBuffer[String]()
+    for (sequenceNumber <- 1 to 5) {
+      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
+      objectNode.put("name", "Schrodinger's cat")
+      objectNode.put("type", "cat")
+      objectNode.put("age", 20)
+      objectNode.put("sequenceNumber", sequenceNumber)
+      val id = UUID.randomUUID().toString
+      objectNode.put("id", id)
+      createdObjectIds += id
+      if (sequenceNumber % 2 == 0) {
+        replacedObjectIds += id
+      }
+      if (sequenceNumber % 3 == 0) {
+        deletedObjectIds += id
+      }
+      container.createItem(objectNode).block()
+    }
+
+    for (id <- replacedObjectIds) {
+      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
+      objectNode.put("name", "Schrodinger's cat")
+      objectNode.put("type", "dog")
+      objectNode.put("age", 25)
+      objectNode.put("id", id)
+      container.replaceItem(objectNode, id, new PartitionKey(id)).block()
+    }
+
+    for (id <- deletedObjectIds) {
+      container.deleteItem(id, new PartitionKey(id)).block()
+    }
+
+    // wait for the log store to get these changes
+    Thread.sleep(2000)
+
+    val df2 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
+    val groupedFrame = df2.groupBy(CosmosTableSchemaInferrer.OperationTypeAttributeName)
+      .agg(functions.collect_list("id").as("ids"))
+
+    val collectedFrame = groupedFrame.collect()
+    collectedFrame.foreach(row => {
+      val wrappedArray = row.get(1).asInstanceOf[mutable.WrappedArray[String]]
+      val array = wrappedArray.array
+      row.get(0) match {
+        case "create" =>
+          validateArraysUnordered(createdObjectIds, array)
+        case "replace" =>
+          validateArraysUnordered(replacedObjectIds, array)
+        case "delete" =>
+          validateArraysUnordered(deletedObjectIds, array)
+      }
+    })
+  }
 
   "spark change feed query (incremental)" can "proceed with simulated Spark2 Checkpoint" in {
     val cosmosEndpoint = TestConfigurations.HOST
diff --git a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java
index fd80a05c4a9f9..3411ec85fe182 100644
--- a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java
+++ b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java
@@ -45,7 +45,7 @@ public CosmosItemIdEncodingTest(CosmosClientBuilder clientBuilder) {
         super(clientBuilder.contentResponseOnWriteEnabled(true));
     }
 
-    @BeforeClass(groups = {"fast", "emulator"}, timeOut = SETUP_TIMEOUT)
+    @BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT)
     public void before_CosmosItemTest() {
         assertThat(this.client).isNull();
         CosmosContainerProperties containerProperties = getCollectionDefinitionWithRangeRangeIndex();
@@ -74,7 +74,7 @@ public void before_CosmosItemTest() {
             "any left-overs with weird encoded ids in the shared container.", containerProperties.getId());
     }
 
-    @AfterClass(groups = {"fast", "emulator"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
+    @AfterClass(groups = { "emulator" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
     public void afterClass() {
         assertThat(this.client).isNotNull();
         if (this.container != null) {
@@ -83,7 +83,7 @@ public void afterClass() {
         this.client.close();
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void plainVanillaId() {
         TestScenario scenario = new TestScenario(
             "PlainVanillaId",
@@ -110,7 +110,7 @@ public void plainVanillaId() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void containerIdWithUnicodeCharacter() {
         TestScenario scenario = new TestScenario(
             "ContainerIdWithUnicode鱀",
@@ -137,7 +137,7 @@ public void containerIdWithUnicodeCharacter() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithWhitespaces() {
         TestScenario scenario = new TestScenario(
             "IdWithWhitespaces",
@@ -164,7 +164,7 @@ public void idWithWhitespaces() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idStartingWithWhitespace() {
         TestScenario scenario = new TestScenario(
             "IdStartingWithWhitespace",
@@ -191,7 +191,7 @@ public void idStartingWithWhitespace() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idStartingWithWhitespaces() {
         TestScenario scenario = new TestScenario(
             "IdStartingWithWhitespaces",
@@ -218,7 +218,7 @@ public void idStartingWithWhitespaces() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idEndingWithWhitespace() {
         TestScenario scenario = new TestScenario(
             "IdEndingWithWhitespace",
@@ -245,7 +245,7 @@ public void idEndingWithWhitespace() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idEndingWithWhitespaces() {
         TestScenario scenario = new TestScenario(
             "IdEndingWithWhitespaces",
@@ -272,7 +272,7 @@ public void idEndingWithWhitespaces() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithUnicodeCharacters() {
         TestScenario scenario = new TestScenario(
             "IdWithUnicodeCharacters",
@@ -299,7 +299,7 @@ public void idWithUnicodeCharacters() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithAllowedSpecialCharacters() {
         TestScenario scenario = new TestScenario(
             "IdWithAllowedSpecialCharacters",
@@ -326,7 +326,7 @@ public void idWithAllowedSpecialCharacters() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithBase64EncodedIdCharacters() {
         String base64EncodedId = "BQE1D3PdG4N4bzU9TKaCIM3qc0TVcZ2/Y3jnsRfwdHC1ombkX3F1dot/SG0/UTq9AbgdX3kOWoP6qL6lJqWeKgV3zwWWPZO/t5X0ehJzv9LGkWld07LID2rhWhGT6huBM6Q=";
         String safeBase64EncodedId = base64EncodedId.replace("/", "-");
@@ -356,7 +356,7 @@ public void idWithBase64EncodedIdCharacters() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idEndingWithPercentEncodedWhitespace() {
         TestScenario scenario = new TestScenario(
"IdEndingWithPercentEncodedWhitespace", @@ -383,7 +383,7 @@ public void idEndingWithPercentEncodedWhitespace() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithPercentEncodedSpecialChar() { TestScenario scenario = new TestScenario( "IdWithPercentEncodedSpecialChar", @@ -410,7 +410,7 @@ public void idWithPercentEncodedSpecialChar() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithDisallowedCharQuestionMark() { TestScenario scenario = new TestScenario( "IdWithDisallowedCharQuestionMark", @@ -437,7 +437,7 @@ public void idWithDisallowedCharQuestionMark() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithDisallowedCharForwardSlash() { TestScenario scenario = new TestScenario( "IdWithDisallowedCharForwardSlash", @@ -464,7 +464,7 @@ public void idWithDisallowedCharForwardSlash() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithDisallowedCharForwardSlashButIdValidationEnabled() { TestScenario scenario = new TestScenario( "IdWithDisallowedCharForwardSlashButIdValidationEnabled", @@ -492,7 +492,7 @@ public void idWithDisallowedCharForwardSlashButIdValidationEnabled() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithDisallowedCharBackSlash() { TestScenario scenario = new TestScenario( "IdWithDisallowedCharBackSlash", @@ -519,7 +519,7 @@ public void idWithDisallowedCharBackSlash() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithDisallowedCharPoundSign() { TestScenario scenario = new TestScenario( "IdWithDisallowedCharPoundSign", @@ -546,7 +546,7 @@ public void idWithDisallowedCharPoundSign() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithCarriageReturn() { TestScenario scenario = new TestScenario( "IdWithCarriageReturn", @@ -573,7 +573,7 @@ public void idWithCarriageReturn() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithTab() { TestScenario scenario = new TestScenario( "IdWithTab", @@ -600,7 +600,7 @@ public void idWithTab() { this.executeTestCase(scenario); } - @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT) + @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void idWithLineFeed() { TestScenario scenario = new TestScenario( "IdWithLineFeed", diff --git a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java index 328e308042275..2e43dcdfbb266 100644 --- a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java +++ b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java @@ -18,7 +18,6 @@ import org.testng.annotations.AfterClass; import 
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Factory;
-import org.testng.annotations.Ignore;
 import org.testng.annotations.Test;
 
 import java.time.Duration;
@@ -29,7 +28,6 @@
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.fail;
 
-@Ignore("Investigating change in emulator behavior")
 public class FullFidelityChangeFeedTest extends TestSuiteBase {
 
     private static final int SETUP_TIMEOUT = 40000;
@@ -117,7 +115,7 @@ public void fullFidelityChangeFeed_FromNowForLogicalPartition() throws Exception
             assertThat(itemChanges.get(3).get("previous")).isEqualTo(itemChanges.get(1).get("current"));
         }
         // Assert delete of item1
-        assertThat(itemChanges.get(4).get("previous").get("id").asText()).isEqualTo(item1.getId());
+        assertThat(itemChanges.get(4).get("previous")).isNull();
         assertThat(itemChanges.get(4).get("current")).isEmpty();
         assertThat(itemChanges.get(4).get("metadata").get("operationType").asText()).isEqualTo("delete");
         assertThat(itemChanges.get(4).get("metadata").get("previousImageLSN").asText()
@@ -174,7 +172,7 @@ public void fullFidelityChangeFeed_FromNowForLogicalPartition() throws Exception
             assertThat(itemChanges.get(1).get("previous")).isEqualTo(itemChanges.get(0).get("current"));
         }
         // Assert delete of item3
-        assertThat(itemChanges.get(2).get("previous").get("id").asText()).isEqualTo(item3.getId());
+        assertThat(itemChanges.get(2).get("previous")).isNull();
         assertThat(itemChanges.get(2).get("current")).isEmpty();
         assertThat(itemChanges.get(2).get("metadata").get("operationType").asText()).isEqualTo("delete");
         assertThat(itemChanges.get(2).get("metadata").get("previousImageLSN").asText()
@@ -249,7 +247,7 @@ public void fullFidelityChangeFeed_FromContinuationToken() throws Exception {
             assertThat(itemChanges.get(2).get("previous")).isEqualTo(itemChanges.get(0).get("current"));
         }
         // Assert delete of item1
-        assertThat(itemChanges.get(3).get("previous").get("id").asText()).isEqualTo(item1.getId());
+        assertThat(itemChanges.get(3).get("previous")).isNull();
         assertThat(itemChanges.get(3).get("current")).isEmpty();
         assertThat(itemChanges.get(3).get("metadata").get("operationType").asText()).isEqualTo("delete");
         assertThat(itemChanges.get(3).get("metadata").get("previousImageLSN").asText()