From ae7d205d47c4838e5437c70a094500e7e05879a7 Mon Sep 17 00:00:00 2001
From: Tomas Varon Saldarriaga
Date: Wed, 18 Dec 2024 21:04:20 -0500
Subject: [PATCH 1/4] Fix java sdk tests for new emulator behavior for ff

---
 .../spark/SparkE2EChangeFeedITest.scala       | 196 +++++++++---------
 .../cosmos/rx/FullFidelityChangeFeedTest.java |   8 +-
 2 files changed, 101 insertions(+), 103 deletions(-)

diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
index f4c858ef0bf48..f6144f78f0aaf 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
@@ -444,104 +444,104 @@ class SparkE2EChangeFeedITest
     rowsArray2 should have size 50 - initialCount
   }
 
-//  "spark change feed query (full fidelity)" should "honor checkpoint location" in {
-//    val cosmosEndpoint = TestConfigurations.HOST
-//    val cosmosMasterKey = TestConfigurations.MASTER_KEY
-//
-//    val checkpointLocation = s"/tmp/checkpoints/${UUID.randomUUID().toString}"
-//    val cfg = Map(
-//      "spark.cosmos.accountEndpoint" -> cosmosEndpoint,
-//      "spark.cosmos.accountKey" -> cosmosMasterKey,
-//      "spark.cosmos.database" -> cosmosDatabase,
-//      "spark.cosmos.container" -> cosmosContainer,
-//      "spark.cosmos.read.inferSchema.enabled" -> "false",
-//      "spark.cosmos.changeFeed.mode" -> "FullFidelity",
-//      "spark.cosmos.changeFeed.startFrom" -> "NOW",
-//      "spark.cosmos.read.partitioning.strategy" -> "Restrictive",
-//      "spark.cosmos.changeFeed.batchCheckpointLocation" -> checkpointLocation
-//    )
-//
-//    val df1 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
-//    val rowsArray1 = df1.collect()
-//    rowsArray1.length == 0 shouldEqual true
-//
-//    df1.schema.equals(
-//      ChangeFeedTable.defaultFullFidelityChangeFeedSchemaForInferenceDisabled) shouldEqual true
-//
-//    val hdfs = org.apache.hadoop.fs.FileSystem.get(spark.sparkContext.hadoopConfiguration)
-//
-//    val startOffsetFolderLocation = Paths.get(checkpointLocation, "startOffset").toString
-//    val startOffsetFileLocation = Paths.get(startOffsetFolderLocation, "0").toString
-//    hdfs.exists(new Path(startOffsetFolderLocation)) shouldEqual true
-//    hdfs.exists(new Path(startOffsetFileLocation)) shouldEqual false
-//
-//    val latestOffsetFolderLocation = Paths.get(checkpointLocation, "latestOffset").toString
-//    val latestOffsetFileLocation = Paths.get(latestOffsetFolderLocation, "0").toString
-//    hdfs.exists(new Path(latestOffsetFolderLocation)) shouldEqual true
-//    hdfs.exists(new Path(latestOffsetFileLocation)) shouldEqual true
-//
-//    // TODO - check for the offset structure to make sure it looks like the new lease format.
-//
-//    hdfs.copyToLocalFile(true, new Path(latestOffsetFileLocation), new Path(startOffsetFileLocation))
-//    assert(!hdfs.exists(new Path(latestOffsetFileLocation)))
-//
-//    val container = cosmosClient.getDatabase(cosmosDatabase).getContainer(cosmosContainer)
-//
-//    val createdObjectIds = new ArrayBuffer[String]()
-//    val replacedObjectIds = new ArrayBuffer[String]()
-//    val deletedObjectIds = new ArrayBuffer[String]()
-//    for (sequenceNumber <- 1 to 5) {
-//      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
-//      objectNode.put("name", "Shrodigner's cat")
-//      objectNode.put("type", "cat")
-//      objectNode.put("age", 20)
-//      objectNode.put("sequenceNumber", sequenceNumber)
-//      val id = UUID.randomUUID().toString
-//      objectNode.put("id", id)
-//      createdObjectIds += id
-//      if (sequenceNumber % 2 == 0) {
-//        replacedObjectIds += id
-//      }
-//      if (sequenceNumber % 3 == 0) {
-//        deletedObjectIds += id
-//      }
-//      container.createItem(objectNode).block()
-//    }
-//
-//    for (id <- replacedObjectIds) {
-//      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
-//      objectNode.put("name", "Shrodigner's cat")
-//      objectNode.put("type", "dog")
-//      objectNode.put("age", 25)
-//      objectNode.put("id", id)
-//      container.replaceItem(objectNode, id, new PartitionKey(id)).block()
-//    }
-//
-//    for (id <- deletedObjectIds) {
-//      container.deleteItem(id, new PartitionKey(id)).block()
-//    }
-//
-//    // wait for the log store to get these changes
-//    Thread.sleep(2000)
-//
-//    val df2 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
-//    val groupedFrame = df2.groupBy(CosmosTableSchemaInferrer.OperationTypeAttributeName)
-//      .agg(functions.collect_list("id").as("ids"))
-//
-//    val collectedFrame = groupedFrame.collect()
-//    collectedFrame.foreach(row => {
-//      val wrappedArray = row.get(1).asInstanceOf[mutable.WrappedArray[String]]
-//      val array = wrappedArray.array
-//      row.get(0) match {
-//        case "create" =>
-//          validateArraysUnordered(createdObjectIds, array)
-//        case "replace" =>
-//          validateArraysUnordered(replacedObjectIds, array)
-//        case "delete" =>
-//          validateArraysUnordered(deletedObjectIds, array)
-//      }
-//    })
-//  }
+  "spark change feed query (full fidelity)" should "honor checkpoint location" in {
+    val cosmosEndpoint = TestConfigurations.HOST
+    val cosmosMasterKey = TestConfigurations.MASTER_KEY
+
+    val checkpointLocation = s"/tmp/checkpoints/${UUID.randomUUID().toString}"
+    val cfg = Map(
+      "spark.cosmos.accountEndpoint" -> cosmosEndpoint,
+      "spark.cosmos.accountKey" -> cosmosMasterKey,
+      "spark.cosmos.database" -> cosmosDatabase,
+      "spark.cosmos.container" -> cosmosContainer,
+      "spark.cosmos.read.inferSchema.enabled" -> "false",
+      "spark.cosmos.changeFeed.mode" -> "FullFidelity",
+      "spark.cosmos.changeFeed.startFrom" -> "NOW",
+      "spark.cosmos.read.partitioning.strategy" -> "Restrictive",
+      "spark.cosmos.changeFeed.batchCheckpointLocation" -> checkpointLocation
+    )
+
+    val df1 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
+    val rowsArray1 = df1.collect()
+    rowsArray1.length == 0 shouldEqual true
+
+    df1.schema.equals(
+      ChangeFeedTable.defaultFullFidelityChangeFeedSchemaForInferenceDisabled) shouldEqual true
+
+    val hdfs = org.apache.hadoop.fs.FileSystem.get(spark.sparkContext.hadoopConfiguration)
+
+    val startOffsetFolderLocation = Paths.get(checkpointLocation, "startOffset").toString
+    val startOffsetFileLocation = Paths.get(startOffsetFolderLocation, "0").toString
+    hdfs.exists(new Path(startOffsetFolderLocation)) shouldEqual true
+    hdfs.exists(new Path(startOffsetFileLocation)) shouldEqual false
+
+    val latestOffsetFolderLocation = Paths.get(checkpointLocation, "latestOffset").toString
+    val latestOffsetFileLocation = Paths.get(latestOffsetFolderLocation, "0").toString
+    hdfs.exists(new Path(latestOffsetFolderLocation)) shouldEqual true
+    hdfs.exists(new Path(latestOffsetFileLocation)) shouldEqual true
+
+    // TODO - check for the offset structure to make sure it looks like the new lease format.
+
+    hdfs.copyToLocalFile(true, new Path(latestOffsetFileLocation), new Path(startOffsetFileLocation))
+    assert(!hdfs.exists(new Path(latestOffsetFileLocation)))
+
+    val container = cosmosClient.getDatabase(cosmosDatabase).getContainer(cosmosContainer)
+
+    val createdObjectIds = new ArrayBuffer[String]()
+    val replacedObjectIds = new ArrayBuffer[String]()
+    val deletedObjectIds = new ArrayBuffer[String]()
+    for (sequenceNumber <- 1 to 5) {
+      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
+      objectNode.put("name", "Shrodigner's cat")
+      objectNode.put("type", "cat")
+      objectNode.put("age", 20)
+      objectNode.put("sequenceNumber", sequenceNumber)
+      val id = UUID.randomUUID().toString
+      objectNode.put("id", id)
+      createdObjectIds += id
+      if (sequenceNumber % 2 == 0) {
+        replacedObjectIds += id
+      }
+      if (sequenceNumber % 3 == 0) {
+        deletedObjectIds += id
+      }
+      container.createItem(objectNode).block()
+    }
+
+    for (id <- replacedObjectIds) {
+      val objectNode = Utils.getSimpleObjectMapper.createObjectNode()
+      objectNode.put("name", "Shrodigner's cat")
+      objectNode.put("type", "dog")
+      objectNode.put("age", 25)
+      objectNode.put("id", id)
+      container.replaceItem(objectNode, id, new PartitionKey(id)).block()
+    }
+
+    for (id <- deletedObjectIds) {
+      container.deleteItem(id, new PartitionKey(id)).block()
+    }
+
+    // wait for the log store to get these changes
+    Thread.sleep(2000)
+
+    val df2 = spark.read.format("cosmos.oltp.changeFeed").options(cfg).load()
+    val groupedFrame = df2.groupBy(CosmosTableSchemaInferrer.OperationTypeAttributeName)
+      .agg(functions.collect_list("id").as("ids"))
+
+    val collectedFrame = groupedFrame.collect()
+    collectedFrame.foreach(row => {
+      val wrappedArray = row.get(1).asInstanceOf[mutable.WrappedArray[String]]
+      val array = wrappedArray.array
+      row.get(0) match {
+        case "create" =>
+          validateArraysUnordered(createdObjectIds, array)
+        case "replace" =>
+          validateArraysUnordered(replacedObjectIds, array)
+        case "delete" =>
+          validateArraysUnordered(deletedObjectIds, array)
+      }
+    })
+  }
 
   "spark change feed query (incremental)" can "proceed with simulated Spark2 Checkpoint" in {
     val cosmosEndpoint = TestConfigurations.HOST
diff --git a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java
index 328e308042275..2e43dcdfbb266 100644
--- a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java
+++ b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/rx/FullFidelityChangeFeedTest.java
@@ -18,7 +18,6 @@
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Factory;
-import org.testng.annotations.Ignore;
 import org.testng.annotations.Test;
 
 import java.time.Duration;
@@ -29,7 +28,6 @@
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.fail;
 
-@Ignore("Investigating change in emulator behavior")
 public class FullFidelityChangeFeedTest extends TestSuiteBase {
     private static final int SETUP_TIMEOUT = 40000;
@@ -117,7 +115,7 @@ public void fullFidelityChangeFeed_FromNowForLogicalPartition() throws Exception
             assertThat(itemChanges.get(3).get("previous")).isEqualTo(itemChanges.get(1).get("current"));
         }
         // Assert delete of item1
-        assertThat(itemChanges.get(4).get("previous").get("id").asText()).isEqualTo(item1.getId());
+        assertThat(itemChanges.get(4).get("previous")).isNull();
         assertThat(itemChanges.get(4).get("current")).isEmpty();
         assertThat(itemChanges.get(4).get("metadata").get("operationType").asText()).isEqualTo("delete");
         assertThat(itemChanges.get(4).get("metadata").get("previousImageLSN").asText()
@@ -174,7 +172,7 @@ public void fullFidelityChangeFeed_FromNowForLogicalPartition() throws Exception
             assertThat(itemChanges.get(1).get("previous")).isEqualTo(itemChanges.get(0).get("current"));
         }
         // Assert delete of item3
-        assertThat(itemChanges.get(2).get("previous").get("id").asText()).isEqualTo(item3.getId());
+        assertThat(itemChanges.get(2).get("previous")).isNull();
         assertThat(itemChanges.get(2).get("current")).isEmpty();
         assertThat(itemChanges.get(2).get("metadata").get("operationType").asText()).isEqualTo("delete");
         assertThat(itemChanges.get(2).get("metadata").get("previousImageLSN").asText()
@@ -249,7 +247,7 @@ public void fullFidelityChangeFeed_FromContinuationToken() throws Exception {
             assertThat(itemChanges.get(2).get("previous")).isEqualTo(itemChanges.get(0).get("current"));
         }
         // Assert delete of item1
-        assertThat(itemChanges.get(3).get("previous").get("id").asText()).isEqualTo(item1.getId());
+        assertThat(itemChanges.get(3).get("previous")).isNull();
         assertThat(itemChanges.get(3).get("current")).isEmpty();
         assertThat(itemChanges.get(3).get("metadata").get("operationType").asText()).isEqualTo("delete");
         assertThat(itemChanges.get(3).get("metadata").get("previousImageLSN").asText()

From f871623d39f3b0a7dc723ecc95f2f5aac13d232d Mon Sep 17 00:00:00 2001
From: tvaron3
Date: Wed, 18 Dec 2024 21:49:52 -0500
Subject: [PATCH 2/4] Changing spark to ignore previous node for change feed

---
 .../cosmos/spark/CosmosRowConverterBase.scala | 33 ++++++++++++-------
 1 file changed, 21 insertions(+), 12 deletions(-)

diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
index cd49d40fe6483..3923e6ca42c4c 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
@@ -705,29 +705,38 @@ private[cosmos] class CosmosRowConverterBase(
 
   private def parseId(objectNode: ObjectNode): String = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
-    currentNode.get(IdAttributeName) match {
-      case valueNode: JsonNode =>
-        Option(valueNode).fold(null: String)(v => v.asText(null))
-      case _ => null
+    if (currentNode != null) {
+      currentNode.get(IdAttributeName) match {
+        case valueNode: JsonNode =>
+          Option(valueNode).fold(null: String)(v => v.asText(null))
+        case _ => null
+      }
     }
+    null
   }
 
   private def parseTimestamp(objectNode: ObjectNode): Long = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
-    currentNode.get(TimestampAttributeName) match {
-      case valueNode: JsonNode =>
-        Option(valueNode).fold(-1L)(v => v.asLong(-1))
-      case _ => -1L
+    if (currentNode != null) {
+      currentNode.get(TimestampAttributeName) match {
+        case valueNode: JsonNode =>
+          Option(valueNode).fold(-1L)(v => v.asLong(-1))
+        case _ => -1L
+      }
     }
+    null
   }
 
   private def parseETag(objectNode: ObjectNode): String = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
-    currentNode.get(ETagAttributeName) match {
-      case valueNode: JsonNode =>
-        Option(valueNode).fold(null: String)(v => v.asText(null))
-      case _ => null
+    if (currentNode != null) {
+      currentNode.get(ETagAttributeName) match {
+        case valueNode: JsonNode =>
+          Option(valueNode).fold(null: String)(v => v.asText(null))
+        case _ => null
+      }
     }
+    null
   }
 
   private def getCurrentOrPreviousNode(objectNode: ObjectNode): JsonNode = {

From 9ce37bae6ba594ac8a3a0f0e75e1e9d61738eda0 Mon Sep 17 00:00:00 2001
From: tvaron3
Date: Wed, 18 Dec 2024 23:36:36 -0500
Subject: [PATCH 3/4] Updating changelog

---
 sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md           | 1 +
 sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md           | 1 +
 sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md           | 1 +
 sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md           | 1 +
 sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md           | 1 +
 .../scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala | 2 +-
 6 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
index 9d31c150b1abe..01163034e1151 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
@@ -7,6 +7,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for previous images for deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
index 05370ba9680fc..5801b37ac60e1 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
@@ -7,6 +7,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for previous images for deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
index 258bca0398bdd..d3bd565174399 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
@@ -7,6 +7,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for previous images for deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
index 3f95bc7111788..ffd75497d927c 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
@@ -7,6 +7,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for previous images for deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
index 70baf75320650..e2ee2760a2cc2 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
@@ -7,6 +7,7 @@
 #### Breaking Changes
 
 #### Bugs Fixed
+* Added null checking for previous images for deletes in full fidelity change feed. - See [PR 43483](https://github.com/Azure/azure-sdk-for-java/pull/43483)
 
 #### Other Changes
 
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
index 3923e6ca42c4c..6b65e1018b8ba 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
@@ -724,7 +724,7 @@ private[cosmos] class CosmosRowConverterBase(
         case _ => -1L
       }
     }
-    null
+    -1L
   }
 
   private def parseETag(objectNode: ObjectNode): String = {

From f1f922f2298c3166e91b279d1306bd9e4dde1357 Mon Sep 17 00:00:00 2001
From: tvaron3
Date: Thu, 19 Dec 2024 13:18:17 -0500
Subject: [PATCH 4/4] Fixed tests

---
 .../cosmos/spark/CosmosRowConverterBase.scala |  8 ++--
 .../cosmos/CosmosItemIdEncodingTest.java      | 44 +++++++++----------
 2 files changed, 26 insertions(+), 26 deletions(-)

diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
index 6b65e1018b8ba..7003f3f8eebcd 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverterBase.scala
@@ -694,7 +694,7 @@ private[cosmos] class CosmosRowConverterBase(
   private def parseTtlExpired(objectNode: ObjectNode): Boolean = {
     objectNode.get(MetadataJsonBodyAttributeName) match {
       case metadataNode: JsonNode =>
-        metadataNode.get(TimeToLiveExpiredPropertyName) match {
+        return metadataNode.get(TimeToLiveExpiredPropertyName) match {
           case valueNode: JsonNode =>
             Option(valueNode).fold(false)(v => v.asBoolean(false))
           case _ => false
@@ -706,7 +706,7 @@ private[cosmos] class CosmosRowConverterBase(
   private def parseId(objectNode: ObjectNode): String = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
     if (currentNode != null) {
-      currentNode.get(IdAttributeName) match {
+      return currentNode.get(IdAttributeName) match {
         case valueNode: JsonNode =>
           Option(valueNode).fold(null: String)(v => v.asText(null))
         case _ => null
@@ -718,7 +718,7 @@ private[cosmos] class CosmosRowConverterBase(
   private def parseTimestamp(objectNode: ObjectNode): Long = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
     if (currentNode != null) {
-      currentNode.get(TimestampAttributeName) match {
+      return currentNode.get(TimestampAttributeName) match {
         case valueNode: JsonNode =>
           Option(valueNode).fold(-1L)(v => v.asLong(-1))
         case _ => -1L
@@ -730,7 +730,7 @@ private[cosmos] class CosmosRowConverterBase(
   private def parseETag(objectNode: ObjectNode): String = {
     val currentNode = getCurrentOrPreviousNode(objectNode)
     if (currentNode != null) {
-      currentNode.get(ETagAttributeName) match {
+      return currentNode.get(ETagAttributeName) match {
         case valueNode: JsonNode =>
           Option(valueNode).fold(null: String)(v => v.asText(null))
         case _ => null
diff --git a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java
index fd80a05c4a9f9..3411ec85fe182 100644
--- a/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java
+++ b/sdk/cosmos/azure-cosmos-tests/src/test/java/com/azure/cosmos/CosmosItemIdEncodingTest.java
@@ -45,7 +45,7 @@ public CosmosItemIdEncodingTest(CosmosClientBuilder clientBuilder) {
         super(clientBuilder.contentResponseOnWriteEnabled(true));
     }
 
-    @BeforeClass(groups = {"fast", "emulator"}, timeOut = SETUP_TIMEOUT)
+    @BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT)
     public void before_CosmosItemTest() {
         assertThat(this.client).isNull();
         CosmosContainerProperties containerProperties = getCollectionDefinitionWithRangeRangeIndex();
@@ -74,7 +74,7 @@ public void before_CosmosItemTest() {
             "any left-overs with weird encoded ids in the shared container.",
             containerProperties.getId());
     }
 
-    @AfterClass(groups = {"fast", "emulator"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
+    @AfterClass(groups = { "emulator" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
    public void afterClass() {
         assertThat(this.client).isNotNull();
         if (this.container != null) {
@@ -83,7 +83,7 @@ public void afterClass() {
         this.client.close();
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void plainVanillaId() {
         TestScenario scenario = new TestScenario(
             "PlainVanillaId",
@@ -110,7 +110,7 @@ public void plainVanillaId() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void containerIdWithUnicodeCharacter() {
         TestScenario scenario = new TestScenario(
             "ContainerIdWithUnicodeé±€",
@@ -137,7 +137,7 @@ public void containerIdWithUnicodeCharacter() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithWhitespaces() {
         TestScenario scenario = new TestScenario(
             "IdWithWhitespaces",
@@ -164,7 +164,7 @@ public void idWithWhitespaces() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idStartingWithWhitespace() {
         TestScenario scenario = new TestScenario(
             "IdStartingWithWhitespace",
@@ -191,7 +191,7 @@ public void idStartingWithWhitespace() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idStartingWithWhitespaces() {
         TestScenario scenario = new TestScenario(
             "IdStartingWithWhitespaces",
@@ -218,7 +218,7 @@ public void idStartingWithWhitespaces() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idEndingWithWhitespace() {
         TestScenario scenario = new TestScenario(
             "IdEndingWithWhitespace",
@@ -245,7 +245,7 @@ public void idEndingWithWhitespace() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idEndingWithWhitespaces() {
         TestScenario scenario = new TestScenario(
             "IdEndingWithWhitespaces",
@@ -272,7 +272,7 @@ public void idEndingWithWhitespaces() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithUnicodeCharacters() {
         TestScenario scenario = new TestScenario(
             "IdWithUnicodeCharacters",
@@ -299,7 +299,7 @@ public void idWithUnicodeCharacters() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithAllowedSpecialCharacters() {
         TestScenario scenario = new TestScenario(
             "IdWithAllowedSpecialCharacters",
@@ -326,7 +326,7 @@ public void idWithAllowedSpecialCharacters() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithBase64EncodedIdCharacters() {
         String base64EncodedId = "BQE1D3PdG4N4bzU9TKaCIM3qc0TVcZ2/Y3jnsRfwdHC1ombkX3F1dot/SG0/UTq9AbgdX3kOWoP6qL6lJqWeKgV3zwWWPZO/t5X0ehJzv9LGkWld07LID2rhWhGT6huBM6Q=";
         String safeBase64EncodedId = base64EncodedId.replace("/", "-");
@@ -356,7 +356,7 @@ public void idWithBase64EncodedIdCharacters() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idEndingWithPercentEncodedWhitespace() {
         TestScenario scenario = new TestScenario(
             "IdEndingWithPercentEncodedWhitespace",
@@ -383,7 +383,7 @@ public void idEndingWithPercentEncodedWhitespace() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithPercentEncodedSpecialChar() {
         TestScenario scenario = new TestScenario(
             "IdWithPercentEncodedSpecialChar",
@@ -410,7 +410,7 @@ public void idWithPercentEncodedSpecialChar() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithDisallowedCharQuestionMark() {
         TestScenario scenario = new TestScenario(
             "IdWithDisallowedCharQuestionMark",
@@ -437,7 +437,7 @@ public void idWithDisallowedCharQuestionMark() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithDisallowedCharForwardSlash() {
         TestScenario scenario = new TestScenario(
             "IdWithDisallowedCharForwardSlash",
@@ -464,7 +464,7 @@ public void idWithDisallowedCharForwardSlash() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithDisallowedCharForwardSlashButIdValidationEnabled() {
         TestScenario scenario = new TestScenario(
             "IdWithDisallowedCharForwardSlashButIdValidationEnabled",
@@ -492,7 +492,7 @@ public void idWithDisallowedCharForwardSlashButIdValidationEnabled() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithDisallowedCharBackSlash() {
         TestScenario scenario = new TestScenario(
             "IdWithDisallowedCharBackSlash",
@@ -519,7 +519,7 @@ public void idWithDisallowedCharBackSlash() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithDisallowedCharPoundSign() {
         TestScenario scenario = new TestScenario(
             "IdWithDisallowedCharPoundSign",
@@ -546,7 +546,7 @@ public void idWithDisallowedCharPoundSign() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithCarriageReturn() {
         TestScenario scenario = new TestScenario(
             "IdWithCarriageReturn",
@@ -573,7 +573,7 @@ public void idWithCarriageReturn() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithTab() {
         TestScenario scenario = new TestScenario(
             "IdWithTab",
@@ -600,7 +600,7 @@ public void idWithTab() {
         this.executeTestCase(scenario);
     }
 
-    @Test(groups = { "fast", "emulator" }, timeOut = TIMEOUT)
+    @Test(groups = { "emulator" }, timeOut = TIMEOUT)
     public void idWithLineFeed() {
         TestScenario scenario = new TestScenario(
             "IdWithLineFeed",