From fcafa9c122a8485fc4ac6ed4a7fd6159958e47ff Mon Sep 17 00:00:00 2001
From: ZhongYujiang <42907416+zhongyujiang@users.noreply.github.com>
Date: Tue, 6 Aug 2024 11:11:25 +0800
Subject: [PATCH] Fix tests.

---
 .../apache/paimon/spark/SparkReadITCase.java | 39 +++++++++++++++----
 1 file changed, 31 insertions(+), 8 deletions(-)

diff --git a/paimon-spark/paimon-spark-common/src/test/java/org/apache/paimon/spark/SparkReadITCase.java b/paimon-spark/paimon-spark-common/src/test/java/org/apache/paimon/spark/SparkReadITCase.java
index 744801aadf37a..122e051672e88 100644
--- a/paimon-spark/paimon-spark-common/src/test/java/org/apache/paimon/spark/SparkReadITCase.java
+++ b/paimon-spark/paimon-spark-common/src/test/java/org/apache/paimon/spark/SparkReadITCase.java
@@ -190,13 +190,21 @@ public void testCreateTableAs() {
         spark.sql("INSERT INTO partitionedTable VALUES(1,'aaa','bbb')");
         spark.sql(
                 "CREATE TABLE partitionedTableAs PARTITIONED BY (a) AS SELECT * FROM partitionedTable");
+
+        String tablePath = new Path(warehousePath, "default.db/partitionedTableAs").toString();
         assertThat(spark.sql("SHOW CREATE TABLE partitionedTableAs").collectAsList().toString())
                 .isEqualTo(
                         String.format(
-                                "[[%s" + "PARTITIONED BY (a)\n" + "LOCATION '%s'\n" + "]]",
+                                "[[%s"
+                                        + "PARTITIONED BY (a)\n"
+                                        + "LOCATION '%s'\n"
+                                        + "TBLPROPERTIES (\n"
+                                        + "  'path' = '%s')\n"
+                                        + "]]",
                                 showCreateString(
                                         "partitionedTableAs", "a BIGINT", "b STRING", "c STRING"),
-                                new Path(warehousePath, "default.db/partitionedTableAs")));
+                                tablePath,
+                                tablePath));
         List<Row> resultPartition = spark.sql("SELECT * FROM partitionedTableAs").collectAsList();
         assertThat(resultPartition.stream().map(Row::toString))
                 .containsExactlyInAnyOrder("[1,aaa,bbb]");
@@ -213,17 +221,21 @@ public void testCreateTableAs() {
         spark.sql("INSERT INTO testTable VALUES(1,'a','b')");
         spark.sql(
                 "CREATE TABLE testTableAs TBLPROPERTIES ('file.format' = 'parquet') AS SELECT * FROM testTable");
+
+        String testTableAsPath = new Path(warehousePath, "default.db/testTableAs").toString();
         assertThat(spark.sql("SHOW CREATE TABLE testTableAs").collectAsList().toString())
                 .isEqualTo(
                         String.format(
                                 "[[%s"
                                         + "LOCATION '%s'\n"
                                         + "TBLPROPERTIES (\n"
-                                        + "  'file.format' = 'parquet')\n"
+                                        + "  'file.format' = 'parquet',\n"
+                                        + "  'path' = '%s')\n"
                                         + "]]",
                                 showCreateString(
                                         "testTableAs", "a BIGINT", "b VARCHAR(10)", "c CHAR(10)"),
-                                new Path(warehousePath, "default.db/testTableAs")));
+                                testTableAsPath,
+                                testTableAsPath));
 
         List<Row> resultProp = spark.sql("SELECT * FROM testTableAs").collectAsList();
         assertThat(resultProp.stream().map(Row::toString))
@@ -241,15 +253,18 @@ public void testCreateTableAs() {
                         + "COMMENT 'table comment'");
         spark.sql("INSERT INTO t_pk VALUES(1,'aaa','bbb')");
         spark.sql("CREATE TABLE t_pk_as TBLPROPERTIES ('primary-key' = 'a') AS SELECT * FROM t_pk");
+
+        String tPkAsPath = new Path(warehousePath, "default.db/t_pk_as").toString();
         assertThat(spark.sql("SHOW CREATE TABLE t_pk_as").collectAsList().toString())
                 .isEqualTo(
                         String.format(
                                 "[[%s"
                                         + "LOCATION '%s'\n"
-                                        + "TBLPROPERTIES (\n  'primary-key' = 'a')\n]]",
+                                        + "TBLPROPERTIES (\n  'path' = '%s',\n  'primary-key' = 'a')\n]]",
                                 showCreateString(
                                         "t_pk_as", "a BIGINT NOT NULL", "b STRING", "c STRING"),
-                                new Path(warehousePath, "default.db/t_pk_as")));
+                                tPkAsPath,
+                                tPkAsPath));
 
         List<Row> resultPk = spark.sql("SELECT * FROM t_pk_as").collectAsList();
         assertThat(resultPk.stream().map(Row::toString)).containsExactlyInAnyOrder("[1,aaa,bbb]");
@@ -268,6 +283,8 @@ public void testCreateTableAs() {
         spark.sql("INSERT INTO t_all VALUES(1,2,'bbb','2020-01-01','12')");
         spark.sql(
                 "CREATE TABLE t_all_as PARTITIONED BY (dt) TBLPROPERTIES ('primary-key' = 'dt,hh') AS SELECT * FROM t_all");
+
+        String tAllAsPath = new Path(warehousePath, "default.db/t_all_as").toString();
         assertThat(spark.sql("SHOW CREATE TABLE t_all_as").collectAsList().toString())
                 .isEqualTo(
                         String.format(
@@ -275,6 +292,7 @@ public void testCreateTableAs() {
                                         + "PARTITIONED BY (dt)\n"
                                         + "LOCATION '%s'\n"
                                         + "TBLPROPERTIES (\n"
+                                        + "  'path' = '%s',\n"
                                         + "  'primary-key' = 'dt,hh')\n"
                                         + "]]",
                                 showCreateString(
@@ -284,7 +302,8 @@
                                         "behavior STRING",
                                         "dt STRING NOT NULL",
                                         "hh STRING NOT NULL"),
-                                new Path(warehousePath, "default.db/t_all_as")));
+                                tAllAsPath,
+                                tAllAsPath));
         List<Row> resultAll = spark.sql("SELECT * FROM t_all_as").collectAsList();
         assertThat(resultAll.stream().map(Row::toString))
                 .containsExactlyInAnyOrder("[1,2,bbb,2020-01-01,12]");
@@ -361,6 +380,8 @@ public void testShowCreateTable() {
                         + " 'k1' = 'v1'\n"
                         + ")");
 
+        String tablePath = new Path(warehousePath, "default.db/tbl").toString();
+
         assertThat(spark.sql("SHOW CREATE TABLE tbl").collectAsList().toString())
                 .isEqualTo(
                         String.format(
@@ -370,12 +391,14 @@
                                         + "LOCATION '%s'\n"
                                         + "TBLPROPERTIES (\n"
                                         + "  'k1' = 'v1',\n"
+                                        + "  'path' = '%s',\n"
                                         + "  'primary-key' = 'a,b')\n]]",
                                 showCreateString(
                                         "tbl",
                                         "a INT NOT NULL COMMENT 'a comment'",
                                         "b STRING NOT NULL"),
-                                new Path(warehousePath, "default.db/tbl")));
+                                tablePath,
+                                tablePath));
     }
 
     @Test
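
Note, not part of the patch itself: these assertions changed because SHOW CREATE
TABLE for a Paimon table now reports the table's storage location both in the
LOCATION clause and as a 'path' entry under TBLPROPERTIES, so each expected
string gains a "'path' = '%s'" line and binds the same path string to both
placeholders. A minimal sketch of the updated assertion pattern, using a
hypothetical table name "someTable" and assuming the fixture members that
SparkReadITCase already provides (spark, warehousePath, showCreateString, and
AssertJ's assertThat):

    // Sketch only; "someTable" is illustrative. spark, warehousePath and
    // showCreateString come from the surrounding test class.
    String path = new Path(warehousePath, "default.db/someTable").toString();
    assertThat(spark.sql("SHOW CREATE TABLE someTable").collectAsList().toString())
            .isEqualTo(
                    String.format(
                            "[[%s"
                                    + "LOCATION '%s'\n"
                                    + "TBLPROPERTIES (\n"
                                    + "  'path' = '%s')\n"
                                    + "]]",
                            showCreateString("someTable", "a BIGINT", "b STRING"),
                            path, // fills the LOCATION '%s' placeholder
                            path)); // fills the 'path' property placeholder

Reusing one local String for both format arguments keeps the LOCATION clause and
the 'path' property in lockstep, which is why each test hoists the path into a
variable instead of constructing the Path inline.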