From 102ec3e3fef506c4801d23239e434813b195d3fe Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Sat, 1 Jun 2024 18:50:21 +0800
Subject: [PATCH 01/11] [spark] Support insert overwrite tests for Spark 3.2
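
This suite covers static and dynamic INSERT OVERWRITE on non-partitioned,
single-partitioned and multi-partitioned tables, with and without primary
keys and across bucket modes. A minimal sketch of the two behaviors the
tests assert, using statements taken from the suite below:

    // static mode (the default): the whole table, or exactly the partition
    // named in the PARTITION clause, is replaced
    spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')")
    spark.sql("INSERT OVERWRITE T PARTITION (a = 1) VALUES (5, '5'), (7, '7')")

    // dynamic mode: only the partitions present in the incoming rows are replaced
    withSQLConf("spark.sql.sources.partitionOverwriteMode" -> "dynamic") {
      spark.sql("INSERT OVERWRITE T VALUES (1, 5, '5'), (1, 7, '7')")
    }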
---
docs/content/engines/overview.md | 6 +-
.../spark/sql/InsertOverwriteTest.scala | 361 ++++++++++++++++++
2 files changed, 364 insertions(+), 3 deletions(-)
create mode 100644 paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
diff --git a/docs/content/engines/overview.md b/docs/content/engines/overview.md
index 19f3bf1c16bc..4a7e4b907255 100644
--- a/docs/content/engines/overview.md
+++ b/docs/content/engines/overview.md
@@ -28,10 +28,10 @@ under the License.
## Compatibility Matrix
-| Engine | Version | Batch Read | Batch Write | Create Table | Alter Table | Streaming Write | Streaming Read | Batch Overwrite | DELETE & UPDATE | MERGE INTO |
-|:-------------------------------------------------------------------------------:|:-------------:|:-----------:|:-------------:|:-------------:|:-------------:|:----------------:|:----------------:|:----------------:|:------------------:|:-----------:|
+| Engine | Version | Batch Read | Batch Write | Create Table | Alter Table | Streaming Write | Streaming Read | Batch Overwrite | DELETE & UPDATE | MERGE INTO |
+|:-------------------------------------------------------------------------------:|:-------------:|:-----------:|:-------------:|:-------------:|:-------------:|:----------------:|:----------------:|:---------------:|:------------------:|:-----------:|
| Flink | 1.15 - 1.19 | ✅ | ✅ | ✅ | ✅(1.17+) | ✅ | ✅ | ✅ | ✅(1.17+) | ❌ |
-| Spark | 3.1 - 3.5 | ✅ | ✅(3.3+) | ✅ | ✅ | ✅(3.3+) | ✅(3.3+) | ✅(3.3+) | ✅(3.2+) | ✅(3.2+) |
+| Spark | 3.1 - 3.5 | ✅ | ✅(3.3+) | ✅ | ✅ | ✅(3.3+) | ✅(3.3+) | ✅(3.2+) | ✅(3.2+) | ✅(3.2+) |
| Hive | 2.1 - 3.1 | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ |
| Trino | 420 - 439 | ✅ | ✅(427+) | ✅(427+) | ✅(427+) | ❌ | ❌ | ❌ | ❌ | ❌ |
| Presto | 0.236 - 0.280 | ✅ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ |
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
new file mode 100644
index 000000000000..d1252e264207
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
@@ -0,0 +1,361 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+import org.apache.paimon.spark.PaimonSparkTestBase
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.types._
+
+class InsertOverwriteTest extends PaimonSparkTestBase {
+
+ withPk.foreach {
+ hasPk =>
+ bucketModes.foreach {
+ bucket =>
+ test(s"insert overwrite non-partitioned table: hasPk: $hasPk, bucket: $bucket") {
+ val primaryKeysProp = if (hasPk) {
+ "'primary-key'='a,b',"
+ } else {
+ ""
+ }
+
+ val bucketKeyProp = if (bucket > 0) {
+ ",'bucket-key'='b'"
+ } else {
+ ""
+ }
+
+ spark.sql(s"""
+ |CREATE TABLE T (a INT, b INT, c STRING)
+ |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
+ |""".stripMargin)
+
+ spark.sql("INSERT INTO T values (1, 1, '1'), (2, 2, '2')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 1, "1") :: Row(2, 2, "2") :: Nil)
+
+ spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')");
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 3, "3") :: Row(2, 4, "4") :: Nil)
+ }
+ }
+ }
+
+ withPk.foreach {
+ hasPk =>
+ bucketModes.foreach {
+ bucket =>
+ test(s"insert overwrite single-partitioned table: hasPk: $hasPk, bucket: $bucket") {
+ val primaryKeysProp = if (hasPk) {
+ "'primary-key'='a,b',"
+ } else {
+ ""
+ }
+
+ val bucketKeyProp = if (bucket > 0) {
+ ",'bucket-key'='b'"
+ } else {
+ ""
+ }
+
+ spark.sql(s"""
+ |CREATE TABLE T (a INT, b INT, c STRING)
+ |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
+ |PARTITIONED BY (a)
+ |""".stripMargin)
+
+ spark.sql("INSERT INTO T values (1, 1, '1'), (2, 2, '2')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 1, "1") :: Row(2, 2, "2") :: Nil)
+
+ // overwrite the whole table
+ spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 3, "3") :: Row(2, 4, "4") :: Nil)
+
+ // overwrite the a=1 partition
+ spark.sql("INSERT OVERWRITE T PARTITION (a = 1) VALUES (5, '5'), (7, '7')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 5, "5") :: Row(1, 7, "7") :: Row(2, 4, "4") :: Nil)
+
+ }
+ }
+ }
+
+ withPk.foreach {
+ hasPk =>
+ bucketModes.foreach {
+ bucket =>
+ test(s"insert overwrite mutil-partitioned table: hasPk: $hasPk, bucket: $bucket") {
+ val primaryKeysProp = if (hasPk) {
+ "'primary-key'='a,pt1,pt2',"
+ } else {
+ ""
+ }
+
+ val bucketKeyProp = if (bucket > 0) {
+ ",'bucket-key'='a'"
+ } else {
+ ""
+ }
+
+ spark.sql(s"""
+ |CREATE TABLE T (a INT, b STRING, pt1 STRING, pt2 INT)
+ |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
+ |PARTITIONED BY (pt1, pt2)
+ |""".stripMargin)
+
+ spark.sql(
+ "INSERT INTO T values (1, 'a', 'ptv1', 11), (2, 'b', 'ptv1', 11), (3, 'c', 'ptv1', 22), (4, 'd', 'ptv2', 22)")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c", "ptv1", 22) :: Row(
+ 4,
+ "d",
+ "ptv2",
+ 22) :: Nil)
+
+ // overwrite the pt2=22 partition
+ spark.sql(
+ "INSERT OVERWRITE T PARTITION (pt2 = 22) VALUES (3, 'c2', 'ptv1'), (4, 'd2', 'ptv3')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c2", "ptv1", 22) :: Row(
+ 4,
+ "d2",
+ "ptv3",
+ 22) :: Nil)
+
+ // overwrite the pt1=ptv3 partition
+ spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv3') VALUES (4, 'd3', 22)")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c2", "ptv1", 22) :: Row(
+ 4,
+ "d3",
+ "ptv3",
+ 22) :: Nil)
+
+ // overwrite the pt1=ptv1, pt2=11 partition
+ spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv1', pt2=11) VALUES (5, 'e')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(3, "c2", "ptv1", 22) :: Row(4, "d3", "ptv3", 22) :: Row(
+ 5,
+ "e",
+ "ptv1",
+ 11) :: Nil)
+
+ // overwrite the whole table
+ spark.sql(
+ "INSERT OVERWRITE T VALUES (1, 'a5', 'ptv1', 11), (3, 'c5', 'ptv1', 22), (4, 'd5', 'ptv3', 22)")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(1, "a5", "ptv1", 11) :: Row(3, "c5", "ptv1", 22) :: Row(
+ 4,
+ "d5",
+ "ptv3",
+ 22) :: Nil)
+ }
+ }
+ }
+
+ // Cases where date/timestamp/bool is used as the partition field type are not yet supported.
+ Seq(IntegerType, LongType, FloatType, DoubleType, DecimalType).foreach {
+ dataType =>
+ test(s"insert overwrite table using $dataType as the partition field type") {
+ case class PartitionSQLAndValue(sql: Any, value: Any)
+
+ val (ptField, sv1, sv2) = dataType match {
+ case IntegerType =>
+ ("INT", PartitionSQLAndValue(1, 1), PartitionSQLAndValue(2, 2))
+ case LongType =>
+ ("LONG", PartitionSQLAndValue(1L, 1L), PartitionSQLAndValue(2L, 2L))
+ case FloatType =>
+ ("FLOAT", PartitionSQLAndValue(12.3f, 12.3f), PartitionSQLAndValue(45.6f, 45.6f))
+ case DoubleType =>
+ ("DOUBLE", PartitionSQLAndValue(12.3d, 12.3), PartitionSQLAndValue(45.6d, 45.6))
+ case DecimalType =>
+ (
+ "DECIMAL(5, 2)",
+ PartitionSQLAndValue(11.222, 11.22),
+ PartitionSQLAndValue(66.777, 66.78))
+ }
+
+ spark.sql(s"""
+ |CREATE TABLE T (a INT, b STRING, pt $ptField)
+ |PARTITIONED BY (pt)
+ |""".stripMargin)
+
+ spark.sql(s"INSERT INTO T SELECT 1, 'a', ${sv1.sql} UNION ALL SELECT 2, 'b', ${sv2.sql}")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(1, "a", sv1.value) :: Row(2, "b", sv2.value) :: Nil)
+
+ // overwrite the whole table
+ spark.sql(
+ s"INSERT OVERWRITE T SELECT 3, 'c', ${sv1.sql} UNION ALL SELECT 4, 'd', ${sv2.sql}")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(3, "c", sv1.value) :: Row(4, "d", sv2.value) :: Nil)
+
+ // overwrite the pt=sv1 partition
+ spark.sql(s"INSERT OVERWRITE T PARTITION (pt = ${sv1.value}) VALUES (5, 'e'), (7, 'g')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a"),
+ Row(4, "d", sv2.value) :: Row(5, "e", sv1.value) :: Row(7, "g", sv1.value) :: Nil)
+ }
+ }
+
+ withPk.foreach {
+ hasPk =>
+ bucketModes.foreach {
+ bucket =>
+ test(
+ s"dynamic insert overwrite single-partitioned table: hasPk: $hasPk, bucket: $bucket") {
+ val primaryKeysProp = if (hasPk) {
+ "'primary-key'='a,b',"
+ } else {
+ ""
+ }
+
+ val bucketKeyProp = if (bucket > 0) {
+ ",'bucket-key'='b'"
+ } else {
+ ""
+ }
+
+ spark.sql(s"""
+ |CREATE TABLE T (a INT, b INT, c STRING)
+ |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
+ |PARTITIONED BY (a)
+ |""".stripMargin)
+
+ spark.sql("INSERT INTO T values (1, 1, '1'), (2, 2, '2')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 1, "1") :: Row(2, 2, "2") :: Nil)
+
+ // overwrite the whole table
+ spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 3, "3") :: Row(2, 4, "4") :: Nil)
+
+ withSQLConf("spark.sql.sources.partitionOverwriteMode" -> "dynamic") {
+ // dynamic overwrite the a=1 partition
+ spark.sql("INSERT OVERWRITE T VALUES (1, 5, '5'), (1, 7, '7')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, 5, "5") :: Row(1, 7, "7") :: Row(2, 4, "4") :: Nil)
+ }
+ }
+ }
+ }
+
+ withPk.foreach {
+ hasPk =>
+ bucketModes.foreach {
+ bucket =>
+ test(
+ s"dynamic insert overwrite mutil-partitioned table: hasPk: $hasPk, bucket: $bucket") {
+ val primaryKeysProp = if (hasPk) {
+ "'primary-key'='a,pt1,pt2',"
+ } else {
+ ""
+ }
+
+ val bucketKeyProp = if (bucket > 0) {
+ ",'bucket-key'='a'"
+ } else {
+ ""
+ }
+
+ spark.sql(s"""
+ |CREATE TABLE T (a INT, b STRING, pt1 STRING, pt2 INT)
+ |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
+ |PARTITIONED BY (pt1, pt2)
+ |""".stripMargin)
+
+ spark.sql(
+ "INSERT INTO T values (1, 'a', 'ptv1', 11), (2, 'b', 'ptv1', 11), (3, 'c', 'ptv1', 22), (4, 'd', 'ptv2', 22)")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c", "ptv1", 22) :: Row(
+ 4,
+ "d",
+ "ptv2",
+ 22) :: Nil)
+
+ withSQLConf("spark.sql.sources.partitionOverwriteMode" -> "dynamic") {
+ // dynamic overwrite the pt2=22 partition
+ spark.sql(
+ "INSERT OVERWRITE T PARTITION (pt2 = 22) VALUES (3, 'c2', 'ptv1'), (4, 'd2', 'ptv3')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(
+ 3,
+ "c2",
+ "ptv1",
+ 22) :: Row(4, "d", "ptv2", 22) :: Row(4, "d2", "ptv3", 22) :: Nil
+ )
+
+ // dynamic overwrite the pt1=ptv3 partition
+ spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv3') VALUES (4, 'd3', 22)")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(
+ 3,
+ "c2",
+ "ptv1",
+ 22) :: Row(4, "d", "ptv2", 22) :: Row(4, "d3", "ptv3", 22) :: Nil
+ )
+
+ // dynamic overwrite the pt1=ptv1, pt2=11 partition
+ spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv1', pt2=11) VALUES (5, 'e')")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(3, "c2", "ptv1", 22) :: Row(4, "d", "ptv2", 22) :: Row(
+ 4,
+ "d3",
+ "ptv3",
+ 22) :: Row(5, "e", "ptv1", 11) :: Nil)
+
+ // dynamic overwrite without a partition spec; only partitions present in the data are replaced
+ spark.sql(
+ "INSERT OVERWRITE T VALUES (1, 'a5', 'ptv1', 11), (3, 'c5', 'ptv1', 22), (4, 'd5', 'ptv3', 22)")
+ checkAnswer(
+ spark.sql("SELECT * FROM T ORDER BY a, b"),
+ Row(1, "a5", "ptv1", 11) :: Row(3, "c5", "ptv1", 22) :: Row(
+ 4,
+ "d",
+ "ptv2",
+ 22) :: Row(4, "d5", "ptv3", 22) :: Nil)
+ }
+ }
+ }
+ }
+
+}
From 882a69cf5650ec6be64708dc50b55f447d5e1dcf Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 17:14:08 +0800
Subject: [PATCH 02/11] Add InsertOverwriteTableTest extending the base test
---
.../org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala | 3 +++
1 file changed, 3 insertions(+)
create mode 100644 paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
new file mode 100644
index 000000000000..e5b5799af122
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -0,0 +1,3 @@
+package org.apache.paimon.spark.sql
+
+class InsertOverwriteTableTest extends InsertOverwriteTest {}
From 75c28f2c6908451b0d6635e582bd5ec08769a220 Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 17:15:47 +0800
Subject: [PATCH 03/11] Fix code style: add missing license header
---
.../spark/sql/InsertOverwriteTableTest.scala | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index e5b5799af122..173488869d41 100644
--- a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.paimon.spark.sql
class InsertOverwriteTableTest extends InsertOverwriteTest {}
From c0bcf9600f00b5ba6e2488c6dbdd52b5f84c0e11 Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 18:13:07 +0800
Subject: [PATCH 04/11] Add tests for the other Spark versions (3.3, 3.4, 3.5)
---
.../spark/sql/InsertOverwriteTableTest.scala | 21 +++++++++++++++++++
.../spark/sql/InsertOverwriteTableTest.scala | 21 +++++++++++++++++++
.../spark/sql/InsertOverwriteTableTest.scala | 21 +++++++++++++++++++
3 files changed, 63 insertions(+)
create mode 100644 paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
create mode 100644 paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
create mode 100644 paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
diff --git a/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
new file mode 100644
index 000000000000..173488869d41
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class InsertOverwriteTableTest extends InsertOverwriteTest {}
diff --git a/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
new file mode 100644
index 000000000000..173488869d41
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class InsertOverwriteTableTest extends InsertOverwriteTest {}
diff --git a/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
new file mode 100644
index 000000000000..173488869d41
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class InsertOverwriteTableTest extends InsertOverwriteTest {}
From 7194afcfb1b8bed6712346f691fbdf228400b98a Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 19:03:20 +0800
Subject: [PATCH 05/11] Add test for Spark 3.1 and drop the Spark 3.2 copy
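
The Spark 3.1 module gains a test-jar dependency on paimon-spark-common so
that it can reuse the shared suite, and the duplicated Spark 3.2 copy is
removed. Each version module then only needs to declare a concrete
subclass; a minimal sketch of the pattern (the test bodies live in the
shared suite):

    // per-version module: inherit all test cases from the shared suite
    class InsertOverwriteTableTest extends InsertOverwriteTest {}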
---
paimon-spark/paimon-spark-3.1/pom.xml | 7 +
.../paimon/sql/InsertOverwriteTableTest.scala | 21 +
.../spark/sql/InsertOverwriteTest.scala | 361 ------------------
3 files changed, 28 insertions(+), 361 deletions(-)
create mode 100644 paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala
delete mode 100644 paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
diff --git a/paimon-spark/paimon-spark-3.1/pom.xml b/paimon-spark/paimon-spark-3.1/pom.xml
index 242287b36156..c230883445e6 100644
--- a/paimon-spark/paimon-spark-3.1/pom.xml
+++ b/paimon-spark/paimon-spark-3.1/pom.xml
@@ -79,6 +79,13 @@ under the License.
+        <dependency>
+            <groupId>org.apache.paimon</groupId>
+            <artifactId>paimon-spark-common</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
diff --git a/paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala
new file mode 100644
index 000000000000..173488869d41
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class InsertOverwriteTableTest extends InsertOverwriteTest {}
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
deleted file mode 100644
index d1252e264207..000000000000
--- a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
+++ /dev/null
@@ -1,361 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.paimon.spark.sql
-
-import org.apache.paimon.spark.PaimonSparkTestBase
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-
-class InsertOverwriteTest extends PaimonSparkTestBase {
-
- withPk.foreach {
- hasPk =>
- bucketModes.foreach {
- bucket =>
- test(s"insert overwrite non-partitioned table: hasPk: $hasPk, bucket: $bucket") {
- val primaryKeysProp = if (hasPk) {
- "'primary-key'='a,b',"
- } else {
- ""
- }
-
- val bucketKeyProp = if (bucket > 0) {
- ",'bucket-key'='b'"
- } else {
- ""
- }
-
- spark.sql(s"""
- |CREATE TABLE T (a INT, b INT, c STRING)
- |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
- |""".stripMargin)
-
- spark.sql("INSERT INTO T values (1, 1, '1'), (2, 2, '2')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 1, "1") :: Row(2, 2, "2") :: Nil)
-
- spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')");
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 3, "3") :: Row(2, 4, "4") :: Nil)
- }
- }
- }
-
- withPk.foreach {
- hasPk =>
- bucketModes.foreach {
- bucket =>
- test(s"insert overwrite single-partitioned table: hasPk: $hasPk, bucket: $bucket") {
- val primaryKeysProp = if (hasPk) {
- "'primary-key'='a,b',"
- } else {
- ""
- }
-
- val bucketKeyProp = if (bucket > 0) {
- ",'bucket-key'='b'"
- } else {
- ""
- }
-
- spark.sql(s"""
- |CREATE TABLE T (a INT, b INT, c STRING)
- |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
- |PARTITIONED BY (a)
- |""".stripMargin)
-
- spark.sql("INSERT INTO T values (1, 1, '1'), (2, 2, '2')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 1, "1") :: Row(2, 2, "2") :: Nil)
-
- // overwrite the whole table
- spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 3, "3") :: Row(2, 4, "4") :: Nil)
-
- // overwrite the a=1 partition
- spark.sql("INSERT OVERWRITE T PARTITION (a = 1) VALUES (5, '5'), (7, '7')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 5, "5") :: Row(1, 7, "7") :: Row(2, 4, "4") :: Nil)
-
- }
- }
- }
-
- withPk.foreach {
- hasPk =>
- bucketModes.foreach {
- bucket =>
- test(s"insert overwrite mutil-partitioned table: hasPk: $hasPk, bucket: $bucket") {
- val primaryKeysProp = if (hasPk) {
- "'primary-key'='a,pt1,pt2',"
- } else {
- ""
- }
-
- val bucketKeyProp = if (bucket > 0) {
- ",'bucket-key'='a'"
- } else {
- ""
- }
-
- spark.sql(s"""
- |CREATE TABLE T (a INT, b STRING, pt1 STRING, pt2 INT)
- |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
- |PARTITIONED BY (pt1, pt2)
- |""".stripMargin)
-
- spark.sql(
- "INSERT INTO T values (1, 'a', 'ptv1', 11), (2, 'b', 'ptv1', 11), (3, 'c', 'ptv1', 22), (4, 'd', 'ptv2', 22)")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c", "ptv1", 22) :: Row(
- 4,
- "d",
- "ptv2",
- 22) :: Nil)
-
- // overwrite the pt2=22 partition
- spark.sql(
- "INSERT OVERWRITE T PARTITION (pt2 = 22) VALUES (3, 'c2', 'ptv1'), (4, 'd2', 'ptv3')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c2", "ptv1", 22) :: Row(
- 4,
- "d2",
- "ptv3",
- 22) :: Nil)
-
- // overwrite the pt1=ptv3 partition
- spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv3') VALUES (4, 'd3', 22)")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c2", "ptv1", 22) :: Row(
- 4,
- "d3",
- "ptv3",
- 22) :: Nil)
-
- // overwrite the pt1=ptv1, pt2=11 partition
- spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv1', pt2=11) VALUES (5, 'e')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(3, "c2", "ptv1", 22) :: Row(4, "d3", "ptv3", 22) :: Row(
- 5,
- "e",
- "ptv1",
- 11) :: Nil)
-
- // overwrite the whole table
- spark.sql(
- "INSERT OVERWRITE T VALUES (1, 'a5', 'ptv1', 11), (3, 'c5', 'ptv1', 22), (4, 'd5', 'ptv3', 22)")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(1, "a5", "ptv1", 11) :: Row(3, "c5", "ptv1", 22) :: Row(
- 4,
- "d5",
- "ptv3",
- 22) :: Nil)
- }
- }
- }
-
- // Cases where date/timestamp/bool is used as the partition field type are not yet supported.
- Seq(IntegerType, LongType, FloatType, DoubleType, DecimalType).foreach {
- dataType =>
- test(s"insert overwrite table using $dataType as the partition field type") {
- case class PartitionSQLAndValue(sql: Any, value: Any)
-
- val (ptField, sv1, sv2) = dataType match {
- case IntegerType =>
- ("INT", PartitionSQLAndValue(1, 1), PartitionSQLAndValue(2, 2))
- case LongType =>
- ("LONG", PartitionSQLAndValue(1L, 1L), PartitionSQLAndValue(2L, 2L))
- case FloatType =>
- ("FLOAT", PartitionSQLAndValue(12.3f, 12.3f), PartitionSQLAndValue(45.6f, 45.6f))
- case DoubleType =>
- ("DOUBLE", PartitionSQLAndValue(12.3d, 12.3), PartitionSQLAndValue(45.6d, 45.6))
- case DecimalType =>
- (
- "DECIMAL(5, 2)",
- PartitionSQLAndValue(11.222, 11.22),
- PartitionSQLAndValue(66.777, 66.78))
- }
-
- spark.sql(s"""
- |CREATE TABLE T (a INT, b STRING, pt $ptField)
- |PARTITIONED BY (pt)
- |""".stripMargin)
-
- spark.sql(s"INSERT INTO T SELECT 1, 'a', ${sv1.sql} UNION ALL SELECT 2, 'b', ${sv2.sql}")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(1, "a", sv1.value) :: Row(2, "b", sv2.value) :: Nil)
-
- // overwrite the whole table
- spark.sql(
- s"INSERT OVERWRITE T SELECT 3, 'c', ${sv1.sql} UNION ALL SELECT 4, 'd', ${sv2.sql}")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(3, "c", sv1.value) :: Row(4, "d", sv2.value) :: Nil)
-
- // overwrite the pt=sv1 partition
- spark.sql(s"INSERT OVERWRITE T PARTITION (pt = ${sv1.value}) VALUES (5, 'e'), (7, 'g')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a"),
- Row(4, "d", sv2.value) :: Row(5, "e", sv1.value) :: Row(7, "g", sv1.value) :: Nil)
- }
- }
-
- withPk.foreach {
- hasPk =>
- bucketModes.foreach {
- bucket =>
- test(
- s"dynamic insert overwrite single-partitioned table: hasPk: $hasPk, bucket: $bucket") {
- val primaryKeysProp = if (hasPk) {
- "'primary-key'='a,b',"
- } else {
- ""
- }
-
- val bucketKeyProp = if (bucket > 0) {
- ",'bucket-key'='b'"
- } else {
- ""
- }
-
- spark.sql(s"""
- |CREATE TABLE T (a INT, b INT, c STRING)
- |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
- |PARTITIONED BY (a)
- |""".stripMargin)
-
- spark.sql("INSERT INTO T values (1, 1, '1'), (2, 2, '2')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 1, "1") :: Row(2, 2, "2") :: Nil)
-
- // overwrite the whole table
- spark.sql("INSERT OVERWRITE T VALUES (1, 3, '3'), (2, 4, '4')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 3, "3") :: Row(2, 4, "4") :: Nil)
-
- withSQLConf("spark.sql.sources.partitionOverwriteMode" -> "dynamic") {
- // dynamic overwrite the a=1 partition
- spark.sql("INSERT OVERWRITE T VALUES (1, 5, '5'), (1, 7, '7')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, 5, "5") :: Row(1, 7, "7") :: Row(2, 4, "4") :: Nil)
- }
- }
- }
- }
-
- withPk.foreach {
- hasPk =>
- bucketModes.foreach {
- bucket =>
- test(
- s"dynamic insert overwrite mutil-partitioned table: hasPk: $hasPk, bucket: $bucket") {
- val primaryKeysProp = if (hasPk) {
- "'primary-key'='a,pt1,pt2',"
- } else {
- ""
- }
-
- val bucketKeyProp = if (bucket > 0) {
- ",'bucket-key'='a'"
- } else {
- ""
- }
-
- spark.sql(s"""
- |CREATE TABLE T (a INT, b STRING, pt1 STRING, pt2 INT)
- |TBLPROPERTIES ($primaryKeysProp 'bucket'='$bucket' $bucketKeyProp)
- |PARTITIONED BY (pt1, pt2)
- |""".stripMargin)
-
- spark.sql(
- "INSERT INTO T values (1, 'a', 'ptv1', 11), (2, 'b', 'ptv1', 11), (3, 'c', 'ptv1', 22), (4, 'd', 'ptv2', 22)")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(3, "c", "ptv1", 22) :: Row(
- 4,
- "d",
- "ptv2",
- 22) :: Nil)
-
- withSQLConf("spark.sql.sources.partitionOverwriteMode" -> "dynamic") {
- // dynamic overwrite the pt2=22 partition
- spark.sql(
- "INSERT OVERWRITE T PARTITION (pt2 = 22) VALUES (3, 'c2', 'ptv1'), (4, 'd2', 'ptv3')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(
- 3,
- "c2",
- "ptv1",
- 22) :: Row(4, "d", "ptv2", 22) :: Row(4, "d2", "ptv3", 22) :: Nil
- )
-
- // dynamic overwrite the pt1=ptv3 partition
- spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv3') VALUES (4, 'd3', 22)")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, "a", "ptv1", 11) :: Row(2, "b", "ptv1", 11) :: Row(
- 3,
- "c2",
- "ptv1",
- 22) :: Row(4, "d", "ptv2", 22) :: Row(4, "d3", "ptv3", 22) :: Nil
- )
-
- // dynamic overwrite the pt1=ptv1, pt2=11 partition
- spark.sql("INSERT OVERWRITE T PARTITION (pt1 = 'ptv1', pt2=11) VALUES (5, 'e')")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(3, "c2", "ptv1", 22) :: Row(4, "d", "ptv2", 22) :: Row(
- 4,
- "d3",
- "ptv3",
- 22) :: Row(5, "e", "ptv1", 11) :: Nil)
-
- // dynamic overwrite without a partition spec; only partitions present in the data are replaced
- spark.sql(
- "INSERT OVERWRITE T VALUES (1, 'a5', 'ptv1', 11), (3, 'c5', 'ptv1', 22), (4, 'd5', 'ptv3', 22)")
- checkAnswer(
- spark.sql("SELECT * FROM T ORDER BY a, b"),
- Row(1, "a5", "ptv1", 11) :: Row(3, "c5", "ptv1", 22) :: Row(
- 4,
- "d",
- "ptv2",
- 22) :: Row(4, "d5", "ptv3", 22) :: Nil)
- }
- }
- }
- }
-
-}
From 064486c887ff686b383f9a89f7e88f2533be2e8a Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 19:32:29 +0800
Subject: [PATCH 06/11] Refactor with an abstract base class
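
Making the shared suite abstract means it is no longer picked up directly
by the common module's test run; only the concrete per-version subclasses
execute it. A sketch of the resulting hierarchy (the comment /* tests */
stands in for the cases already in the suite):

    // paimon-spark-common: holds the actual test cases
    abstract class InsertOverwriteTestBase extends PaimonSparkTestBase { /* tests */ }

    // paimon-spark-3.x modules: concrete, runnable subclasses
    class InsertOverwriteTableTest extends InsertOverwriteTestBase {}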
---
.../paimon/sql/InsertOverwriteTableTest.scala | 21 -------------------
.../spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../spark/sql/InsertOverwriteTest.scala | 2 +-
6 files changed, 5 insertions(+), 26 deletions(-)
delete mode 100644 paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala
diff --git a/paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala
deleted file mode 100644
index 173488869d41..000000000000
--- a/paimon-spark/paimon-spark-3.1/src/test/scala/org/apache/spark/paimon/sql/InsertOverwriteTableTest.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.paimon.spark.sql
-
-class InsertOverwriteTableTest extends InsertOverwriteTest {}
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 173488869d41..58bb5b1565e9 100644
--- a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTest {}
+class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
diff --git a/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 173488869d41..58bb5b1565e9 100644
--- a/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTest {}
+class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
diff --git a/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 173488869d41..58bb5b1565e9 100644
--- a/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTest {}
+class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
diff --git a/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 173488869d41..58bb5b1565e9 100644
--- a/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTest {}
+class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
index 7dbd0944c206..5f945b332381 100644
--- a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
+++ b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.types._
import java.sql.Date
-class InsertOverwriteTest extends PaimonSparkTestBase {
+abstract class InsertOverwriteTestBase extends PaimonSparkTestBase {
withPk.foreach {
hasPk =>
From 7c7c6273483c10ee46f9cfae8a34263d96660243 Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 19:33:45 +0800
Subject: [PATCH 07/11] Remove unused dependency
---
paimon-spark/paimon-spark-3.1/pom.xml | 9 +--------
1 file changed, 1 insertion(+), 8 deletions(-)
diff --git a/paimon-spark/paimon-spark-3.1/pom.xml b/paimon-spark/paimon-spark-3.1/pom.xml
index c230883445e6..62b2c3162e32 100644
--- a/paimon-spark/paimon-spark-3.1/pom.xml
+++ b/paimon-spark/paimon-spark-3.1/pom.xml
@@ -78,14 +78,7 @@ under the License.
-
-        <dependency>
-            <groupId>org.apache.paimon</groupId>
-            <artifactId>paimon-spark-common</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
+
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
From 174578471ff2622b82128acf270e1d71e72a4008 Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 19:34:34 +0800
Subject: [PATCH 08/11] Remove blank line
---
paimon-spark/paimon-spark-3.1/pom.xml | 1 -
1 file changed, 1 deletion(-)
diff --git a/paimon-spark/paimon-spark-3.1/pom.xml b/paimon-spark/paimon-spark-3.1/pom.xml
index 62b2c3162e32..fab6868d6555 100644
--- a/paimon-spark/paimon-spark-3.1/pom.xml
+++ b/paimon-spark/paimon-spark-3.1/pom.xml
@@ -78,7 +78,6 @@ under the License.
-
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
From 80ed2f43286a16c32673ad2666e859cccf314972 Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 19:35:25 +0800
Subject: [PATCH 09/11] Restore blank line
---
paimon-spark/paimon-spark-3.1/pom.xml | 1 +
1 file changed, 1 insertion(+)
diff --git a/paimon-spark/paimon-spark-3.1/pom.xml b/paimon-spark/paimon-spark-3.1/pom.xml
index fab6868d6555..242287b36156 100644
--- a/paimon-spark/paimon-spark-3.1/pom.xml
+++ b/paimon-spark/paimon-spark-3.1/pom.xml
@@ -78,6 +78,7 @@ under the License.
+
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
From c70583a034ba8e8c94b0254fc1c9ad57267d2ffb Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 20:01:59 +0800
Subject: [PATCH 10/11] Rename base class to InsertOverwriteTableTestBase
---
.../org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala | 2 +-
.../scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 58bb5b1565e9..4f66584c303b 100644
--- a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
+class InsertOverwriteTableTest extends InsertOverwriteTableTestBase {}
diff --git a/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 58bb5b1565e9..4f66584c303b 100644
--- a/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
+class InsertOverwriteTableTest extends InsertOverwriteTableTestBase {}
diff --git a/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 58bb5b1565e9..4f66584c303b 100644
--- a/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
+class InsertOverwriteTableTest extends InsertOverwriteTableTestBase {}
diff --git a/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
index 58bb5b1565e9..4f66584c303b 100644
--- a/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
+++ b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
@@ -18,4 +18,4 @@
package org.apache.paimon.spark.sql
-class InsertOverwriteTableTest extends InsertOverwriteTestBase {}
+class InsertOverwriteTableTest extends InsertOverwriteTableTestBase {}
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
index 5f945b332381..1cef45cf94b4 100644
--- a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
+++ b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.types._
import java.sql.Date
-abstract class InsertOverwriteTestBase extends PaimonSparkTestBase {
+abstract class InsertOverwriteTableTestBase extends PaimonSparkTestBase {
withPk.foreach {
hasPk =>
From 369837aac9d55a256a6aad815d5c9bffbe544012 Mon Sep 17 00:00:00 2001
From: xuyu <11161569@vivo.com>
Date: Mon, 3 Jun 2024 20:19:30 +0800
Subject: [PATCH 11/11] Rename base test file to match the renamed class
---
.../{InsertOverwriteTest.scala => InsertOverwriteTableTest.scala} | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/{InsertOverwriteTest.scala => InsertOverwriteTableTest.scala} (100%)
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala
similarity index 100%
rename from paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTest.scala
rename to paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTest.scala