From af335303df790e5b56bc571849f0dbeaf0edc5d5 Mon Sep 17 00:00:00 2001 From: wgcn <1026688210@qq.com> Date: Tue, 10 Oct 2023 17:36:07 +0800 Subject: [PATCH] Adding Considerations for Spark's Insert Overwrite --- docs/content/how-to/writing-tables.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/content/how-to/writing-tables.md b/docs/content/how-to/writing-tables.md index 540f768cc8c0..b9e878c1a4d6 100644 --- a/docs/content/how-to/writing-tables.md +++ b/docs/content/how-to/writing-tables.md @@ -127,7 +127,11 @@ INSERT INTO MyTable SELECT ... {{< /tabs >}} -## Overwriting the Whole Table +## Overwriting +Note: If `spark.sql.sources.partitionOverwriteMode` is set to `dynamic` in Spark, +then, in order to ensure that the insert overwrite function of the Paimon table can be used normally, +`spark.sql.extensions` should be set to `org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions`. +### Overwriting the Whole Table For unpartitioned tables, Paimon supports overwriting the whole table. @@ -153,7 +157,7 @@ INSERT OVERWRITE MyTable SELECT ... {{< /tabs >}} -## Overwriting a Partition +### Overwriting a Partition For partitioned tables, Paimon supports overwriting a partition. @@ -179,7 +183,7 @@ INSERT OVERWRITE MyTable PARTITION (key1 = value1, key2 = value2, ...) SELECT .. {{< /tabs >}} -## Dynamic Overwrite +### Dynamic Overwrite {{< tabs "dynamic-overwrite" >}}