diff --git a/docs/content/how-to/querying-tables.md b/docs/content/how-to/querying-tables.md
index 223c6c7e95af..14ded4412a68 100644
--- a/docs/content/how-to/querying-tables.md
+++ b/docs/content/how-to/querying-tables.md
@@ -192,7 +192,6 @@ Paimon supports that use Spark SQL to do the incremental query that implemented
 To enable this needs these configs below:
 
 ```text
---conf spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog
 --conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions
 ```
 
diff --git a/docs/content/how-to/writing-tables.md b/docs/content/how-to/writing-tables.md
index adbfc2c76937..c72628cc969a 100644
--- a/docs/content/how-to/writing-tables.md
+++ b/docs/content/how-to/writing-tables.md
@@ -209,7 +209,6 @@ INSERT OVERWRITE MyTable /*+ OPTIONS('dynamic-partition-overwrite' = 'false') */
 
 Spark's default overwrite mode is static partition overwrite. To enable dynamic overwritten needs these configs below:
 ```text
---conf spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog
 --conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions
 ```
 
@@ -371,7 +370,6 @@ UPDATE MyTable SET b = 1, c = 2 WHERE a = 'myTable';
 To enable update needs these configs below:
 
 ```text
---conf spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog
 --conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions
 ```
 
@@ -478,7 +476,6 @@ Important table properties setting:
 To enable delete needs these configs below:
 
 ```text
---conf spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog
 --conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions
 ```
 
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/TableValuedFunctionsTest.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/TableValuedFunctionsTest.scala
index 72c582ccd8ac..0c0bf911061e 100644
--- a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/TableValuedFunctionsTest.scala
+++ b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/TableValuedFunctionsTest.scala
@@ -18,19 +18,10 @@
 package org.apache.paimon.spark.sql
 
 import org.apache.paimon.spark.PaimonSparkTestBase
-import org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions
-
-import org.apache.spark.SparkConf
 import org.apache.spark.sql.{DataFrame, Row}
 
 class TableValuedFunctionsTest extends PaimonSparkTestBase {
 
-  override protected def sparkConf: SparkConf = {
-    super.sparkConf
-      .set("spark.sql.catalog.spark_catalog", "org.apache.paimon.spark.SparkGenericCatalog")
-      .set("spark.sql.extensions", classOf[PaimonSparkSessionExtensions].getName)
-  }
-
   withPk.foreach { hasPk =>
     bucketModes.foreach {
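
Note (illustrative, not part of the patch): every doc hunk above drops the `--conf spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog` line, leaving the session extension as the only documented requirement, and the test hunk deletes a per-suite `sparkConf` override to match, which suggests `PaimonSparkTestBase` now supplies that config itself. Below is a minimal Scala sketch of a Spark session wired up the remaining way; the `local[2]` master, the `paimon` catalog name, the warehouse path, and the demo table are assumptions for illustration, not values taken from this diff.

```scala
import org.apache.spark.sql.SparkSession

// Sketch only: a SparkSession that relies on just the Paimon session
// extension, without the spark_catalog -> SparkGenericCatalog override
// that this patch removes from the docs. Master, catalog name, warehouse
// path, and the demo table are all assumed values.
object PaimonExtensionSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[2]") // assumption: local run for illustration
      .config(
        "spark.sql.extensions",
        "org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions")
      // Register a named Paimon catalog; the docs' remaining --conf line
      // covers only the extension, so a catalog entry like this is assumed.
      .config("spark.sql.catalog.paimon", "org.apache.paimon.spark.SparkCatalog")
      .config("spark.sql.catalog.paimon.warehouse", "file:/tmp/paimon") // assumed path
      .getOrCreate()

    // With the extension active, the UPDATE statement shown in the doc
    // hunks parses and runs against a primary-key table.
    spark.sql(
      "CREATE TABLE IF NOT EXISTS paimon.default.my_table (a STRING, b INT, c INT) " +
        "TBLPROPERTIES ('primary-key' = 'a')")
    spark.sql("INSERT INTO paimon.default.my_table VALUES ('myTable', 0, 0)")
    spark.sql("UPDATE paimon.default.my_table SET b = 1, c = 2 WHERE a = 'myTable'")
    spark.sql("SELECT * FROM paimon.default.my_table").show()

    spark.stop()
  }
}
```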