Skip to content

Commit

Permalink
check disable
Browse files Browse the repository at this point in the history
  • Loading branch information
xuzifu666 committed Oct 17, 2024
1 parent 09ee90c commit 91fb0e9
Show file tree
Hide file tree
Showing 4 changed files with 76 additions and 23 deletions.
22 changes: 11 additions & 11 deletions paimon-spark/paimon-spark-3.2/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -36,17 +36,17 @@ under the License.
</properties>

<dependencies>
<dependency>
<groupId>org.apache.paimon</groupId>
<artifactId>paimon-bundle</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<groupId>*</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.paimon</groupId>-->
<!-- <artifactId>paimon-bundle</artifactId>-->
<!-- <version>${project.version}</version>-->
<!-- <exclusions>-->
<!-- <exclusion>-->
<!-- <groupId>*</groupId>-->
<!-- <artifactId>*</artifactId>-->
<!-- </exclusion>-->
<!-- </exclusions>-->
<!-- </dependency>-->

<dependency>
<groupId>org.apache.paimon</groupId>
Expand Down
22 changes: 11 additions & 11 deletions paimon-spark/paimon-spark-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -38,17 +38,17 @@ under the License.
</properties>

<dependencies>
<dependency>
<groupId>org.apache.paimon</groupId>
<artifactId>paimon-bundle</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<groupId>*</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.paimon</groupId>-->
<!-- <artifactId>paimon-bundle</artifactId>-->
<!-- <version>${project.version}</version>-->
<!-- <exclusions>-->
<!-- <exclusion>-->
<!-- <groupId>*</groupId>-->
<!-- <artifactId>*</artifactId>-->
<!-- </exclusion>-->
<!-- </exclusions>-->
<!-- </dependency>-->

<dependency>
<groupId>org.scala-lang</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import org.apache.paimon.table.sink.BatchWriteBuilder
import org.apache.paimon.types.RowType
import org.apache.paimon.utils.{InternalRowPartitionComputer, TypeUtils}

import org.apache.spark.internal.Logging
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
Expand All @@ -36,7 +37,7 @@ import java.util.{Map => JMap, Objects, UUID}

import scala.collection.JavaConverters._

trait PaimonPartitionManagement extends SupportsAtomicPartitionManagement {
trait PaimonPartitionManagement extends SupportsAtomicPartitionManagement with Logging {
self: SparkTable =>

private lazy val partitionRowType: RowType = TypeUtils.project(table.rowType, table.partitionKeys)
Expand Down Expand Up @@ -78,6 +79,10 @@ trait PaimonPartitionManagement extends SupportsAtomicPartitionManagement {
metastoreClient = clientFactory.create()
toPaimonPartitions(rows).foreach(metastoreClient.deletePartition)
}
} catch {
case e: Exception => {
logWarning(s"Not drop partition in metastore due to $e")
}
} finally {
commit.close()
if (metastoreClient != null) {
Expand Down Expand Up @@ -146,6 +151,10 @@ trait PaimonPartitionManagement extends SupportsAtomicPartitionManagement {
val metastoreClient: MetastoreClient = metastoreFactory.create
try {
partitions.foreach(metastoreClient.addPartition)
} catch {
case e: Exception => {
        logWarning(s"Not add partition in metastore due to $e")
}
} finally {
metastoreClient.close()
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,34 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {
Row("name=n/pt=cc") :: Nil)

}

// disable metastore.partitioned-table
withTable("paimon_db.paimon_tbl2") {
spark.sql(s"""
|CREATE TABLE paimon_db.paimon_tbl2 (id STRING, name STRING, pt STRING)
|USING PAIMON
|PARTITIONED BY (name, pt)
|TBLPROPERTIES('metastore.partitioned-table' = 'false')
|""".stripMargin)
Assertions.assertEquals(
getTableLocation("paimon_db.paimon_tbl2"),
s"${dBLocation.getCanonicalPath}/paimon_tbl2")
spark.sql("insert into paimon_db.paimon_tbl2 select '1', 'n', 'cc'")
spark.sql("insert into paimon_db.paimon_tbl2 select '1', 'n1', 'aa'")
spark.sql("insert into paimon_db.paimon_tbl2 select '1', 'n2', 'bb'")

spark.sql("show partitions paimon_db.paimon_tbl2")
checkAnswer(
spark.sql("show partitions paimon_db.paimon_tbl2"),
Row("name=n/pt=cc") :: Row("name=n1/pt=aa") :: Row("name=n2/pt=bb") :: Nil)
spark.sql(
"alter table paimon_db.paimon_tbl2 drop partition (name='n1', `pt`='aa'), partition (name='n2', `pt`='bb')")
spark.sql("show partitions paimon_db.paimon_tbl2")
checkAnswer(
spark.sql("show partitions paimon_db.paimon_tbl2"),
Row("name=n/pt=cc") :: Nil)

}
}
}
}
Expand Down Expand Up @@ -130,6 +158,22 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {

spark.sql("alter table paimon_db.paimon_tbl add partition(name='cc', `pt`='aa') ")
}

    // metastore.partitioned-table set to 'true' here (comment previously said "disable" — verify intended value)
withTable("paimon_db.paimon_tbl2") {
spark.sql(s"""
|CREATE TABLE paimon_db.paimon_tbl2 (id STRING, name STRING, pt STRING)
|USING PAIMON
|PARTITIONED BY (name, pt)
|TBLPROPERTIES('metastore.partitioned-table' = 'true')
|""".stripMargin)
Assertions.assertEquals(
getTableLocation("paimon_db.paimon_tbl2"),
s"${dBLocation.getCanonicalPath}/paimon_tbl2")
spark.sql("insert into paimon_db.paimon_tbl2 select '1', 'n', 'cc'")

spark.sql("alter table paimon_db.paimon_tbl2 add partition(name='cc', `pt`='aa') ")
}
}
}
}
Expand Down

0 comments on commit 91fb0e9

Please sign in to comment.