Skip to content

Commit

Permalink
[hive] Fix creating an external table based on a managed table's location
Browse files Browse the repository at this point in the history
  • Loading branch information
Zouxxyy committed Dec 20, 2024
1 parent 85c462e commit b3f289c
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,7 @@ private void checkSchemaForExternalTable(Schema existsSchema, Schema newSchema)
newOptions.forEach(
(key, value) -> {
if (!key.equals(Catalog.OWNER_PROP)
&& !key.equals(CoreOptions.PATH.key())
&& (!existsOptions.containsKey(key)
|| !existsOptions.get(key).equals(value))) {
throw new RuntimeException(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -553,6 +553,26 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {
}
}

test("Paimon DDL with hive catalog: create external based on managed table location") {
  // Verify that an external table declared over a managed table's storage
  // path reuses that exact location and sees the managed table's data.
  for (catalog <- Seq(sparkCatalogName, paimonHiveCatalogName)) {
    spark.sql(s"USE $catalog")
    withDatabase("paimon_db") {
      spark.sql("CREATE DATABASE IF NOT EXISTS paimon_db")
      spark.sql("USE paimon_db")
      withTable("external_tbl", "managed_tbl") {
        // Create a managed table and write a single row into it.
        spark.sql("CREATE TABLE managed_tbl (id INT) USING paimon")
        spark.sql("INSERT INTO managed_tbl VALUES (1)")
        checkAnswer(spark.sql("SELECT * FROM managed_tbl"), Row(1))

        // Point a new external table at the managed table's location.
        val managedLocation = loadTable("paimon_db", "managed_tbl").location().toString
        spark.sql(s"CREATE TABLE external_tbl (id INT) USING paimon LOCATION '$managedLocation'")

        // The external table must expose the same data and the same path.
        checkAnswer(spark.sql("SELECT * FROM external_tbl"), Row(1))
        assert(loadTable("paimon_db", "external_tbl").location().toString == managedLocation)
      }
    }
  }
}

test("Paimon DDL with hive catalog: case sensitive") {
Seq(sparkCatalogName, paimonHiveCatalogName).foreach {
catalogName =>
Expand Down

0 comments on commit b3f289c

Please sign in to comment.