[hive] Support recreate the hive table #3084

Merged
merged 4 commits on Apr 8, 2024

Changes from all commits
@@ -56,6 +56,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
@@ -124,16 +125,26 @@ public List<Long> listAllIds() {
}
}

/** Check whether the table schema has been modified. */
public void checkTableSchema(TableSchema oldSchema, TableSchema newSchema) {
boolean isCommon =
oldSchema.version() == newSchema.version()
&& Objects.equals(oldSchema.fields(), newSchema.fields())
&& oldSchema.highestFieldId() == newSchema.highestFieldId()
&& Objects.equals(oldSchema.partitionKeys(), newSchema.partitionKeys())
&& Objects.equals(oldSchema.primaryKeys(), newSchema.primaryKeys());

if (!isCommon) {
throw new IllegalStateException(
"Schema in filesystem exists, please use updating,"
+ " latest schema is: "
+ oldSchema);
}
}

/** Create a new schema from {@link Schema}. */
public TableSchema createTable(Schema schema) throws Exception {
while (true) {
latest().ifPresent(
latest -> {
throw new IllegalStateException(
"Schema in filesystem exists, please use updating,"
+ " latest schema is: "
+ latest());
});

List<DataField> fields = schema.fields();
List<String> partitionKeys = schema.partitionKeys();
@@ -151,6 +162,11 @@ public TableSchema createTable(Schema schema) throws Exception {
options,
schema.comment());

if (latest().isPresent()) {
checkTableSchema(latest().get(), newSchema);
return newSchema;
}

boolean success = commit(newSchema);
if (success) {
return newSchema;
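The hunk above changes createTable so that an existing schema file in the warehouse no longer unconditionally aborts creation: when a latest schema is already present, checkTableSchema compares it with the requested one and, if they match, the existing schema is reused instead of throwing. The following is a minimal, self-contained sketch of that flow, not the real class: SketchSchema and CreateTableFlowSketch are stand-ins invented here, the field comparison is condensed, and the real commit retry loop is reduced to a single assignment; only the latest()/checkTableSchema/createTable shape mirrors the diff.

import java.util.List;
import java.util.Objects;
import java.util.Optional;

// Simplified stand-in for Paimon's TableSchema; the real class also carries
// a version, field ids, options, and a comment.
record SketchSchema(List<String> fields, List<String> partitionKeys, List<String> primaryKeys) {}

public class CreateTableFlowSketch {

    // Stand-in for the schema files in the warehouse: empty until something is committed.
    private Optional<SketchSchema> latest = Optional.empty();

    // Mirrors the PR's checkTableSchema: an identical schema is tolerated, anything else fails.
    void checkTableSchema(SketchSchema oldSchema, SketchSchema newSchema) {
        boolean isSame =
                Objects.equals(oldSchema.fields(), newSchema.fields())
                        && Objects.equals(oldSchema.partitionKeys(), newSchema.partitionKeys())
                        && Objects.equals(oldSchema.primaryKeys(), newSchema.primaryKeys());
        if (!isSame) {
            throw new IllegalStateException(
                    "Schema in filesystem exists, please use updating, latest schema is: " + oldSchema);
        }
    }

    // Mirrors the new createTable flow: reuse a matching existing schema instead of throwing.
    SketchSchema createTable(SketchSchema requested) {
        if (latest.isPresent()) {
            checkTableSchema(latest.get(), requested);
            return requested; // schema already on disk and compatible, nothing to commit
        }
        latest = Optional.of(requested); // stands in for the real commit(newSchema) loop
        return requested;
    }

    public static void main(String[] args) {
        CreateTableFlowSketch manager = new CreateTableFlowSketch();
        SketchSchema schema =
                new SketchSchema(List.of("col1 INT", "col2 STRING"), List.of(), List.of());
        manager.createTable(schema); // first creation commits the schema
        manager.createTable(schema); // re-creation with the same schema now succeeds
        // a call with a different schema would still throw IllegalStateException
    }
}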
@@ -120,6 +120,41 @@ public void testCreateExternalTableWithPaimonTable() throws Exception {
" 'org.apache.paimon.hive.PaimonStorageHandler' ");
}

@Test
public void testCallCreateTableToCreatHiveExternalTable() throws Exception {
// Create hive external table with paimon table
String tableName = "with_paimon_table";
String hadoopConfDir = "";

// Create a paimon table
Schema schema =
new Schema(
Lists.newArrayList(
new DataField(0, "col1", DataTypes.INT(), "first comment"),
new DataField(1, "col2", DataTypes.STRING(), "second comment"),
new DataField(2, "col3", DataTypes.DECIMAL(5, 3), "last comment")),
Collections.emptyList(),
Collections.emptyList(),
Maps.newHashMap(),
"");
Identifier identifier = Identifier.create(DATABASE_TEST, tableName);
Options options = new Options();
options.set("warehouse", path);
options.set("metastore", "hive");
options.set("table.type", "external");
options.set("hadoop-conf-dir", hadoopConfDir);
CatalogContext context = CatalogContext.create(options);
Catalog hiveCatalog = CatalogFactory.createCatalog(context);
hiveCatalog.createTable(identifier, schema, false);

// Drop hive external table
String hiveSql = String.join("\n", Arrays.asList("DROP TABLE " + tableName));
assertThatCode(() -> hiveShell.execute(hiveSql)).doesNotThrowAnyException();

assertThatCode(() -> hiveCatalog.createTable(identifier, schema, false))
.doesNotThrowAnyException();
}

@Test
public void testCreateTableUsePartitionedBy() {
// Use `partitioned by` to create hive partition table
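The new test walks through the scenario in the PR title: create a Paimon table through the Hive metastore as an external table, drop that external table on the Hive side so that only the Paimon files remain in the warehouse, then call createTable again with the same schema. Below is a hedged usage sketch of the same scenario outside the test harness; the catalog calls mirror the test above, while the import paths, warehouse path, Hadoop conf dir, and database name are assumptions or placeholders, and the Hive-side DROP TABLE is only indicated by a comment.

// Usage sketch only; import paths are assumed to match those used by the test file.
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.catalog.CatalogFactory;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.options.Options;
import org.apache.paimon.schema.Schema;
import org.apache.paimon.types.DataField;
import org.apache.paimon.types.DataTypes;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;

public class RecreateHiveExternalTableSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.set("warehouse", "/path/to/warehouse");          // placeholder
        options.set("metastore", "hive");
        options.set("table.type", "external");
        options.set("hadoop-conf-dir", "/path/to/hadoop/conf");  // placeholder

        Catalog catalog = CatalogFactory.createCatalog(CatalogContext.create(options));

        Schema schema =
                new Schema(
                        List.of(new DataField(0, "col1", DataTypes.INT(), "first comment")),
                        Collections.emptyList(),
                        Collections.emptyList(),
                        new HashMap<>(),
                        "");
        // Assumes the database already exists in the Hive metastore.
        Identifier identifier = Identifier.create("test_db", "with_paimon_table");

        catalog.createTable(identifier, schema, false);

        // ... the external table is dropped on the Hive side (DROP TABLE with_paimon_table),
        // leaving the Paimon schema files in the warehouse ...

        // Before this PR the second call failed with "Schema in filesystem exists";
        // with this change it succeeds because the requested schema matches the existing one.
        catalog.createTable(identifier, schema, false);
    }
}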