From e3aad489c285b60860a6979c32fdb5d02d97dbce Mon Sep 17 00:00:00 2001
From: zouxxyy
Date: Mon, 8 Apr 2024 16:06:56 +0800
Subject: [PATCH 1/2] 1

---
 .../org/apache/paimon/spark/SparkGenericCatalog.java | 4 ++--
 .../paimon/spark/procedure/CompactProcedure.java | 4 ++--
 .../org/apache/paimon/spark/PaimonMetrics.scala | 6 +++---
 .../paimon/spark/PaimonRecordReaderIterator.scala | 6 +++---
 .../scala/org/apache/paimon/spark/PaimonScan.scala | 2 +-
 .../org/apache/paimon/spark/PaimonStatistics.scala | 4 ++--
 .../analysis/expressions/ExpressionHelper.scala | 2 +-
 .../commands/DeleteFromPaimonTableCommand.scala | 2 +-
 .../paimon/spark/commands/MergeIntoPaimonTable.scala | 2 +-
 .../commands/PaimonAnalyzeTableColumnCommand.scala | 12 ++++++------
 .../apache/paimon/spark/commands/PaimonCommand.scala | 2 +-
 .../PaimonDynamicPartitionOverwriteCommand.scala | 2 +-
 .../spark/commands/UpdatePaimonTableCommand.scala | 2 +-
 .../org/apache/paimon/spark/sources/PaimonSink.scala | 4 ++--
 .../spark/statistics/StatisticsHelperBase.scala | 4 ++--
 .../sql/{StatsUtils.scala => PaimonStatsUtils.scala} | 2 +-
 .../spark/sql/{Utils.scala => PaimonUtils.scala} | 2 +-
 .../{CatalogUtils.scala => PaimonCatalogUtils.scala} | 2 +-
 18 files changed, 32 insertions(+), 32 deletions(-)
 rename paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/{StatsUtils.scala => PaimonStatsUtils.scala} (99%)
 rename paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/{Utils.scala => PaimonUtils.scala} (99%)
 rename paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/{CatalogUtils.scala => PaimonCatalogUtils.scala} (98%)

diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkGenericCatalog.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkGenericCatalog.java
index 3bdff73762cb..62674bf54dbd 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkGenericCatalog.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkGenericCatalog.java
@@ -38,10 +38,10 @@
 import org.apache.spark.sql.catalyst.catalog.SessionCatalog;
 import org.apache.spark.sql.connector.catalog.CatalogExtension;
 import org.apache.spark.sql.connector.catalog.CatalogPlugin;
-import org.apache.spark.sql.connector.catalog.CatalogUtils;
 import org.apache.spark.sql.connector.catalog.FunctionCatalog;
 import org.apache.spark.sql.connector.catalog.Identifier;
 import org.apache.spark.sql.connector.catalog.NamespaceChange;
+import org.apache.spark.sql.connector.catalog.PaimonCatalogUtils;
 import org.apache.spark.sql.connector.catalog.SupportsNamespaces;
 import org.apache.spark.sql.connector.catalog.Table;
 import org.apache.spark.sql.connector.catalog.TableCatalog;
@@ -260,7 +260,7 @@ public final void initialize(String name, CaseInsensitiveStringMap options) {
             hadoopConf.set(entry.getKey(), entry.getValue());
         }
         ExternalCatalog externalCatalog =
-                CatalogUtils.buildExternalCatalog(sparkConf, hadoopConf);
+                PaimonCatalogUtils.buildExternalCatalog(sparkConf, hadoopConf);
         this.sessionCatalog = new V2SessionCatalog(new SessionCatalog(externalCatalog));
     }
 }
diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/CompactProcedure.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/CompactProcedure.java
index 1c5662025723..51fa6bd6e24f 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/CompactProcedure.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/CompactProcedure.java
@@ -51,8 +51,8 @@
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.PaimonUtils;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.Utils;
 import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.catalyst.expressions.Expression;
 import org.apache.spark.sql.catalyst.plans.logical.Filter;
@@ -363,7 +363,7 @@ private void sortCompactUnAwareBucketTable(
             LogicalPlan relation,
             @Nullable Expression condition) {
         Dataset<Row> row =
-                Utils.createDataset(
+                PaimonUtils.createDataset(
                         spark(), condition == null ? relation : new Filter(condition, relation));
         new WriteIntoPaimonTable(
                 table,
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonMetrics.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonMetrics.scala
index 0acdffb39ee7..da7ea7be823c 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonMetrics.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonMetrics.scala
@@ -18,7 +18,7 @@
 
 package org.apache.paimon.spark
 
-import org.apache.spark.sql.Utils
+import org.apache.spark.sql.PaimonUtils
 import org.apache.spark.sql.connector.metric.{CustomAvgMetric, CustomSumMetric, CustomTaskMetric}
 
 import java.text.DecimalFormat
@@ -85,7 +85,7 @@ case class PaimonSplitSizeMetric() extends PaimonSumMetric {
   override def description(): String = "size of splits read"
 
   override def aggregateTaskMetrics(taskMetrics: Array[Long]): String = {
-    Utils.bytesToString(aggregateTaskMetrics0(taskMetrics))
+    PaimonUtils.bytesToString(aggregateTaskMetrics0(taskMetrics))
   }
 }
 
@@ -119,7 +119,7 @@ case class PaimonAvgSplitSizeMetric() extends PaimonAvgMetric {
 
   override def aggregateTaskMetrics(taskMetrics: Array[Long]): String = {
     val average = aggregateTaskMetrics0(taskMetrics).round
-    Utils.bytesToString(average)
+    PaimonUtils.bytesToString(average)
   }
 }
 
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonRecordReaderIterator.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonRecordReaderIterator.scala
index 3debb5e1891d..36fb03fd691e 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonRecordReaderIterator.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonRecordReaderIterator.scala
@@ -23,7 +23,7 @@ import org.apache.paimon.fs.Path
 import org.apache.paimon.reader.{FileRecordIterator, RecordReader}
 import org.apache.paimon.utils.CloseableIterator
 
-import org.apache.spark.sql.Utils
+import org.apache.spark.sql.PaimonUtils
 
 import java.io.IOException
 
@@ -61,7 +61,7 @@ case class PaimonRecordReaderIterator(reader: RecordReader[PaimonInternalRow])
       }
     } finally {
       reader.close()
-      Utils.unsetInputFileName()
+      PaimonUtils.unsetInputFileName()
     }
   }
 
@@ -70,7 +70,7 @@ case class PaimonRecordReaderIterator(reader: RecordReader[PaimonInternalRow])
     iter match {
       case fileRecordIterator: FileRecordIterator[_] =>
         if (lastFilePath != fileRecordIterator.filePath()) {
-          Utils.setInputFileName(fileRecordIterator.filePath().toUri.toString)
+          PaimonUtils.setInputFileName(fileRecordIterator.filePath().toUri.toString)
           lastFilePath = fileRecordIterator.filePath()
         }
       case _ =>
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala
index 6cd5ef0edd7e..7f1900e2d3ad 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala
@@ -21,7 +21,7 @@ package org.apache.paimon.spark
 import org.apache.paimon.predicate.Predicate
 import org.apache.paimon.table.Table
 
-import org.apache.spark.sql.Utils.fieldReference
+import org.apache.spark.sql.PaimonUtils.fieldReference
 import org.apache.spark.sql.connector.expressions.NamedReference
 import org.apache.spark.sql.connector.read.SupportsRuntimeFiltering
 import org.apache.spark.sql.sources.{Filter, In}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala
index 865af3957789..d31820cb3a1e 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala
@@ -22,7 +22,7 @@ import org.apache.paimon.stats
 import org.apache.paimon.stats.ColStats
 import org.apache.paimon.types.DataType
 
-import org.apache.spark.sql.Utils
+import org.apache.spark.sql.PaimonUtils
 import org.apache.spark.sql.catalyst.plans.logical.ColumnStat
 import org.apache.spark.sql.connector.expressions.NamedReference
 import org.apache.spark.sql.connector.read.Statistics
@@ -59,7 +59,7 @@ case class PaimonStatistics[T <: PaimonBaseScan](scan: T) extends Statistics {
         .forEach(
           f =>
             resultMap.put(
-              Utils.fieldReference(f.name()),
+              PaimonUtils.fieldReference(f.name()),
               PaimonColumnStats(f.`type`(), paimonColStats.get(f.name()))))
     }
     resultMap
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
index 4e7a8109b055..e801867670ef 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
@@ -22,8 +22,8 @@ import org.apache.paimon.predicate.{Predicate, PredicateBuilder}
 import org.apache.paimon.spark.SparkFilterConverter
 import org.apache.paimon.types.RowType
 
+import org.apache.spark.sql.PaimonUtils.{normalizeExprs, translateFilter}
 import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.Utils.{normalizeExprs, translateFilter}
 import org.apache.spark.sql.catalyst.analysis.Resolver
 import org.apache.spark.sql.catalyst.expressions.{Alias, And, Attribute, Cast, Expression, GetStructField, Literal, PredicateHelper, SubqueryExpression}
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/DeleteFromPaimonTableCommand.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/DeleteFromPaimonTableCommand.scala
index 457467da1b65..95efb7a23bcf 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/DeleteFromPaimonTableCommand.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/DeleteFromPaimonTableCommand.scala
@@ -29,7 +29,7 @@ import org.apache.paimon.table.sink.{BatchWriteBuilder, CommitMessage}
 import org.apache.paimon.types.RowKind
 
 import org.apache.spark.sql.{Row, SparkSession}
-import org.apache.spark.sql.Utils.createDataset
+import org.apache.spark.sql.PaimonUtils.createDataset
 import org.apache.spark.sql.catalyst.expressions.{And, Expression, Not}
 import org.apache.spark.sql.catalyst.expressions.Literal.TrueLiteral
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, SupportsSubquery}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/MergeIntoPaimonTable.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/MergeIntoPaimonTable.scala
index ffd09dcdbd2c..ae95b7548c60 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/MergeIntoPaimonTable.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/MergeIntoPaimonTable.scala
@@ -27,7 +27,7 @@ import org.apache.paimon.table.FileStoreTable
 import org.apache.paimon.types.RowKind
 
 import org.apache.spark.sql.{Column, Dataset, Row, SparkSession}
-import org.apache.spark.sql.Utils._
+import org.apache.spark.sql.PaimonUtils._
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
 import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, BasePredicate, Expression, Literal, UnsafeProjection}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonAnalyzeTableColumnCommand.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonAnalyzeTableColumnCommand.scala
index 19026ef05f9b..5f9957dbecaf 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonAnalyzeTableColumnCommand.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonAnalyzeTableColumnCommand.scala
@@ -26,7 +26,7 @@ import org.apache.paimon.table.FileStoreTable
 import org.apache.paimon.table.sink.BatchWriteBuilder
 
 import org.apache.parquet.Preconditions
-import org.apache.spark.sql.{Row, SparkSession, StatsUtils}
+import org.apache.spark.sql.{PaimonStatsUtils, Row, SparkSession}
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.logical.ColumnStat
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
@@ -57,12 +57,12 @@ case class PaimonAnalyzeTableColumnCommand(
     // compute stats
     val attributes = getColumnsToAnalyze(relation, columnNames, allColumns)
-    val totalSize = StatsUtils.calculateTotalSize(
+    val totalSize = PaimonStatsUtils.calculateTotalSize(
       sparkSession.sessionState,
       table.name(),
       Some(table.location().toUri))
 
     val (mergedRecordCount, colStats) =
-      StatsUtils.computeColumnStats(sparkSession, relation, attributes)
+      PaimonStatsUtils.computeColumnStats(sparkSession, relation, attributes)
 
     val totalRecordCount = currentSnapshot.totalRecordCount()
     Preconditions.checkState(
@@ -113,7 +113,7 @@ case class PaimonAnalyzeTableColumnCommand(
     }
 
     columnsToAnalyze.foreach { attr =>
-      if (!StatsUtils.analyzeSupportsType(attr.dataType)) {
+      if (!PaimonStatsUtils.analyzeSupportsType(attr.dataType)) {
        throw new UnsupportedOperationException(
          s"Analyzing on col: ${attr.name}, data type: ${attr.dataType} is not supported.")
      }
@@ -148,12 +148,12 @@ case class PaimonAnalyzeTableColumnCommand(
   }
 
   /**
-   * Convert data from spark type to paimon, only cover datatype meet [[StatsUtils.hasMinMax]]
+   * Convert data from spark type to paimon, only cover datatype meet [[PaimonStatsUtils.hasMinMax]]
    * currently.
    */
   private def toPaimonData(o: Any, dataType: DataType): Any = {
     dataType match {
-      case d if !StatsUtils.hasMinMax(d) =>
+      case d if !PaimonStatsUtils.hasMinMax(d) =>
         // should not reach here
         throw new UnsupportedOperationException(s"Unsupported data type $d, value is $o.")
       case _: DecimalType =>
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonCommand.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonCommand.scala
index 6a9afa9c2b40..da91ab1fbd60 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonCommand.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonCommand.scala
@@ -28,8 +28,8 @@ import org.apache.paimon.table.sink.{CommitMessage, CommitMessageImpl}
 import org.apache.paimon.table.source.DataSplit
 import org.apache.paimon.types.RowType
 
+import org.apache.spark.sql.PaimonUtils.createDataset
 import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.Utils.createDataset
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
 import org.apache.spark.sql.catalyst.expressions.Literal.TrueLiteral
 import org.apache.spark.sql.catalyst.plans.logical.{Filter => FilterLogicalNode}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonDynamicPartitionOverwriteCommand.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonDynamicPartitionOverwriteCommand.scala
index 24056142f30d..1edcc99b8ec5 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonDynamicPartitionOverwriteCommand.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonDynamicPartitionOverwriteCommand.scala
@@ -23,7 +23,7 @@ import org.apache.paimon.spark.DynamicOverWrite
 import org.apache.paimon.table.FileStoreTable
 
 import org.apache.spark.sql.{Row, SparkSession}
-import org.apache.spark.sql.Utils.createDataset
+import org.apache.spark.sql.PaimonUtils.createDataset
 import org.apache.spark.sql.catalyst.analysis.NamedRelation
 import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan, V2WriteCommand}
 import org.apache.spark.sql.execution.command.RunnableCommand
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/UpdatePaimonTableCommand.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/UpdatePaimonTableCommand.scala
index fbfe01a3f8c5..6c16ce5e8b8b 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/UpdatePaimonTableCommand.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/UpdatePaimonTableCommand.scala
@@ -27,7 +27,7 @@ import org.apache.paimon.table.sink.CommitMessage
 import org.apache.paimon.types.RowKind
 
 import org.apache.spark.sql.{Column, Row, SparkSession}
-import org.apache.spark.sql.Utils.createDataset
+import org.apache.spark.sql.PaimonUtils.createDataset
 import org.apache.spark.sql.catalyst.expressions.{Alias, Expression, If}
 import org.apache.spark.sql.catalyst.expressions.Literal.TrueLiteral
 import org.apache.spark.sql.catalyst.plans.logical.{Assignment, Filter, Project, SupportsSubquery}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/sources/PaimonSink.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/sources/PaimonSink.scala
index 59651cf1e5db..54a7a1114c16 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/sources/PaimonSink.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/sources/PaimonSink.scala
@@ -23,7 +23,7 @@ import org.apache.paimon.spark.{InsertInto, Overwrite}
 import org.apache.paimon.spark.commands.{PaimonCommand, SchemaHelper, WriteIntoPaimonTable}
 import org.apache.paimon.table.FileStoreTable
 
-import org.apache.spark.sql.{DataFrame, SQLContext, Utils}
+import org.apache.spark.sql.{DataFrame, PaimonUtils, SQLContext}
 import org.apache.spark.sql.execution.streaming.Sink
 import org.apache.spark.sql.sources.AlwaysTrue
 import org.apache.spark.sql.streaming.OutputMode
@@ -44,7 +44,7 @@ class PaimonSink(
       InsertInto
     }
     partitionColumns.foreach(println)
-    val newData = Utils.createNewDataFrame(data)
+    val newData = PaimonUtils.createNewDataFrame(data)
     WriteIntoPaimonTable(originTable, saveMode, newData, options).run(sqlContext.sparkSession)
   }
 }
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/statistics/StatisticsHelperBase.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/statistics/StatisticsHelperBase.scala
index 85f40fdf34f9..275f17fe8278 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/statistics/StatisticsHelperBase.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/statistics/StatisticsHelperBase.scala
@@ -20,7 +20,7 @@ package org.apache.paimon.spark.statistics
 
 import org.apache.paimon.spark.PaimonColumnStats
 
-import org.apache.spark.sql.Utils
+import org.apache.spark.sql.PaimonUtils
 import org.apache.spark.sql.catalyst.{SQLConfHelper, StructFilters}
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, BoundReference, Expression}
 import org.apache.spark.sql.catalyst.plans.logical
@@ -86,7 +86,7 @@ trait StatisticsHelperBase extends SQLConfHelper {
     v1Stats.attributeStats.foreach {
       case (attr, v1ColStats) =>
         columnStatsMap.put(
-          Utils.fieldReference(attr.name),
+          PaimonUtils.fieldReference(attr.name),
           PaimonColumnStats(v1ColStats)
         )
     }
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/StatsUtils.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
similarity index 99%
rename from paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/StatsUtils.scala
rename to paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
index 57b4fbd29291..3e3a8c9d3806 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/StatsUtils.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
@@ -32,7 +32,7 @@ import java.net.URI
  * [[org.apache.spark.sql]] package, Hence, use this class to adapt then so that we can use them
  * indirectly.
  */
-object StatsUtils {
+object PaimonStatsUtils {
 
   def calculateTotalSize(
       sessionState: SessionState,
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/Utils.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/PaimonUtils.scala
similarity index 99%
rename from paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/Utils.scala
rename to paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/PaimonUtils.scala
index 4767dab39468..06a255ebbc62 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/Utils.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/PaimonUtils.scala
@@ -31,7 +31,7 @@ import org.apache.spark.util.{Utils => SparkUtils}
 * [[org.apache.spark.sql]] package, Hence, use this class to adapt then so that we can use them
 * indirectly.
 */
-object Utils {
+object PaimonUtils {
 
   /**
    * In the streaming write case, An "Queries with streaming sources must be executed with
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogUtils.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/PaimonCatalogUtils.scala
similarity index 98%
rename from paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogUtils.scala
rename to paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/PaimonCatalogUtils.scala
index 283fbf25427b..265c82866195 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogUtils.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/connector/catalog/PaimonCatalogUtils.scala
@@ -28,7 +28,7 @@ import org.apache.spark.util.Utils
 import scala.reflect.ClassTag
 import scala.util.control.NonFatal
 
-object CatalogUtils {
+object PaimonCatalogUtils {
 
   def buildExternalCatalog(conf: SparkConf, hadoopConf: Configuration): ExternalCatalog = {
     val externalCatalogClassName =

From 7b56bed2cc9a19c92a79c62fcaa7051f4ca7b073 Mon Sep 17 00:00:00 2001
From: zouxxyy
Date: Mon, 8 Apr 2024 18:03:13 +0800
Subject: [PATCH 2/2] 1

---
 .../spark/sql/{StatsUtils.scala => PaimonStatsUtils.scala} | 2 +-
 .../spark/sql/{StatsUtils.scala => PaimonStatsUtils.scala} | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
 rename paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/{StatsUtils.scala => PaimonStatsUtils.scala} (99%)
 rename paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/{StatsUtils.scala => PaimonStatsUtils.scala} (99%)

diff --git a/paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/StatsUtils.scala b/paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
similarity index 99%
rename from paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/StatsUtils.scala
rename to paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
index 802d1fe198d1..bcefa32e3848 100644
--- a/paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/StatsUtils.scala
+++ b/paimon-spark/paimon-spark-3.1/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.types.{BinaryType, BooleanType, DataType, DateType,
 
 import java.net.URI
 
-object StatsUtils {
+object PaimonStatsUtils {
 
   def calculateTotalSize(
       sessionState: SessionState,
diff --git a/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/StatsUtils.scala b/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
similarity index 99%
rename from paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/StatsUtils.scala
rename to paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
index 10dc54ff4c9f..f3a2ad3aa7aa 100644
--- a/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/StatsUtils.scala
+++ b/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/PaimonStatsUtils.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.types.{BinaryType, BooleanType, DataType, DateType,
 import java.net.URI
 
-object StatsUtils {
+object PaimonStatsUtils {
 
   def calculateTotalSize(
       sessionState: SessionState,
       tableName: String,
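The rename is mechanical, but the pattern behind the touched files is worth spelling out: each helper object is deliberately declared inside a Spark package so Paimon can reach internals that are `private[sql]` or `private[spark]`, and the `Paimon` prefix keeps these shims from colliding with Spark's own `Utils` and `CatalogUtils`. A minimal sketch of the pattern follows; only the object name and the `createDataset`/`bytesToString` signatures are confirmed by the hunks above, while the method bodies (`Dataset.ofRows`, `org.apache.spark.util.Utils`) are my assumption about what such a shim typically wraps.

```scala
// Sketch of the package-placement trick, NOT the actual file contents.
package org.apache.spark.sql

import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.util.{Utils => SparkUtils}

object PaimonUtils {

  // Dataset.ofRows is private[sql]; declaring this object in
  // org.apache.spark.sql makes it legal to call.
  def createDataset(spark: SparkSession, plan: LogicalPlan): Dataset[Row] =
    Dataset.ofRows(spark, plan)

  // org.apache.spark.util.Utils is private[spark]; same trick, one level up.
  def bytesToString(size: Long): String = SparkUtils.bytesToString(size)
}
```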
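`PaimonStatsUtils` plays the same role for the ANALYZE path: `PaimonAnalyzeTableColumnCommand` above calls `calculateTotalSize` and `computeColumnStats`, which presumably forward to Spark's `private[sql]` `CommandUtils`. A hedged sketch, assuming `CommandUtils.calculateLocationSize` and `CommandUtils.computeColumnStats` are the wrapped entry points; the diff confirms only the shim-side signatures.

```scala
// Sketch under the stated assumptions, NOT the renamed file itself.
package org.apache.spark.sql

import java.net.URI

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.{ColumnStat, LogicalPlan}
import org.apache.spark.sql.execution.command.CommandUtils
import org.apache.spark.sql.internal.SessionState

object PaimonStatsUtils {

  // Matches the call site: calculateTotalSize(sessionState, table.name(), Some(uri)).
  def calculateTotalSize(
      sessionState: SessionState,
      tableName: String,
      locationUri: Option[URI]): Long =
    CommandUtils.calculateLocationSize(sessionState, TableIdentifier(tableName), locationUri)

  // Matches: computeColumnStats(sparkSession, relation, attributes).
  def computeColumnStats(
      sparkSession: SparkSession,
      relation: LogicalPlan,
      columns: Seq[Attribute]): (Long, Map[Attribute, ColumnStat]) =
    CommandUtils.computeColumnStats(sparkSession, relation, columns)
}
```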
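Because the patch is a pure rename, a quick way to validate it is a small driver that exercises two renamed entry points exactly as the call sites above do. `RenameSmokeTest` is a hypothetical harness, not part of the patch; it also assumes `buildExternalCatalog` falls back to Spark's in-memory `ExternalCatalog` when no Hive support is configured, which the diff only hints at.

```scala
import org.apache.spark.sql.{PaimonUtils, SparkSession}
import org.apache.spark.sql.connector.catalog.PaimonCatalogUtils

object RenameSmokeTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("rename-check").getOrCreate()
    try {
      // Same helper PaimonSplitSizeMetric uses to render split sizes.
      println(PaimonUtils.bytesToString(10L * 1024 * 1024)) // e.g. "10.0 MiB"
      // Same call SparkGenericCatalog#initialize makes when wiring the session catalog.
      val external = PaimonCatalogUtils.buildExternalCatalog(
        spark.sparkContext.getConf,
        spark.sessionState.newHadoopConf())
      println(external.listDatabases().mkString(", "))
    } finally {
      spark.stop()
    }
  }
}
```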