diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SaveMode.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SaveMode.scala
index c4dae5e3ddd8..bcd6f68ab1a1 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SaveMode.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SaveMode.scala
@@ -23,15 +23,15 @@ import org.apache.spark.sql.sources.{AlwaysTrue, Filter}
 
 sealed private[spark] trait SaveMode extends Serializable
 
-object InsertInto extends SaveMode
+case object InsertInto extends SaveMode
 
 case class Overwrite(filters: Option[Filter]) extends SaveMode
 
-object DynamicOverWrite extends SaveMode
+case object DynamicOverWrite extends SaveMode
 
-object ErrorIfExists extends SaveMode
+case object ErrorIfExists extends SaveMode
 
-object Ignore extends SaveMode
+case object Ignore extends SaveMode
 
 object SaveMode {
   def transform(saveMode: SparkSaveMode): SaveMode = {
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala
index e45d15aa9017..39b1947e4f37 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala
@@ -107,4 +107,8 @@ case class SparkTable(table: Table)
         throw new RuntimeException("Only FileStoreTable can be written.")
     }
   }
+
+  override def toString: String = {
+    s"${table.getClass.getSimpleName}[${table.fullName()}]"
+  }
 }
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkWrite.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkWrite.scala
index 7e7919592eb7..fd43decfc6ad 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkWrite.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkWrite.scala
@@ -35,4 +35,8 @@ class SparkWrite(val table: FileStoreTable, saveMode: SaveMode, options: Options
       WriteIntoPaimonTable(table, saveMode, data, options).run(data.sparkSession)
     }
   }
+
+  override def toString: String = {
+    s"table: ${table.fullName()}, saveMode: $saveMode, options: ${options.toMap}"
+  }
 }
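
Not part of the patch, just a hedged illustration of what the change buys: turning the save modes into case objects gives them a compiler-generated toString (plus equals/hashCode), so interpolating them into the new SparkWrite.toString prints "saveMode: InsertInto" instead of an opaque object reference; the added toString overrides on SparkTable and SparkWrite serve the same readability purpose wherever Spark logs or prints those objects. The names in the sketch below (PlainInsertInto, SaveModeToStringDemo) are invented for the demo and do not exist in Paimon.

// Standalone Scala sketch, not Paimon code: compares the default
// Object.toString of a plain object with the generated toString of a case object.
sealed trait SaveMode extends Serializable

object PlainInsertInto extends SaveMode // plain object: inherits Object.toString
case object InsertInto extends SaveMode // case object: toString returns "InsertInto"

object SaveModeToStringDemo {
  def main(args: Array[String]): Unit = {
    println(s"saveMode: $PlainInsertInto") // prints something like "saveMode: PlainInsertInto$@1b6d3586"
    println(s"saveMode: $InsertInto")      // prints "saveMode: InsertInto"
  }
}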