Skip to content

Commit

Permalink
[spark] Load SparkShim via thread-safe lazy val and rename version-specific parsers to PaimonSpark3/4SqlExtensionsParser
Browse files Browse the repository at this point in the history
  • Loading branch information
YannByron committed Nov 26, 2024
1 parent 817728a commit 14b1371
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 13 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -25,19 +25,18 @@ import scala.collection.JavaConverters._
/** Load a [[SparkShim]]'s implementation. */
object SparkShimLoader {

private var sparkShim: SparkShim = _
private lazy val sparkShim: SparkShim = loadSparkShim()

def getSparkShim: SparkShim = {
if (sparkShim == null) {
sparkShim = loadSparkShim()
}
sparkShim
}

private def loadSparkShim(): SparkShim = {
val shims = ServiceLoader.load(classOf[SparkShim]).asScala
if (shims.size != 1) {
if (shims.isEmpty) {
throw new IllegalStateException("No available spark shim here.")
} else if (shims.size > 1) {
throw new IllegalStateException("Found more than one spark shim here.")
}
shims.head
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,10 @@
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.parser.extensions
package org.apache.paimon.spark.catalyst.parser.extensions

import org.apache.spark.sql.catalyst.parser.ParserInterface
import org.apache.spark.sql.catalyst.parser.extensions.AbstractPaimonSparkSqlExtensionsParser

class PaimonSparkSqlExtensionsParser(override val delegate: ParserInterface)
class PaimonSpark3SqlExtensionsParser(override val delegate: ParserInterface)
extends AbstractPaimonSparkSqlExtensionsParser(delegate) {}
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,13 @@

package org.apache.spark.sql.paimon.shims

import org.apache.paimon.spark.catalyst.parser.extensions.PaimonSpark3SqlExtensionsParser
import org.apache.paimon.spark.data.{Spark3ArrayData, Spark3InternalRow, SparkArrayData, SparkInternalRow}
import org.apache.paimon.types.{DataType, RowType}

import org.apache.spark.sql.{Column, SparkSession}
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
import org.apache.spark.sql.catalyst.parser.ParserInterface
import org.apache.spark.sql.catalyst.parser.extensions.PaimonSparkSqlExtensionsParser
import org.apache.spark.sql.catalyst.plans.logical.Aggregate
import org.apache.spark.sql.connector.catalog.{Identifier, Table, TableCatalog}
import org.apache.spark.sql.connector.expressions.Transform
Expand All @@ -35,7 +35,7 @@ import java.util.{Map => JMap}
class Spark3Shim extends SparkShim {

override def createSparkParser(delegate: ParserInterface): ParserInterface = {
new PaimonSparkSqlExtensionsParser(delegate)
new PaimonSpark3SqlExtensionsParser(delegate)
}

override def createSparkInternalRow(rowType: RowType): SparkInternalRow = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,12 @@
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.parser.extensions
package org.apache.paimon.spark.catalyst.parser.extensions

import org.apache.spark.sql.catalyst.parser.{CompoundBody, ParserInterface}
import org.apache.spark.sql.catalyst.parser.extensions.AbstractPaimonSparkSqlExtensionsParser

class PaimonSparkSqlExtensionsParser(override val delegate: ParserInterface)
class PaimonSpark4SqlExtensionsParser(override val delegate: ParserInterface)
extends AbstractPaimonSparkSqlExtensionsParser(delegate) {

def parseScript(sqlScriptText: String): CompoundBody = delegate.parseScript(sqlScriptText)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,13 @@

package org.apache.spark.sql.paimon.shims

import org.apache.paimon.spark.catalyst.parser.extensions.PaimonSpark4SqlExtensionsParser
import org.apache.paimon.spark.data.{Spark4ArrayData, Spark4InternalRow, SparkArrayData, SparkInternalRow}
import org.apache.paimon.types.{DataType, RowType}

import org.apache.spark.sql.{Column, SparkSession}
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
import org.apache.spark.sql.catalyst.parser.ParserInterface
import org.apache.spark.sql.catalyst.parser.extensions.PaimonSparkSqlExtensionsParser
import org.apache.spark.sql.catalyst.plans.logical.Aggregate
import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Identifier, Table, TableCatalog}
import org.apache.spark.sql.connector.expressions.Transform
Expand All @@ -36,7 +36,7 @@ import java.util.{Map => JMap}
class Spark4Shim extends SparkShim {

override def createSparkParser(delegate: ParserInterface): ParserInterface = {
new PaimonSparkSqlExtensionsParser(delegate)
new PaimonSpark4SqlExtensionsParser(delegate)
}
override def createSparkInternalRow(rowType: RowType): SparkInternalRow = {
new Spark4InternalRow(rowType)
Expand Down

0 comments on commit 14b1371

Please sign in to comment.