From a7a7ecc2d7716e4393e3baa51ac9915c2b078b00 Mon Sep 17 00:00:00 2001
From: peacewong
Date: Wed, 6 Nov 2024 20:02:41 +0800
Subject: [PATCH] Turn off use of secure random by default

close #5196
---
 .../engineplugin/spark/config/SparkConfiguration.scala  | 5 ++++-
 .../spark/executor/SparkPythonExecutor.scala            | 9 +++++++--
 2 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
index a493c5ff37..429048c77f 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
@@ -156,7 +156,7 @@ object SparkConfiguration extends Logging {
     CommonVars("wds.linkis.spark.engineconn.fatal.log", "error writing class;OutOfMemoryError")
 
   val PYSPARK_PYTHON3_PATH =
-    CommonVars[String]("pyspark.python3.path", "/appcom/Install/anaconda3/bin/python")
+    CommonVars[String]("pyspark.python3.path", "python3")
 
   val ENABLE_REPLACE_PACKAGE_NAME =
     CommonVars("wds.linkis.spark.engine.scala.replace_package_header.enable", true)
@@ -182,6 +182,9 @@ object SparkConfiguration extends Logging {
 
   val LINKIS_SPARK_ETL_SUPPORT_HUDI = CommonVars[Boolean]("linkis.spark.etl.support.hudi", false)
 
+  val LINKIS_PYSPARK_USE_SECURE_RANDOM =
+    CommonVars[Boolean]("linkis.pyspark.use.secure.random", false).getValue
+
   val SCALA_PARSE_APPEND_CODE =
     CommonVars("linkis.scala.parse.append.code", "val linkisVar=1").getValue
 
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
index 5d4305c67c..f947db9338 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
@@ -46,6 +46,7 @@ import org.apache.spark.sql.execution.datasources.csv.UDF
 
 import java.io._
 import java.net.InetAddress
+import java.security.SecureRandom
 import java.util
 
 import scala.collection.JavaConverters._
@@ -76,7 +77,12 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
   private val lineOutputStream = new RsOutputStream
   val sqlContext = sparkEngineSession.sqlContext
   val SUCCESS = "success"
-  private lazy val py4jToken: String = SecureRandomStringUtils.randomAlphanumeric(256)
+
+  private lazy val py4jToken: String = if (SparkConfiguration.LINKIS_PYSPARK_USE_SECURE_RANDOM) {
+    SecureRandomStringUtils.randomAlphanumeric(256)
+  } else {
+    SecureRandom.getInstance("SHA1PRNG").nextInt(100000).toString
+  }
 
   private lazy val gwBuilder: GatewayServerBuilder = {
     val builder = new GatewayServerBuilder()
@@ -152,7 +158,6 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
     )
     val userDefinePythonVersion = engineCreationContext.getOptions
       .getOrDefault("spark.python.version", "python")
-      .toString
       .toLowerCase()
     val sparkPythonVersion =
      if (
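
Note: for reviewers, here is a minimal standalone Scala sketch of the token-selection logic this patch introduces. It is illustrative only: the object name Py4jTokenSketch, the system-property lookup, and the secureAlphanumeric helper are stand-ins for Linkis' CommonVars config mechanism and SecureRandomStringUtils, which are not reproduced here. With the flag on, a 256-character alphanumeric token is drawn from SecureRandom; with the flag off (the new default), a short numeric token from the SHA1PRNG provider is used instead.

import java.security.SecureRandom

object Py4jTokenSketch {

  // Stand-in for SparkConfiguration.LINKIS_PYSPARK_USE_SECURE_RANDOM,
  // which the patch defines via CommonVars[Boolean](..., false).getValue.
  val useSecureRandom: Boolean =
    sys.props.getOrElse("linkis.pyspark.use.secure.random", "false").toBoolean

  // Illustrative equivalent of SecureRandomStringUtils.randomAlphanumeric(n):
  // draws n characters uniformly from [a-zA-Z0-9] using SecureRandom.
  private def secureAlphanumeric(length: Int): String = {
    val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')
    val rnd = new SecureRandom()
    Iterator.continually(chars(rnd.nextInt(chars.length))).take(length).mkString
  }

  // lazy, as in the patch, so the token is only generated when the
  // py4j gateway actually needs it.
  lazy val py4jToken: String =
    if (useSecureRandom) secureAlphanumeric(256)
    else SecureRandom.getInstance("SHA1PRNG").nextInt(100000).toString

  def main(args: Array[String]): Unit = println(py4jToken)
}

Run with -Dlinkis.pyspark.use.secure.random=true to exercise the secure path; without the property it falls through to the short SHA1PRNG token, matching the patch's default of false.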