
Commit

【1.1.15】code view fix (#286)
* code view fix
---------

Co-authored-by: huangKai-2323 <[email protected]>
v-kkhuang authored Sep 7, 2023
1 parent f4aa15e commit 17d6288
Showing 14 changed files with 50 additions and 49 deletions.
@@ -298,7 +298,9 @@ class DefaultEngineCreateService
val queueName = props.getOrDefault("wds.linkis.rm.yarnqueue", "default")
val newQueueName = queueName + "_" + queueRuleSuffix
props.put("wds.linkis.rm.yarnqueue", newQueueName)
logger.info(s"Switch queues according to queueRule with queue name : $queueName to $newQueueName")
logger.info(
s"Switch queues according to queueRule with queue name : $queueName to $newQueueName"
)
}

val timeoutEngineResourceRequest = TimeoutEngineResourceRequest(
@@ -90,7 +90,8 @@ class DriverAndYarnReqResourceService(
)

if (
StringUtils.isNotBlank(acrossClusterTask) && acrossClusterTask.toBoolean && StringUtils.isNotBlank(CPUThreshold) && StringUtils
StringUtils.isNotBlank(acrossClusterTask) && acrossClusterTask.toBoolean && StringUtils
.isNotBlank(CPUThreshold) && StringUtils
.isNotBlank(MemoryThreshold)
&& StringUtils
.isNotBlank(CPUPercentageThreshold) && StringUtils.isNotBlank(MemoryPercentageThreshold)
@@ -115,7 +116,10 @@ class DriverAndYarnReqResourceService(
if (!acrossClusterFlag) {
logger.info(s"user: $user, creator: $creator task not meet the threshold rule")

throw new RMWarnException(RMErrorCode.ACROSS_CLUSTER_RULE_FAILED.getErrorCode, RMErrorCode.ACROSS_CLUSTER_RULE_FAILED.getErrorDesc)
throw new RMWarnException(
RMErrorCode.ACROSS_CLUSTER_RULE_FAILED.getErrorCode,
RMErrorCode.ACROSS_CLUSTER_RULE_FAILED.getErrorDesc
)
}

logger.info(s"user: $user, creator: $creator task meet the threshold rule")
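
The condition above only evaluates the across-cluster rule when the acrossClusterTask flag is true and all four CPU/memory threshold properties are present; when the rule is not met, the service raises RMWarnException with ACROSS_CLUSTER_RULE_FAILED. As a rough standalone sketch of that guard in Java (the method and property names are simplifications, not the Linkis API):

public class AcrossClusterGuardSketch {

  private static boolean notBlank(String s) {
    return s != null && !s.trim().isEmpty();
  }

  /** True when the across-cluster flag is set and every threshold property is present. */
  static boolean shouldCheckRule(
      String acrossClusterTask,
      String cpuThreshold,
      String memoryThreshold,
      String cpuPercentageThreshold,
      String memoryPercentageThreshold) {
    return notBlank(acrossClusterTask)
        && Boolean.parseBoolean(acrossClusterTask)
        && notBlank(cpuThreshold)
        && notBlank(memoryThreshold)
        && notBlank(cpuPercentageThreshold)
        && notBlank(memoryPercentageThreshold);
  }

  public static void main(String[] args) {
    // All thresholds present: the rule is evaluated; in the real service a failed rule
    // raises RMWarnException(ACROSS_CLUSTER_RULE_FAILED).
    System.out.println(shouldCheckRule("true", "80", "80", "0.8", "0.8")); // true
    // A missing threshold skips the across-cluster path entirely.
    System.out.println(shouldCheckRule("true", "80", "", "0.8", "0.8"));   // false
  }
}
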
@@ -25,12 +25,12 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;

import org.junit.jupiter.api.*;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.jupiter.api.*;

/** StdLabelBuilderFactory Tester */
@SpringBootTest(classes = {StdLabelBuilderFactory.class})
public class StdLabelBuilderFactoryTest {
@@ -80,13 +80,12 @@ public void testCreateLabelForInLabelKeyInValueStreamOutLabelClassOutValueTypes(
}

@Test
public void test(){
Map input= new HashMap<String,String>();
input.put("userCreator","username-IDE");
input.put("yarnCluster","bdp-test");
input.put("executeOnce","true");
List<Label> res= stdLabelBuilderFactory.getLabels(input);
public void test() {
Map input = new HashMap<String, String>();
input.put("userCreator", "username-IDE");
input.put("yarnCluster", "bdp-test");
input.put("executeOnce", "true");
List<Label> res = stdLabelBuilderFactory.getLabels(input);
System.out.println(res);
}

}
3 changes: 2 additions & 1 deletion linkis-dist/package/db/linkis_dml.sql
@@ -89,7 +89,7 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`,
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.sql.shuffle.partitions=10;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.spill=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.locality.wait', '范围:0-3000,单位:毫秒', '任务调度本地等待时间', '3000', 'OFT', '[\"0\",\"1000\",\"2000\",\"3000\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0-3000, Unit: millisecond', 'Task Scheduling Local Waiting Time');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.memory.fraction', '范围:0.4,0.5,0.6,单位:百分比', '执行内存和存储内存的百分比', '0.6', 'OFT', '[\"0.4\",\"0.5\",\"0.6\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0.4, 0.5, 0.6, in percentage', 'Percentage Of Execution Memory And Storage Memory');

@@ -410,6 +410,7 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type)
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或则可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0);

-- 21 cluster Authority 22 db Authority
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0);
12 changes: 6 additions & 6 deletions linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_ddl.sql
@@ -17,16 +17,16 @@



ALTER TABLE `linkis_ps_udf_user_load` ADD CONSTRAINT `uniq_uid_uname` UNIQUE (`udf_id`, `user_name`);
ALTER TABLE `linkis_ps_bml_resources` ADD CONSTRAINT `uniq_rid_eflag` UNIQUE (`resource_id`, `enable_flag`);
ALTER TABLE linkis_ps_udf_user_load ADD CONSTRAINT uniq_uid_uname UNIQUE (`udf_id`, `user_name`);
ALTER TABLE linkis_ps_bml_resources ADD CONSTRAINT uniq_rid_eflag UNIQUE (`resource_id`, `enable_flag`);


ALTER TABLE `linkis_ps_configuration_config_key` ADD UNIQUE `uniq_key_ectype` (`key`,`engine_conn_type`);
ALTER TABLE linkis_ps_configuration_config_key ADD UNIQUE uniq_key_ectype (`key`,`engine_conn_type`);

ALTER TABLE `linkis_ps_configuration_config_key` modify column `engine_conn_type` varchar(50) DEFAULT '' COMMENT 'engine type,such as spark,hive etc';
ALTER TABLE linkis_ps_configuration_config_key modify column engine_conn_type varchar(50) DEFAULT '' COMMENT 'engine type,such as spark,hive etc';

ALTER TABLE linkis_ps_common_lock ADD COLUMN locker VARCHAR(255) NOT NULL COMMENT 'locker';

ALTER TABLE `linkis_ps_configuration_config_key` ADD column `template_required` tinyint(1) DEFAULT 0 COMMENT 'template required 0 none / 1 must'
ALTER TABLE linkis_ps_configuration_config_key ADD column template_required tinyint(1) DEFAULT 0 COMMENT 'template required 0 none / 1 must'

ALTER TABLE linkis_ps_configuration_config_value modify COLUMN config_value varchar(500);
ALTER TABLE linkis_ps_configuration_config_value modify COLUMN config_value varchar(500);
@@ -18,6 +18,7 @@
update linkis_ps_configuration_config_key set engine_conn_type = "" where engine_conn_type is NULL;

INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或则可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0);

update linkis_ps_configuration_config_key set template_required = 1 where `key` in (
"spark.executor.instances",
@@ -36,11 +37,11 @@ is_hidden, is_advanced, `level`,
treeName, boundary_type, en_treeName,
en_description, en_name)
VALUES(
'spark.conf', '多个参数使用分号[;]分隔 例如spark.sql.shuffle.partitions=10;', 'spark自定义配置参数',
'spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.spill=true;', 'spark自定义配置参数',
null, 'None', NULL, 'spark',
0, 1, 1,
'spark资源设置', 0, 'Spark Resource Settings',
'Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters');
'Multiple parameters are separated by semicolons [;] For example, spark.shuffle.compress=ture;', 'Spark Custom Configuration Parameters');

INSERT INTO `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(
@@ -134,7 +134,7 @@ object SparkConfiguration extends Logging {

val REPLACE_PACKAGE_TO_HEADER = "org.apache.linkis"

val SPARK_CONF = CommonVars[String]("spark.conf", "")
val LINKIS_SPARK_CONF = CommonVars[String]("spark.conf", "")
val SPARK_APPLICATION_ARGS = CommonVars("spark.app.args", "")
val SPARK_APPLICATION_MAIN_CLASS = CommonVars("spark.app.main.class", "")

@@ -21,8 +21,8 @@ import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.common.utils.Logging
import org.apache.linkis.engineplugin.spark.config.SparkConfiguration.{
ENGINE_JAR,
LINKIS_SPARK_CONF,
SPARK_APP_NAME,
SPARK_CONF,
SPARK_DEFAULT_EXTERNAL_JARS_PATH,
SPARK_DRIVER_CLASSPATH,
SPARK_DRIVER_EXTRA_JAVA_OPTIONS,
@@ -63,13 +63,13 @@ class SparkSubmitProcessEngineConnLaunchBuilder(builder: JavaProcessEngineConnLa
val executorCores = getValueAndRemove(properties, LINKIS_SPARK_EXECUTOR_CORES)
val executorMemory = getValueAndRemove(properties, LINKIS_SPARK_EXECUTOR_MEMORY)
val numExecutors = getValueAndRemove(properties, LINKIS_SPARK_EXECUTOR_INSTANCES)
val sparkcsonf = getValueAndRemove(properties, SPARK_CONF)
val sparkcsonf = getValueAndRemove(properties, LINKIS_SPARK_CONF)
// sparkcsonf DEMO:spark.sql.shuffle.partitions=10;spark.memory.fraction=0.6
if (StringUtils.isNotBlank(sparkcsonf)) {
val strArrary = sparkcsonf.split(";").toList
strArrary.foreach { keyAndValue =>
val key = keyAndValue.split("=")(0)
val value = keyAndValue.split("=")(1)
val key = keyAndValue.split("=")(0).trim
val value = keyAndValue.split("=")(1).trim
if (StringUtils.isNotBlank(key) && StringUtils.isNotBlank(value)) {
engineConnBuildRequest.engineConnCreationDesc.properties.put(key, value)
} else {
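
The builder above now trims each key and value when expanding the semicolon-separated spark.conf string (see the DEMO comment: spark.sql.shuffle.partitions=10;spark.memory.fraction=0.6). A minimal standalone sketch of that parsing in Java, assuming well-formed key=value pairs and using a plain map in place of the engine creation description (the real Scala code indexes the split result directly and logs a warning for blank parts):

import java.util.LinkedHashMap;
import java.util.Map;

public class SparkConfParseSketch {

  /** Split "k1=v1;k2=v2" into a map, trimming whitespace around keys and values. */
  static Map<String, String> parseSparkConf(String sparkConf) {
    Map<String, String> props = new LinkedHashMap<>();
    if (sparkConf == null || sparkConf.trim().isEmpty()) {
      return props;
    }
    for (String keyAndValue : sparkConf.split(";")) {
      String[] kv = keyAndValue.split("=", 2);
      if (kv.length < 2) {
        continue; // ignore malformed entries; the real builder logs a warning instead
      }
      String key = kv[0].trim();
      String value = kv[1].trim();
      if (!key.isEmpty() && !value.isEmpty()) {
        props.put(key, value);
      }
    }
    return props;
  }

  public static void main(String[] args) {
    // Spaces around '=' no longer leak into the property key or value after the trim() change.
    System.out.println(parseSparkConf("spark.sql.shuffle.partitions = 10; spark.memory.fraction=0.6"));
    // {spark.sql.shuffle.partitions=10, spark.memory.fraction=0.6}
  }
}
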
@@ -60,7 +60,7 @@ public class ConfigKey {

private String enTreeName;

private String templateRequired;
private Boolean templateRequired;

public String getEngineType() {
return engineType;
@@ -190,11 +190,11 @@ public void setEnTreeName(String enTreeName) {
this.enTreeName = enTreeName;
}

public String getTemplateRequired() {
public Boolean getTemplateRequired() {
return templateRequired;
}

public void setTemplateRequired(String templateRequired) {
public void setTemplateRequired(Boolean templateRequired) {
this.templateRequired = templateRequired;
}

@@ -187,11 +187,7 @@ public Message getItemList(
temp.put("validateRange", configKey.getValidateRange());
temp.put("boundaryType", configKey.getBoundaryType());
temp.put("defaultValue", configKey.getDefaultValue());
if (StringUtils.isNotBlank(configKey.getTemplateRequired())) {
temp.put("require", configKey.getTemplateRequired().equals("1"));
} else {
temp.put("require", "false");
}
temp.put("require", configKey.getTemplateRequired());
filterResult.add(temp);
}
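
With templateRequired switched from String to Boolean in ConfigKey (and the MyBatis jdbcType changed to TINYINT further down in this commit), the REST layer no longer needs the "1"-string comparison and can forward the flag directly, as the simplified temp.put above shows. A small before/after illustration with hypothetical stand-in beans (not the actual Linkis classes):

import java.util.HashMap;
import java.util.Map;

public class TemplateRequiredSketch {

  // Hypothetical stand-ins for the entity field before and after the type change.
  static class KeyBefore { String templateRequired = "1"; }
  static class KeyAfter { Boolean templateRequired = Boolean.TRUE; }

  public static void main(String[] args) {
    KeyBefore before = new KeyBefore();
    KeyAfter after = new KeyAfter();

    // Before: the REST layer interpreted the "0"/"1" string itself.
    Map<String, Object> oldStyle = new HashMap<>();
    oldStyle.put("require", "1".equals(before.templateRequired));

    // After: the Boolean column value is forwarded as-is.
    Map<String, Object> newStyle = new HashMap<>();
    newStyle.put("require", after.templateRequired);

    System.out.println(oldStyle + " " + newStyle); // {require=true} {require=true}
  }
}
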

@@ -371,6 +367,7 @@ private void sparkConfCheck(List<ConfigKeyValue> settings, String sparkConf)
throws ConfigurationException {
if (StringUtils.isNotBlank(sparkConf)) {
// Check if there are any duplicates in spark. conf
// spark.conf : spark.shuffle.compress=ture;spark.executor.memory=4g
String[] split = sparkConf.split(";");
int setSize =
Arrays.stream(split).map(s -> s.split("=")[0].trim()).collect(Collectors.toSet()).size();
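
The sparkConfCheck logic above detects duplicate keys in the user-supplied spark.conf by comparing the number of semicolon-separated entries with the size of the derived key set. A standalone sketch of that check (simplified; the real method throws ConfigurationException when a duplicate is found):

import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

public class SparkConfDuplicateCheckSketch {

  /** Returns true if any key appears more than once in "k1=v1;k2=v2;...". */
  static boolean hasDuplicateKeys(String sparkConf) {
    String[] split = sparkConf.split(";");
    Set<String> keys =
        Arrays.stream(split).map(s -> s.split("=")[0].trim()).collect(Collectors.toSet());
    return keys.size() != split.length;
  }

  public static void main(String[] args) {
    System.out.println(hasDuplicateKeys("spark.shuffle.compress=true;spark.executor.memory=4g")); // false
    System.out.println(hasDuplicateKeys("spark.executor.memory=4g;spark.executor.memory=8g"));    // true
  }
}
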
@@ -722,9 +719,6 @@ public Message saveBaseKeyValue(HttpServletRequest req, @RequestBody ConfigKey c
if (null == boundaryType) {
return Message.error("boundaryType cannot be empty");
}
if (null == configKey.getTemplateRequired()) {
configKey.setTemplateRequired("1");
}
if (StringUtils.isNotEmpty(defaultValue)
&& !validatorManager
.getOrCreateValidator(validateType)
@@ -276,11 +276,7 @@ public List<Object> queryKeyInfoList(List<String> uuidList) throws Configuration
temp.put("validateRange", configKey.getValidateRange());
temp.put("boundaryType", configKey.getBoundaryType());
temp.put("defaultValue", configKey.getDefaultValue());
if (StringUtils.isNotBlank(configKey.getTemplateRequired())) {
temp.put("require", configKey.getTemplateRequired().equals("1"));
} else {
temp.put("require", "false");
}
temp.put("require", configKey.getTemplateRequired());
temp.put("keyId", configKey.getId());

Long keyId = configKey.getId();
@@ -51,7 +51,7 @@
<result column="level" property="level" jdbcType="TINYINT"/>
<result column="treeName" property="treeName" jdbcType="VARCHAR"/>
<result column="boundary_type" property="boundaryType" jdbcType="TINYINT"/>
<result column="template_required" property="templateRequired" jdbcType="VARCHAR"/>
<result column="template_required" property="templateRequired" jdbcType="TINYINT"/>
</resultMap>

<resultMap id="ConfigValueMap" type="org.apache.linkis.configuration.entity.ConfigValue">
@@ -25,6 +25,7 @@
import org.apache.linkis.jobhistory.conversions.TaskConversions;
import org.apache.linkis.jobhistory.entity.*;
import org.apache.linkis.jobhistory.service.JobHistoryQueryService;
import org.apache.linkis.jobhistory.transitional.TaskStatus;
import org.apache.linkis.jobhistory.util.QueryUtils;
import org.apache.linkis.protocol.constants.TaskConstant;
import org.apache.linkis.server.Message;
@@ -101,7 +102,7 @@ public Message getTaskByID(HttpServletRequest req, @PathVariable("id") Long jobI
return Message.error(
"The corresponding job was not found, or there may be no permission to view the job"
+ "(没有找到对应的job,也可能是没有查看该job的权限)");
} else if (taskVO.getStatus().equals("Running")) {
} else if (taskVO.getStatus().equals(TaskStatus.Running.toString())) {
// 任务运行时不显示异常信息(Do not display exception information during task runtime)
taskVO.setErrCode(null);
taskVO.setErrDesc(null);
@@ -84,8 +84,6 @@ public class FsRestfulApi {

private final Logger LOGGER = LoggerFactory.getLogger(getClass());



/**
* check 权限
*
@@ -94,8 +92,10 @@ public class FsRestfulApi {
* @return
*/
private boolean checkIsUsersDirectory(String requestPath, String userName, Boolean withAdmin) {
// 配置文件默认关闭检查,withadmin默认true,特殊情况传false 开启权限检查(
// The configuration file defaults to disable checking, with admin defaulting to true, and in special cases, false is passed to enable permission checking)
boolean ownerCheck = WorkSpaceConfiguration.FILESYSTEM_PATH_CHECK_OWNER.getValue();
if (!ownerCheck) {
if (!ownerCheck && withAdmin) {
LOGGER.debug("not check filesystem owner.");
return true;
}
@@ -107,6 +107,10 @@ private boolean checkIsUsersDirectory(String requestPath, String userName, Boole

String workspacePath = hdfsUserRootPathPrefix + userName + hdfsUserRootPathSuffix;
String enginconnPath = localUserRootPath + userName;
// 管理员修改其他用户文件目录时,会导致用户无法使用文件,故此优化管理员不能修改(When administrators modify the file directory of other
// users,
// it will cause users to be unable to use the file, so the optimization administrator cannot
// modify it)
if (withAdmin && Configuration.isJobHistoryAdmin(userName)) {
workspacePath = hdfsUserRootPathPrefix;
enginconnPath = localUserRootPath;
@@ -119,10 +123,9 @@ private boolean checkIsUsersDirectory(String requestPath, String userName, Boole
}

private boolean checkIsUsersDirectory(String requestPath, String userName) {
return checkIsUsersDirectory(requestPath, userName, true);
return checkIsUsersDirectory(requestPath, userName, true);
}
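
As shown in these hunks, the owner check is now skipped only when withAdmin is also true, and for job-history admins the allowed prefixes fall back from the per-user directories to the root workspace/engineconn paths. A simplified standalone sketch of that decision flow (the path prefixes and the admin lookup are placeholders, not the actual Linkis configuration):

public class PathOwnerCheckSketch {

  // Placeholder values standing in for WorkSpaceConfiguration / Configuration lookups.
  static final boolean OWNER_CHECK_ENABLED = true;
  static final String HDFS_USER_ROOT_PREFIX = "hdfs:///tmp/linkis/";
  static final String LOCAL_USER_ROOT = "file:///tmp/linkis/";

  static boolean isAdmin(String userName) {
    return "hadoop".equals(userName); // placeholder admin check
  }

  /** Simplified version of the checkIsUsersDirectory flow after this commit. */
  static boolean checkIsUsersDirectory(String requestPath, String userName, boolean withAdmin) {
    // Skipping the owner check is only allowed on the withAdmin code path.
    if (!OWNER_CHECK_ENABLED && withAdmin) {
      return true;
    }
    String workspacePath = HDFS_USER_ROOT_PREFIX + userName;
    String enginconnPath = LOCAL_USER_ROOT + userName;
    // Admins may touch other users' directories, so the allowed prefixes widen to the roots.
    if (withAdmin && isAdmin(userName)) {
      workspacePath = HDFS_USER_ROOT_PREFIX;
      enginconnPath = LOCAL_USER_ROOT;
    }
    return requestPath.startsWith(workspacePath) || requestPath.startsWith(enginconnPath);
  }

  public static void main(String[] args) {
    System.out.println(checkIsUsersDirectory("file:///tmp/linkis/alice/a.sql", "alice", true)); // true
    System.out.println(checkIsUsersDirectory("file:///tmp/linkis/bob/a.sql", "alice", true));   // false
    System.out.println(checkIsUsersDirectory("file:///tmp/linkis/bob/a.sql", "hadoop", true));  // true (admin)
  }
}
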


@ApiOperation(value = "getUserRootPath", notes = "get user root path", response = Message.class)
@ApiImplicitParams({
@ApiImplicitParam(name = "pathType", required = false, dataType = "String", value = "path type")
