From bf8d9764633c7adddb385f8f0a630b2d33899e9d Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Sat, 16 Dec 2023 21:35:05 +0800 Subject: [PATCH] refactor-push-dolphinscheduler --- .../dinky/controller/SchedulerController.java | 206 +------- .../org/dinky/service/SchedulerService.java | 64 +++ .../service/impl/SchedulerServiceImpl.java | 284 +++++++++++ .../dinky/data/exception/BusException.java | 7 + .../resources/i18n/messages_en_US.properties | 14 +- .../resources/i18n/messages_zh_CN.properties | 470 +++++++++--------- .../java/org/dinky/job/JobContextHolder.java | 42 -- .../dinky/scheduler/client/TaskClient.java | 13 +- .../scheduler/model/DinkyTaskRequest.java | 104 ++++ .../dinky/scheduler/model/TaskRequest.java | 12 +- dinky-web/src/locales/en-US/pages.ts | 22 + dinky-web/src/locales/zh-CN/pages.ts | 22 + .../HeaderContainer/PushDolphin/constants.tsx | 83 ++-- .../HeaderContainer/PushDolphin/function.tsx | 50 ++ .../HeaderContainer/PushDolphin/index.tsx | 310 ++++++------ .../DataStudio/HeaderContainer/index.tsx | 112 +++-- dinky-web/src/types/Studio/data.d.ts | 27 +- 17 files changed, 1150 insertions(+), 692 deletions(-) create mode 100644 dinky-admin/src/main/java/org/dinky/service/SchedulerService.java create mode 100644 dinky-admin/src/main/java/org/dinky/service/impl/SchedulerServiceImpl.java delete mode 100644 dinky-core/src/main/java/org/dinky/job/JobContextHolder.java create mode 100644 dinky-scheduler/src/main/java/org/dinky/scheduler/model/DinkyTaskRequest.java create mode 100644 dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/function.tsx diff --git a/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java b/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java index 9cbbfa3ddf..3c15894348 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java @@ -20,41 +20,22 @@ package org.dinky.controller; import org.dinky.data.enums.Status; -import org.dinky.data.model.Catalogue; -import org.dinky.data.model.SystemConfiguration; import org.dinky.data.result.Result; -import org.dinky.init.SystemInit; -import org.dinky.scheduler.client.ProcessClient; -import org.dinky.scheduler.client.TaskClient; -import org.dinky.scheduler.enums.ReleaseState; -import org.dinky.scheduler.exception.SchedulerException; -import org.dinky.scheduler.model.DagData; -import org.dinky.scheduler.model.DinkyTaskParams; -import org.dinky.scheduler.model.ProcessDefinition; -import org.dinky.scheduler.model.Project; +import org.dinky.scheduler.model.DinkyTaskRequest; import org.dinky.scheduler.model.TaskDefinition; import org.dinky.scheduler.model.TaskMainInfo; -import org.dinky.scheduler.model.TaskRequest; -import org.dinky.service.CatalogueService; +import org.dinky.service.SchedulerService; import java.util.List; -import javax.validation.Valid; - import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; -import com.google.common.base.Strings; - -import cn.hutool.json.JSONArray; -import cn.hutool.json.JSONObject; 
-import cn.hutool.json.JSONUtil; +import cn.hutool.core.collection.CollUtil; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiOperation; @@ -67,13 +48,12 @@ @RequiredArgsConstructor public class SchedulerController { - public static final String TASK_TYPE = "DINKY"; - private final ProcessClient processClient; - private final TaskClient taskClient; - private final CatalogueService catalogueService; + private final SchedulerService schedulerService; - /** 获取任务定义 */ - @GetMapping("/task") + /** + * get task definition + */ + @GetMapping("/queryTaskDefinition") @ApiOperation("Get Task Definition") @ApiImplicitParam( name = "dinkyTaskId", @@ -83,37 +63,17 @@ public class SchedulerController { paramType = "query", example = "1") public Result getTaskDefinition(@ApiParam(value = "dinky任务id") @RequestParam Long dinkyTaskId) { - Catalogue catalogue = - catalogueService.getOne(new LambdaQueryWrapper().eq(Catalogue::getTaskId, dinkyTaskId)); - if (catalogue == null) { - return Result.failed(Status.DS_GET_NODE_LIST_ERROR); - } - - Project dinkyProject = SystemInit.getProject(); - long projectCode = dinkyProject.getCode(); - - String processName = getDinkyNames(catalogue, 0); - String taskName = catalogue.getName() + ":" + catalogue.getId(); - TaskMainInfo taskMainInfo = taskClient.getTaskMainInfo(projectCode, processName, taskName); - TaskDefinition taskDefinition = null; - if (taskMainInfo == null) { - return Result.succeed(taskDefinition); - } - - taskDefinition = taskClient.getTaskDefinition(projectCode, taskMainInfo.getTaskCode()); - if (taskDefinition == null) { - return Result.failed(Status.DS_WORK_FLOW_NOT_SAVE); + TaskDefinition taskDefinitionInfo = schedulerService.getTaskDefinitionInfo(dinkyTaskId); + if (taskDefinitionInfo == null) { + return Result.failed(Status.DS_TASK_NOT_EXIST); } - - taskDefinition.setProcessDefinitionCode(taskMainInfo.getProcessDefinitionCode()); - taskDefinition.setProcessDefinitionName(taskMainInfo.getProcessDefinitionName()); - taskDefinition.setProcessDefinitionVersion(taskMainInfo.getProcessDefinitionVersion()); - taskDefinition.setUpstreamTaskMap(taskMainInfo.getUpstreamTaskMap()); - return Result.succeed(taskDefinition); + return Result.succeed(taskDefinitionInfo); } - /** 获取前置任务定义集合 */ - @GetMapping("/upstream/tasks") + /** + * query upstream task + */ + @GetMapping("/queryUpstreamTasks") @ApiOperation("Get Upstream Task Definition") @ApiImplicitParam( name = "dinkyTaskId", @@ -123,134 +83,22 @@ public Result getTaskDefinition(@ApiParam(value = "dinky任务id paramType = "query", example = "1") public Result> getTaskMainInfos(@ApiParam(value = "dinky任务id") @RequestParam Long dinkyTaskId) { - Catalogue catalogue = - catalogueService.getOne(new LambdaQueryWrapper().eq(Catalogue::getTaskId, dinkyTaskId)); - if (catalogue == null) { - return Result.failed(Status.DS_GET_NODE_LIST_ERROR); + List taskMainInfos = schedulerService.getTaskMainInfos(dinkyTaskId); + if (CollUtil.isEmpty(taskMainInfos)) { + return Result.failed(); } - - long projectCode = SystemInit.getProject().getCode(); - String processName = getDinkyNames(catalogue, 0); - List taskMainInfos = taskClient.getTaskMainInfos(projectCode, processName, ""); - // 去掉本身 - taskMainInfos.removeIf(taskMainInfo -> - (catalogue.getName() + ":" + catalogue.getId()).equalsIgnoreCase(taskMainInfo.getTaskName())); - return Result.succeed(taskMainInfos); } - /** 创建任务定义 */ - @PostMapping("/task") - @ApiOperation("Create Task Definition") - public Result 
createTaskDefinition( - @ApiParam(value = "前置任务编号 列表") @RequestParam(required = false) List upstreamCodes, - @ApiParam(value = "dinky任务id") @RequestParam Long dinkyTaskId, - @Valid @RequestBody TaskRequest taskRequest) { - DinkyTaskParams dinkyTaskParams = new DinkyTaskParams(); - dinkyTaskParams.setTaskId(dinkyTaskId.toString()); - dinkyTaskParams.setAddress( - SystemConfiguration.getInstances().getDinkyAddr().getValue()); - taskRequest.setTaskParams(JSONUtil.parseObj(dinkyTaskParams).toString()); - taskRequest.setTaskType(TASK_TYPE); - - Catalogue catalogue = - catalogueService.getOne(new LambdaQueryWrapper().eq(Catalogue::getTaskId, dinkyTaskId)); - if (catalogue == null) { - return Result.failed(Status.DS_GET_NODE_LIST_ERROR); + /** + * create or update + */ + @PostMapping("/createOrUpdateTaskDefinition") + @ApiOperation("Create or Update Task Definition") + public Result createOrUpdateTaskDefinition(@RequestBody DinkyTaskRequest dinkyTaskRequest) { + if (schedulerService.pushAddTask(dinkyTaskRequest)) { + return Result.succeed(Status.DS_ADD_TASK_DEFINITION_SUCCESS); } - - String processName = getDinkyNames(catalogue, 0); - long projectCode = SystemInit.getProject().getCode(); - ProcessDefinition process = processClient.getProcessDefinitionInfo(projectCode, processName); - - String taskName = catalogue.getName() + ":" + catalogue.getId(); - taskRequest.setName(taskName); - if (process == null) { - Long taskCode = taskClient.genTaskCode(projectCode); - taskRequest.setCode(taskCode); - JSONObject jsonObject = JSONUtil.parseObj(taskRequest); - JSONArray array = new JSONArray(); - array.set(jsonObject); - processClient.createProcessDefinition(projectCode, processName, taskCode, array.toString()); - return Result.succeed(Status.DS_ADD_WORK_FLOW_DEFINITION_SUCCESS); - } - - if (process.getReleaseState() == ReleaseState.ONLINE) { - return Result.failed(Status.DS_WORK_FLOW_DEFINITION_ONLINE, (Object) processName); - } - - TaskMainInfo taskDefinitionInfo = taskClient.getTaskMainInfo(projectCode, processName, taskName); - if (taskDefinitionInfo != null) { - return Result.failed(Status.DS_WORK_FLOW_DEFINITION_TASK_NAME_EXIST, processName, taskName); - } - - Long taskCode = taskClient.genTaskCode(projectCode); - taskRequest.setCode(taskCode); - - String taskDefinitionJsonObj = JSONUtil.toJsonStr(taskRequest); - taskClient.createTaskDefinition(projectCode, process.getCode(), upstreamCodes, taskDefinitionJsonObj); - return Result.succeed(Status.DS_ADD_TASK_DEFINITION_SUCCESS); } - - /** 更新任务定义 */ - @PutMapping("/task") - @ApiOperation("Update Task Definition") - public Result updateTaskDefinition( - @ApiParam(value = "项目编号") @RequestParam long projectCode, - @ApiParam(value = "工作流定义编号") @RequestParam long processCode, - @ApiParam(value = "任务定义编号") @RequestParam long taskCode, - @ApiParam(value = "前置任务编号 ") @RequestParam(required = false) List upstreamCodes, - @Valid @RequestBody TaskRequest taskRequest) { - - TaskDefinition taskDefinition = taskClient.getTaskDefinition(projectCode, taskCode); - if (taskDefinition == null) { - return Result.failed(Status.DS_TASK_NOT_EXIST); - } - - if (!TASK_TYPE.equals(taskDefinition.getTaskType())) { - return Result.failed(Status.DS_TASK_TYPE_NOT_SUPPORT, (Object) taskDefinition.getTaskType()); - } - - DagData dagData = processClient.getProcessDefinitionInfo(projectCode, processCode); - if (dagData == null) { - return Result.failed(Status.DS_WORK_FLOW_DEFINITION_NOT_EXIST); - } - - ProcessDefinition process = dagData.getProcessDefinition(); - if (process == null) { 
- return Result.failed(Status.DS_WORK_FLOW_DEFINITION_NOT_EXIST); - } - - if (process.getReleaseState() == ReleaseState.ONLINE) { - return Result.failed(Status.DS_WORK_FLOW_DEFINITION_ONLINE, (Object) process.getName()); - } - - taskRequest.setName(taskDefinition.getName()); - taskRequest.setTaskParams(taskDefinition.getTaskParams()); - taskRequest.setTaskType(TASK_TYPE); - - String taskDefinitionJsonObj = JSONUtil.toJsonStr(taskRequest); - taskClient.updateTaskDefinition(projectCode, taskCode, upstreamCodes, taskDefinitionJsonObj); - return Result.succeed(Status.MODIFY_SUCCESS); - } - - private String getDinkyNames(Catalogue catalogue, int i) { - if (i == 3 || catalogue.getParentId().equals(0)) { - return ""; - } - - catalogue = catalogueService.getById(catalogue.getParentId()); - if (catalogue == null) { - throw new SchedulerException("Get Node List Error"); - } - - String name = i == 0 ? catalogue.getName() + ":" + catalogue.getId() : catalogue.getName(); - String next = getDinkyNames(catalogue, ++i); - - if (Strings.isNullOrEmpty(next)) { - return name; - } - return name + "_" + next; - } } diff --git a/dinky-admin/src/main/java/org/dinky/service/SchedulerService.java b/dinky-admin/src/main/java/org/dinky/service/SchedulerService.java new file mode 100644 index 0000000000..54b04fc042 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/service/SchedulerService.java @@ -0,0 +1,64 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.service; + +import org.dinky.scheduler.model.DinkyTaskRequest; +import org.dinky.scheduler.model.TaskDefinition; +import org.dinky.scheduler.model.TaskMainInfo; + +import java.util.List; + +public interface SchedulerService { + + /** + * Pushes the specified DinkyTaskRequest to the task queue. + * + * @param dinkyTaskRequest the DinkyTaskRequest to be added to the task queue + * @return true if the task was successfully added, false otherwise + */ + boolean pushAddTask(DinkyTaskRequest dinkyTaskRequest); + + /** + * Pushes an update of an existing task definition to DolphinScheduler. + * + * @param projectCode the DolphinScheduler project code + * @param processCode the workflow (process) definition code + * @param taskCode the task definition code + * @param dinkyTaskRequest the DinkyTaskRequest containing the updated task details + * @return true if the task definition was successfully updated, false otherwise + */ + boolean pushUpdateTask(long projectCode, long processCode, long taskCode, DinkyTaskRequest dinkyTaskRequest); + + /** + * Retrieves a list of TaskMainInfo objects based on the provided dinkyTaskId. + * + * @param dinkyTaskId the ID of the task + * @return a list of TaskMainInfo objects + */ + List<TaskMainInfo> getTaskMainInfos(long dinkyTaskId); + + /** + * Retrieves the task definition information for a given dinky task ID.
+ * + * @param dinkyTaskId the ID of the dinky task + * @return the task definition information + */ + TaskDefinition getTaskDefinitionInfo(long dinkyTaskId); +} diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/SchedulerServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/SchedulerServiceImpl.java new file mode 100644 index 0000000000..bb3b76739b --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/service/impl/SchedulerServiceImpl.java @@ -0,0 +1,284 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.service.impl; + +import org.dinky.data.enums.Status; +import org.dinky.data.exception.BusException; +import org.dinky.data.model.Catalogue; +import org.dinky.data.model.SystemConfiguration; +import org.dinky.init.SystemInit; +import org.dinky.scheduler.client.ProcessClient; +import org.dinky.scheduler.client.TaskClient; +import org.dinky.scheduler.enums.ReleaseState; +import org.dinky.scheduler.exception.SchedulerException; +import org.dinky.scheduler.model.DagData; +import org.dinky.scheduler.model.DinkyTaskParams; +import org.dinky.scheduler.model.DinkyTaskRequest; +import org.dinky.scheduler.model.ProcessDefinition; +import org.dinky.scheduler.model.Project; +import org.dinky.scheduler.model.TaskDefinition; +import org.dinky.scheduler.model.TaskMainInfo; +import org.dinky.scheduler.model.TaskRequest; +import org.dinky.service.CatalogueService; +import org.dinky.service.SchedulerService; + +import java.util.List; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.google.common.base.Strings; + +import cn.hutool.core.bean.BeanUtil; +import cn.hutool.json.JSONArray; +import cn.hutool.json.JSONObject; +import cn.hutool.json.JSONUtil; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Service +@Slf4j +@RequiredArgsConstructor +public class SchedulerServiceImpl implements SchedulerService { + + public static final String TASK_TYPE = "DINKY"; + private final ProcessClient processClient; + private final TaskClient taskClient; + private final CatalogueService catalogueService; + + /** + * Pushes the specified DinkyTaskRequest to the task queue. 
+ * + * @param dinkyTaskRequest the DinkyTaskRequest to be added to the task queue + * @return true if the task was successfully added, false otherwise + */ + @Override + public boolean pushAddTask(DinkyTaskRequest dinkyTaskRequest) { + + DinkyTaskParams dinkyTaskParams = new DinkyTaskParams(); + dinkyTaskParams.setTaskId(dinkyTaskRequest.getTaskId()); + dinkyTaskParams.setAddress( + SystemConfiguration.getInstances().getDinkyAddr().getValue()); + dinkyTaskRequest.setTaskParams(JSONUtil.parseObj(dinkyTaskParams).toString()); + dinkyTaskRequest.setTaskType(TASK_TYPE); + + Catalogue catalogue = catalogueService.getOne( + new LambdaQueryWrapper().eq(Catalogue::getTaskId, dinkyTaskRequest.getTaskId())); + if (catalogue == null) { + log.error(Status.DS_GET_NODE_LIST_ERROR.getMessage()); + throw new BusException(Status.DS_GET_NODE_LIST_ERROR); + } + + String processName = getDinkyNames(catalogue, 0); + long projectCode = SystemInit.getProject().getCode(); + ProcessDefinition process = processClient.getProcessDefinitionInfo(projectCode, processName); + + String taskName = catalogue.getName() + ":" + catalogue.getId(); + dinkyTaskRequest.setName(taskName); + + TaskRequest taskRequest = new TaskRequest(); + + if (process == null) { + Long taskCode = taskClient.genTaskCode(projectCode); + dinkyTaskRequest.setCode(taskCode); + BeanUtil.copyProperties(dinkyTaskRequest, taskRequest); + taskRequest.setTimeoutFlag(dinkyTaskRequest.getTimeoutFlag()); + taskRequest.setFlag(dinkyTaskRequest.getFlag()); + JSONObject jsonObject = JSONUtil.parseObj(taskRequest); + JSONArray array = new JSONArray(); + array.set(jsonObject); + processClient.createProcessDefinition(projectCode, processName, taskCode, array.toString()); + log.info(Status.DS_ADD_WORK_FLOW_DEFINITION_SUCCESS.getMessage()); + } + + if (process != null && process.getReleaseState() == ReleaseState.ONLINE) { + log.error(Status.DS_WORK_FLOW_DEFINITION_ONLINE.getMessage(), processName); + } + + TaskMainInfo taskMainInfo = taskClient.getTaskMainInfo(projectCode, processName, taskName, "DINKY"); + if (taskMainInfo != null) { + // if task name exist, update task definition + log.warn(Status.DS_WORK_FLOW_DEFINITION_TASK_NAME_EXIST.getMessage(), processName, taskName); + return pushUpdateTask( + projectCode, taskMainInfo.getProcessDefinitionCode(), taskMainInfo.getTaskCode(), dinkyTaskRequest); + } + + Long taskCode = taskClient.genTaskCode(projectCode); + dinkyTaskRequest.setCode(taskCode); + BeanUtil.copyProperties(dinkyTaskRequest, taskRequest); + taskRequest.setTimeoutFlag(dinkyTaskRequest.getTimeoutFlag()); + taskRequest.setFlag(dinkyTaskRequest.getFlag()); + String taskDefinitionJsonObj = JSONUtil.toJsonStr(taskRequest); + if (process != null) { + taskClient.createTaskDefinition( + projectCode, process.getCode(), dinkyTaskRequest.getUpstreamCodes(), taskDefinitionJsonObj); + log.info(Status.DS_ADD_TASK_DEFINITION_SUCCESS.getMessage()); + return true; + } + return false; + } + + /** + * Pushes an update task to the API. 
+ * + * @param projectCode the project code + * @param processCode the process code + * @param taskCode the task code + * @param dinkyTaskRequest the DinkyTaskRequest object containing task details + * @return true if the task is successfully updated, false otherwise + */ + @Override + public boolean pushUpdateTask( + long projectCode, long processCode, long taskCode, DinkyTaskRequest dinkyTaskRequest) { + TaskDefinition taskDefinition = taskClient.getTaskDefinition(projectCode, taskCode); + if (taskDefinition == null) { + log.error(Status.DS_TASK_NOT_EXIST.getMessage()); + throw new BusException(Status.DS_TASK_NOT_EXIST); + } + + if (!TASK_TYPE.equals(taskDefinition.getTaskType())) { + log.error(Status.DS_TASK_TYPE_NOT_SUPPORT.getMessage(), taskDefinition.getTaskType()); + throw new BusException(Status.DS_TASK_TYPE_NOT_SUPPORT, taskDefinition.getTaskType()); + } + + DagData dagData = processClient.getProcessDefinitionInfo(projectCode, processCode); + if (dagData == null) { + log.error(Status.DS_WORK_FLOW_DEFINITION_NOT_EXIST.getMessage()); + throw new BusException(Status.DS_WORK_FLOW_DEFINITION_NOT_EXIST); + } + + ProcessDefinition process = dagData.getProcessDefinition(); + if (process == null) { + log.error(Status.DS_WORK_FLOW_DEFINITION_NOT_EXIST.getMessage()); + throw new BusException(Status.DS_WORK_FLOW_DEFINITION_NOT_EXIST); + } + + if (process.getReleaseState() == ReleaseState.ONLINE) { + log.error(Status.DS_WORK_FLOW_DEFINITION_ONLINE.getMessage(), process.getName()); + throw new BusException(Status.DS_WORK_FLOW_DEFINITION_ONLINE, process.getName()); + } + TaskRequest taskRequest = new TaskRequest(); + + dinkyTaskRequest.setName(taskDefinition.getName()); + dinkyTaskRequest.setTaskParams(taskDefinition.getTaskParams()); + dinkyTaskRequest.setTaskType(TASK_TYPE); + BeanUtil.copyProperties(dinkyTaskRequest, taskRequest); + taskRequest.setTimeoutFlag(dinkyTaskRequest.getTimeoutFlag()); + taskRequest.setFlag(dinkyTaskRequest.getFlag()); + + String taskDefinitionJsonObj = JSONUtil.toJsonStr(taskRequest); + Long updatedTaskDefinition = taskClient.updateTaskDefinition( + projectCode, taskCode, dinkyTaskRequest.getUpstreamCodes(), taskDefinitionJsonObj); + if (updatedTaskDefinition != null && updatedTaskDefinition > 0) { + log.info(Status.MODIFY_SUCCESS.getMessage()); + return true; + } + log.error(Status.MODIFY_FAILED.getMessage()); + return false; + } + + /** + * Retrieves the list of TaskMainInfo objects for a given dinkyTaskId. + * + * @param dinkyTaskId the id of the dinky task + * @return the list of TaskMainInfo objects + */ + @Override + public List<TaskMainInfo> getTaskMainInfos(long dinkyTaskId) { + Catalogue catalogue = + catalogueService.getOne(new LambdaQueryWrapper<Catalogue>().eq(Catalogue::getTaskId, dinkyTaskId)); + if (catalogue == null) { + log.error(Status.DS_GET_NODE_LIST_ERROR.getMessage()); + throw new BusException(Status.DS_GET_NODE_LIST_ERROR); + } + long projectCode = SystemInit.getProject().getCode(); + List<TaskMainInfo> taskMainInfos = taskClient.getTaskMainInfos(projectCode, "", "", ""); + // exclude the current task itself + taskMainInfos.removeIf(taskMainInfo -> + (catalogue.getName() + ":" + catalogue.getId()).equalsIgnoreCase(taskMainInfo.getTaskName())); + return taskMainInfos; + } + + /** + * Retrieves the task definition information for a given dinkyTaskId.
+ * + * @param dinkyTaskId the ID of the dinky task + * @return the task definition information + */ + @Override + public TaskDefinition getTaskDefinitionInfo(long dinkyTaskId) { + Catalogue catalogue = + catalogueService.getOne(new LambdaQueryWrapper().eq(Catalogue::getTaskId, dinkyTaskId)); + if (catalogue == null) { + log.error(Status.DS_GET_NODE_LIST_ERROR.getMessage()); + throw new BusException(Status.DS_GET_NODE_LIST_ERROR); + } + + Project dinkyProject = SystemInit.getProject(); + long projectCode = dinkyProject.getCode(); + + String processName = getDinkyNames(catalogue, 0); + String taskName = catalogue.getName() + ":" + catalogue.getId(); + TaskMainInfo taskMainInfo = taskClient.getTaskMainInfo(projectCode, processName, taskName, "DINKY"); + TaskDefinition taskDefinition = null; + if (taskMainInfo == null) { + log.error(Status.DS_WORK_FLOW_DEFINITION_TASK_NAME_EXIST.getMessage(), processName, taskName); + throw new BusException(Status.DS_WORK_FLOW_DEFINITION_TASK_NAME_EXIST, processName, taskName); + } + + taskDefinition = taskClient.getTaskDefinition(projectCode, taskMainInfo.getTaskCode()); + if (taskDefinition == null) { + log.error(Status.DS_WORK_FLOW_NOT_SAVE.getMessage()); + throw new BusException(Status.DS_WORK_FLOW_NOT_SAVE); + } + + taskDefinition.setProcessDefinitionCode(taskMainInfo.getProcessDefinitionCode()); + taskDefinition.setProcessDefinitionName(taskMainInfo.getProcessDefinitionName()); + taskDefinition.setProcessDefinitionVersion(taskMainInfo.getProcessDefinitionVersion()); + taskDefinition.setUpstreamTaskMap(taskMainInfo.getUpstreamTaskMap()); + return taskDefinition; + } + + /** + * Retrieves the dinky names from the given catalogue and index. + * + * @param catalogue the catalogue object to retrieve the names from + * @param i the index to start retrieving the names from + * @return the dinky names retrieved from the catalogue + */ + private String getDinkyNames(Catalogue catalogue, int i) { + if (i == 3 || catalogue.getParentId().equals(0)) { + return ""; + } + + catalogue = catalogueService.getById(catalogue.getParentId()); + if (catalogue == null) { + throw new SchedulerException("Get Node List Error"); + } + + String name = i == 0 ? catalogue.getName() + ":" + catalogue.getId() : catalogue.getName(); + String next = getDinkyNames(catalogue, ++i); + + if (Strings.isNullOrEmpty(next)) { + return name; + } + return name + "_" + next; + } +} diff --git a/dinky-common/src/main/java/org/dinky/data/exception/BusException.java b/dinky-common/src/main/java/org/dinky/data/exception/BusException.java index 7cfc2407ac..12fbe3f98e 100644 --- a/dinky-common/src/main/java/org/dinky/data/exception/BusException.java +++ b/dinky-common/src/main/java/org/dinky/data/exception/BusException.java @@ -22,6 +22,7 @@ import org.dinky.data.enums.Status; import cn.hutool.core.exceptions.ExceptionUtil; +import cn.hutool.core.util.StrUtil; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -58,6 +59,12 @@ public BusException(Status status) { setMsg(status.getMessage()); } + public BusException(Status status, Object... errorArgs) { + super(status.getMessage()); + setCode(String.valueOf(status.getCode())); + setMsg(StrUtil.format(status.getMessage(), errorArgs)); + } + public BusException(String message, Object... 
args) { super(); setCode(message); diff --git a/dinky-common/src/main/resources/i18n/messages_en_US.properties b/dinky-common/src/main/resources/i18n/messages_en_US.properties index d03793581e..d58dddabf9 100644 --- a/dinky-common/src/main/resources/i18n/messages_en_US.properties +++ b/dinky-common/src/main/resources/i18n/messages_en_US.properties @@ -109,7 +109,7 @@ success=Successfully tenant.not.exist=Tenant Not Exist user.already.exists=User Already Exists git.building=Git Building -ds.work.flow.definition.task.name.exist=Add Failed, Workflow Definition [{}] Already Exists Task Definition [{}] Please Refresh +ds.work.flow.definition.task.name.exist=Workflow Definition [{}] already contains Task Definition [{}], it will be updated role.already.exists=Role Already Exists internal.server.error.args=Internal Server Error: {0} kick.out=token has been kicked offline @@ -160,23 +160,23 @@ daemon.task.not.support=threaded task types are notsupported sys.flink.settings.useRestAPI=Use Rest API sys.flink.settings.useRestAPI.note=Whether to use RestAPI when operating and maintaining Flink tasks sys.flink.settings.sqlSeparator=SQL Separator -sys.flink.settings.sqlSeparator.note=SQL Separator +sys.flink.settings.sqlSeparator.note=Please note: the default separator is ';'. If your SQL contains ';', change this configuration to another character, for example ';\\n' (without the quotes) sys.flink.settings.jobIdWait=Job submission waiting time sys.flink.settings.jobIdWait.note=Maximum waiting time (seconds) for obtaining Job ID when submitting Application or PerJob tasks sys.maven.settings.settingsFilePath=Maven configuration file path -sys.maven.settings.settingsFilePath.note=Maven configuration file path, eg +sys.maven.settings.settingsFilePath.note=Maven configuration file path, eg: /opt/maven-3.9.2/conf/settings.xml. Please note: by default the MAVEN_HOME environment variable of the host is used, so this can usually be left empty; if it is not configured, fill in the absolute path.
sys.maven.settings.repository=Maven warehouse address sys.maven.settings.repository.note=Maven warehouse address sys.maven.settings.repositoryUser=Maven warehouse user name -sys.maven.settings.repositoryUser.note=Maven private server authentication user name +sys.maven.settings.repositoryUser.note=Maven private server authentication user name; fill this in if your Maven private repository requires authentication sys.maven.settings.repositoryPassword=Maven warehouse password -sys.maven.settings.repositoryPassword.note=Maven private server authentication password, please note +sys.maven.settings.repositoryPassword.note=Maven private server authentication password; fill this in if your Maven private repository requires authentication sys.env.settings.pythonHome=Python Env variables -sys.env.settings.pythonHome.note=Python Env variables +sys.env.settings.pythonHome.note=Python environment variables, used to submit Python tasks and build Python UDFs sys.env.settings.dinkyAddr=Dinky Address sys.env.settings.dinkyAddr.note=The address must be the same as the address configured in the Dinky Application background url sys.dolphinscheduler.settings.enable=Whether to enable DolphinScheduler -sys.dolphinscheduler.settings.enable.note=Whether to enable DolphinScheduler, the relevant functions of DolphinScheduler can only be used after it is enabled, please ensure that the relevant configuration of DolphinScheduler is correct +sys.dolphinscheduler.settings.enable.note=Whether to enable DolphinScheduler. The related DolphinScheduler features are only available after it is enabled. Fill in the configuration items below first, then enable this option, and make sure the DolphinScheduler configuration is correct.
sys.dolphinscheduler.settings.url=DolphinScheduler address sys.dolphinscheduler.settings.url.note=The address must be consistent with the address configured in the DolphinScheduler background, eg: http://127.0.0.1:12345/dolphinscheduler sys.dolphinscheduler.settings.token=DolphinScheduler Token diff --git a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties index 24b5b2b71d..31263ba022 100644 --- a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties +++ b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties @@ -1,256 +1,256 @@ -test.connection.success=\u6D4B\u8BD5\u8FDE\u63A5\u6210\u529F -assign.menu.failed=\u5206\u914D\u83DC\u5355\u5931\u8D25 -ldap.user.autoload.forbaid=\u672A\u5F00\u542F\u81EA\u52A8\u6620\u5C04LDAP\u7528\u6237\uFF0C\u8BF7\u8054\u7CFB\u7BA1\u7406\u5458\u5BFC\u5165 -cluster.instance.recycle.success=\u56DE\u6536\u6210\u529F -execute.failed=\u6267\u884C\u5931\u8D25 -ldap.user.duplicat=ldap\u5339\u914D\u5230\u591A\u4E2A\u7528\u6237\u6570\u636E -git.branch.not.found=\u83B7\u53D6\u4E0D\u5230\u5206\u652F\u4FE1\u606F -copy.success=\u590D\u5236\u6210\u529F -user.superadmin.cannot.disable=\u8D85\u7EA7\u7BA1\u7406\u5458\u7528\u6237\u4E0D\u80FD\u505C\u7528 -ds.work.flow.not.save=\u8BF7\u5148\u4FDD\u5B58\u5DE5\u4F5C\u6D41 -schedule.status.unknown=\u672A\u77E5\u72B6\u6001: {0} -user.binding.role.delete.all=\u7528\u6237\u7ED1\u5B9A\u89D2\u8272\u5220\u9664\u6240\u6709 -modify.failed=\u4FEE\u6539\u5931\u8D25 -git.build.success=\u9884\u66F4\u65B0\u72B6\u6001\u6210\u529F,\u5F00\u59CB\u6267\u884C\u6784\u5EFA\u6D41\u7A0B -menu.has.child=\u5B58\u5728\u5B50\u83DC\u5355 \u4E0D\u5141\u8BB8\u5220\u9664 -tenant.already.exists=\u79DF\u6237\u5DF2\u5B58\u5728 -save.failed=\u4FDD\u5B58\u5931\u8D25 -assign.menu.success=\u5206\u914D\u83DC\u5355\u6210\u529F -user.disabled.by.admin=\u5F53\u524D\u7528\u6237\u5DF2\u88AB\u7BA1\u7406\u5458\u505C\u7528 -select.menu=\u8BF7\u9009\u62E9\u83DC\u5355 -role.not.exist=\u89D2\u8272\u4E0D\u5B58\u5728 -delete.success=\u5220\u9664\u6210\u529F -clear.success=\u6E05\u9664\u6210\u529F -move.success=\u79FB\u52A8\u6210\u529F -ldap.login.forbid=\u5F53\u524D\u7528\u6237\u767B\u5F55\u6A21\u5F0F\u4E0D\u662FLDAP\uFF0C\u8BF7\u8054\u7CFB\u7BA1\u7406\u5458\u4FEE\u6539,\u6216\u4E0D\u4F7F\u7528LDAP\u767B\u5F55 -request.params.not.valid.error=\u8BF7\u6C42\u53C2\u6570[{0}]\u65E0\u6548 -change.password.failed=\u4FEE\u6539\u5BC6\u7801\u5931\u8D25 -menu.name.exist=\u83DC\u5355\u5DF2\u5B58\u5728 -ds.task.type.not.support=\u6D77\u8C5A\u8C03\u5EA6\u7C7B\u578B\u4E3A [{}] \u4E0D\u652F\u6301,\u975EDINKY\u7C7B\u578B -datasource.connect.normal=\u6570\u636E\u6E90\u8FDE\u63A5\u6B63\u5E38 -restart.success=\u91CD\u542F\u6210\u529F -test.msg.job.log.url=\u70B9\u51FB\u67E5\u770B\u8BE5\u4EFB\u52A1\u7684\u5F02\u5E38\u65E5\u5FD7 -user.assign.role.success=\u7528\u6237\u5206\u914D\u89D2\u8272\u6210\u529F -global.params.check.error.value=\u5B57\u6BB5: {0}, \u4E0D\u5408\u6CD5\u7684\u503C: {1} -change.password.success=\u4FEE\u6539\u5BC6\u7801\u6210\u529F -user.not.exist=\u7528\u6237\u4E0D\u5B58\u5728 -refresh.success=\u5237\u65B0\u6210\u529F -ds.get.node.list.error=\u8282\u70B9\u83B7\u53D6\u5931\u8D25 -ldap.default.tenant.nofound=LDAP\u9ED8\u8BA4\u79DF\u6237\u4E0D\u5B58\u5728 -copy.failed=\u590D\u5236\u5931\u8D25 -folder.not.empty=\u8BE5\u76EE\u5F55\u4E0B\u5B58\u5728\u5B50\u76EE\u5F55/\u4F5C\u4E1A\uFF0C\u65E0\u6CD5\u5220\u9664 -be.replaced=token \u5DF2\u88AB\u9876\u4E0B\u7EBF 
-datasource.connect.success=\u6570\u636E\u6E90\u8FDE\u63A5\u6D4B\u8BD5\u6210\u529F -sign.out.success=\u9000\u51FA\u6210\u529F -added.success=\u65B0\u589E\u6210\u529F -tenant.binding.user=\u5220\u9664\u79DF\u6237\u5931\u8D25\uFF0C\u8BE5\u79DF\u6237\u5DF2\u7ED1\u5B9A\u7528\u6237 -send.test.failed=\u6D4B\u8BD5\u4FE1\u606F\u53D1\u9001\u5931\u8D25 -delete.failed=\u5220\u9664\u5931\u8D25 -role.binding.user=\u8BE5\u89D2\u8272\u5DF2\u7ED1\u5B9A\u7528\u6237\uFF0C\u65E0\u6CD5\u5220\u9664 -not.token=\u672A\u80FD\u8BFB\u53D6\u5230\u6709\u6548 Token -execute.success=\u6267\u884C\u6210\u529F -debug.success=\u8C03\u8BD5\u6210\u529F -debug.failed=\u8C03\u8BD5\u5931\u8D25 -publish.success=\u53D1\u5E03\u6210\u529F -publish.failed=\u53D1\u5E03\u5931\u8D25 -offline.success=\u4E0B\u7EBF\u6210\u529F -offline.failed=\u4E0B\u7EBF\u5931\u8D25 -version.rollback.success=\u7248\u672C\u56DE\u6EDA\u6210\u529F -version.rollback.failed=\u7248\u672C\u56DE\u6EDA\u5931\u8D25 -token.freezed=token \u5DF2\u88AB\u51BB\u7ED3 -menu.has.assign=\u83DC\u5355\u5DF2\u5206\u914D,\u4E0D\u5141\u8BB8\u5220\u9664 -datasource.status.refresh.success=\u6570\u636E\u6E90\u72B6\u6001\u5237\u65B0\u6210\u529F -user.not.login=\u7528\u6237\u672A\u767B\u5F55 -tenant.assign.user.failed=\u5206\u914D\u7528\u6237\u5931\u8D25 -stop.success=\u5DF2\u6210\u529F\u505C\u6B62 -move.failed=\u79FB\u52A8\u5931\u8D25 -get.tenant.failed=\u83B7\u53D6\u79DF\u6237\u4FE1\u606F\u5931\u8D25 -send.test.success=\u6D4B\u8BD5\u4FE1\u606F\u53D1\u9001\u6210\u529F -login.success=\u767B\u5F55\u6210\u529F -login.password.not.null=\u767B\u5F55\u5BC6\u7801\u4E0D\u80FD\u4E3A\u7A7A -unknown.error=\u672A\u77E5\u5F02\u5E38: {0} -stop.failed=\u505C\u6B62\u5931\u8D25 -role.name.exist=\u89D2\u8272\u5DF2\u5B58\u5728 -ldap.filter.incorrect=\u7528\u6237\u8FC7\u6EE4\u89C4\u5219\u4E0D\u80FD\u4E3A\u7A7A\uFF0C\u8BF7\u586B\u5199\u76F8\u5173\u914D\u7F6E -tenant.assign.user.success=\u5206\u914D\u7528\u6237\u6210\u529F -ds.add.work.flow.definition.success=\u6DFB\u52A0\u5DE5\u4F5C\u6D41\u5B9A\u4E49\u6210\u529F -expired.token=Token \u5DF2\u8FC7\u671F -refresh.failed=\u5237\u65B0\u5931\u8D25 -operate.success=\u64CD\u4F5C\u6210\u529F -git.project.not.found=\u83B7\u53D6\u4E0D\u5230\u9879\u76EE\u4FE1\u606F -cluster.instance.heartbeat.success=\u96C6\u7FA4\u5B9E\u4F8B\u5FC3\u8DF3\u6210\u529F -ldap.no.user.found=LDAP\u8FDE\u63A5\u6210\u529F\uFF0C\u4F46\u672A\u5339\u914D\u5230\u4EFB\u4F55\u7528\u6237 -login.failure=\u7528\u6237\u767B\u5F55\u5931\u8D25 -request.params.error=\u8BF7\u6C42\u53C2\u6570\u9519\u8BEF -user.not.binding.tenant=\u7528\u6237\u672A\u7ED1\u5B9A\u79DF\u6237 -user.assign.role.failed=\u7528\u6237\u5206\u914D\u89D2\u8272\u5931\u8D25 -rename.failed=\u91CD\u547D\u540D\u5931\u8D25 -test.msg.job.name=\u6D4B\u8BD5\u4EFB\u52A1 -tenant.binding.user.delete.all=\u8BE5\u79DF\u6237\u7ED1\u5B9A\u7684\u7528\u6237\u5DF2\u88AB\u5168\u90E8\u5220\u9664 -menu.not.exist=\u83DC\u5355\u4E0D\u5B58\u5728 -test.msg.job.name.title=\u4EFB\u52A1 -ds.task.not.exist=\u4EFB\u52A1\u4E0D\u5B58\u5728 -global.params.check.error=\u5B57\u6BB5: {0}, {1} -test.msg.title=\u5B9E\u65F6\u544A\u8B66\u76D1\u63A7 -user.name.passwd.error=\u7528\u6237\u540D\u6216\u5BC6\u7801\u4E0D\u6B63\u786E -no.prefix=\u672A\u6309\u7167\u6307\u5B9A\u524D\u7F00\u63D0\u4EA4 token -query.success=\u67E5\u8BE2\u6210\u529F -ds.work.flow.definition.not.exist=\u5DE5\u4F5C\u6D41\u5B9A\u4E49\u4E0D\u5B58\u5728 -tenant.name.exist=\u79DF\u6237\u5DF2\u5B58\u5728 -failed=\u83B7\u53D6\u5931\u8D25 -added.failed=\u65B0\u589E\u5931\u8D25 
-task.not.exist=\u4EFB\u52A1\u4E0D\u5B58\u5728 -task.is.online=\u4EFB\u52A1\u5DF2\u4E0A\u7EBF\uFF0C\u7981\u6B62\u4FEE\u6539 -cluster.instance.deploy=\u90E8\u7F72\u5B8C\u6210 -clear.failed=\u6E05\u9664\u5931\u8D25 -rename.success=\u91CD\u547D\u540D\u6210\u529F -job.release.disabled.update=\u4F5C\u4E1A\u5DF2\u53D1\u5E03\uFF0C\u7981\u6B62\u4FEE\u6539 -success=\u83B7\u53D6\u6210\u529F -tenant.not.exist=\u79DF\u6237\u4E0D\u5B58\u5728 -user.already.exists=\u7528\u6237\u540D\u5DF2\u5B58\u5728 -git.building=\u6B64\u4EFB\u52A1\u6B63\u5728\u6784\u5EFA -ds.work.flow.definition.task.name.exist=\u6DFB\u52A0\u5931\u8D25,\u5DE5\u4F5C\u6D41\u5B9A\u4E49 [{}] \u5DF2\u5B58\u5728\u4EFB\u52A1\u5B9A\u4E49 [{}] \u8BF7\u5237\u65B0 -role.already.exists=\u89D2\u8272\u5DF2\u5B58\u5728 -internal.server.error.args=\u670D\u52A1\u7AEF\u5F02\u5E38: {0} -kick.out=token \u5DF2\u88AB\u8E22\u4E0B\u7EBF -restart.failed=\u91CD\u542F\u5931\u8D25 -invalid.token=\u65E0\u6548\u7684 Token -datasource.not.exist=\u6570\u636E\u6E90\u4E0D\u5B58\u5728 -datasource.clear.cache.success=\u6E05\u9664\u5E93\u8868\u7F13\u5B58\u6210\u529F -tenant.admin.already.exists=\u5DF2\u5B58\u5728\u79DF\u6237\u7BA1\u7406\u5458, \u79DF\u6237\u8D85\u7BA1\u53EA\u80FD\u6709\u4E00\u4E2A -ds.work.flow.definition.online=\u5DE5\u4F5C\u6D41\u5B9A\u4E49 [{}] \u5DF2\u7ECF\u4E0A\u7EBF -test.msg.job.url=\u8DF3\u8F6C\u81F3\u8BE5\u4EFB\u52A1 -savepoint.is.null=\u4FDD\u5B58\u70B9\u4E3A\u7A7A -git.sort.success=\u6392\u5E8F\u6210\u529F -ds.add.task.definition.success=\u6DFB\u52A0\u4EFB\u52A1\u5B9A\u4E49\u6210\u529F -alert.group.exist=\u544A\u8B66\u7EC4\u5DF2\u5B58\u5728 -git.sort.failed=\u6392\u5E8F\u5931\u8D25 -query.failed=\u67E5\u8BE2\u5931\u8D25 -save.success=\u4FDD\u5B58\u6210\u529F -cluster.instance.kill=\u5DF2\u6740\u6B7B\u8BE5\u8FDB\u7A0B/\u96C6\u7FA4 -cluster.not.exist=\u96C6\u7FA4\u4E0D\u5B58\u5728 -operate.failed=\u64CD\u4F5C\u5931\u8D25 -test.connection.failed=\u6D4B\u8BD5\u8FDE\u63A5\u5931\u8D25 -switching.tenant.success=\u9009\u62E9\u79DF\u6237\u6210\u529F -tenant.name.not.exist=\u79DF\u6237\u4E0D\u5B58\u5728 -job.instance.not.exist=\u4F5C\u4E1A\u5B9E\u4F8B\u4E0D\u5B58\u5728 -modify.success=\u4FEE\u6539\u6210\u529F -user.old.password.incorrect=\u7528\u6237\u65E7\u5BC6\u7801\u4E0D\u6B63\u786E -ldap.user.incorrect=LDAP\u7528\u6237\u540D\uFF08DN\uFF09\u4E0D\u6B63\u786E -role.binding.row.permission=\u8BE5\u89D2\u8272\u5DF2\u7ED1\u5B9A\u884C\u6743\u9650\uFF0C\u65E0\u6CD5\u5220\u9664 +test.connection.success=测试连接成功 +assign.menu.failed=分配菜单失败 +ldap.user.autoload.forbaid=未开启自动映射LDAP用户,请联系管理员导入 +cluster.instance.recycle.success=回收成功 +execute.failed=执行失败 +ldap.user.duplicat=ldap匹配到多个用户数据 +git.branch.not.found=获取不到分支信息 +copy.success=复制成功 +user.superadmin.cannot.disable=超级管理员用户不能停用 +ds.work.flow.not.save=请先保存工作流 +schedule.status.unknown=未知状态: {0} +user.binding.role.delete.all=用户绑定角色删除所有 +modify.failed=修改失败 +git.build.success=预更新状态成功,开始执行构建流程 +menu.has.child=存在子菜单 不允许删除 +tenant.already.exists=租户已存在 +save.failed=保存失败 +assign.menu.success=分配菜单成功 +user.disabled.by.admin=当前用户已被管理员停用 +select.menu=请选择菜单 +role.not.exist=角色不存在 +delete.success=删除成功 +clear.success=清除成功 +move.success=移动成功 +ldap.login.forbid=当前用户登录模式不是LDAP,请联系管理员修改,或不使用LDAP登录 +request.params.not.valid.error=请求参数[{0}]无效 +change.password.failed=修改密码失败 +menu.name.exist=菜单已存在 +ds.task.type.not.support=海豚调度类型为 [{}] 不支持,非DINKY类型 +datasource.connect.normal=数据源连接正常 +restart.success=重启成功 +test.msg.job.log.url=点击查看该任务的异常日志 +user.assign.role.success=用户分配角色成功 +global.params.check.error.value=字段: {0}, 不合法的值: {1} 
+change.password.success=修改密码成功 +user.not.exist=用户不存在 +refresh.success=刷新成功 +ds.get.node.list.error=节点获取失败 +ldap.default.tenant.nofound=LDAP默认租户不存在 +copy.failed=复制失败 +folder.not.empty=该目录下存在子目录/作业,无法删除 +be.replaced=token 已被顶下线 +datasource.connect.success=数据源连接测试成功 +sign.out.success=退出成功 +added.success=新增成功 +tenant.binding.user=删除租户失败,该租户已绑定用户 +send.test.failed=测试信息发送失败 +delete.failed=删除失败 +role.binding.user=该角色已绑定用户,无法删除 +not.token=未能读取到有效 Token +execute.success=执行成功 +debug.success=调试成功 +debug.failed=调试失败 +publish.success=发布成功 +publish.failed=发布失败 +offline.success=下线成功 +offline.failed=下线失败 +version.rollback.success=版本回滚成功 +version.rollback.failed=版本回滚失败 +token.freezed=token 已被冻结 +menu.has.assign=菜单已分配,不允许删除 +datasource.status.refresh.success=数据源状态刷新成功 +user.not.login=用户未登录 +tenant.assign.user.failed=分配用户失败 +stop.success=已成功停止 +move.failed=移动失败 +get.tenant.failed=获取租户信息失败 +send.test.success=测试信息发送成功 +login.success=登录成功 +login.password.not.null=登录密码不能为空 +unknown.error=未知异常: {0} +stop.failed=停止失败 +role.name.exist=角色已存在 +ldap.filter.incorrect=用户过滤规则不能为空,请填写相关配置 +tenant.assign.user.success=分配用户成功 +ds.add.work.flow.definition.success=添加工作流定义成功 +expired.token=Token 已过期 +refresh.failed=刷新失败 +operate.success=操作成功 +git.project.not.found=获取不到项目信息 +cluster.instance.heartbeat.success=集群实例心跳成功 +ldap.no.user.found=LDAP连接成功,但未匹配到任何用户 +login.failure=用户登录失败 +request.params.error=请求参数错误 +user.not.binding.tenant=用户未绑定租户 +user.assign.role.failed=用户分配角色失败 +rename.failed=重命名失败 +test.msg.job.name=测试任务 +tenant.binding.user.delete.all=该租户绑定的用户已被全部删除 +menu.not.exist=菜单不存在 +test.msg.job.name.title=任务 +ds.task.not.exist=任务不存在 +global.params.check.error=字段: {0}, {1} +test.msg.title=实时告警监控 +user.name.passwd.error=用户名或密码不正确 +no.prefix=未按照指定前缀提交 token +query.success=查询成功 +ds.work.flow.definition.not.exist=工作流定义不存在 +tenant.name.exist=租户已存在 +failed=获取失败 +added.failed=新增失败 +task.not.exist=任务不存在 +task.is.online=任务已上线,禁止修改 +cluster.instance.deploy=部署完成 +clear.failed=清除失败 +rename.success=重命名成功 +job.release.disabled.update=作业已发布,禁止修改 +success=获取成功 +tenant.not.exist=租户不存在 +user.already.exists=用户名已存在 +git.building=此任务正在构建 +ds.work.flow.definition.task.name.exist=工作流定义 [{}] 已存在任务定义 [{}] ,将进行更新操作 +role.already.exists=角色已存在 +internal.server.error.args=服务端异常: {0} +kick.out=token 已被踢下线 +restart.failed=重启失败 +invalid.token=无效的 Token +datasource.not.exist=数据源不存在 +datasource.clear.cache.success=清除库表缓存成功 +tenant.admin.already.exists=已存在租户管理员, 租户超管只能有一个 +ds.work.flow.definition.online=工作流定义 [{}] 已经上线 +test.msg.job.url=跳转至该任务 +savepoint.is.null=保存点为空 +git.sort.success=排序成功 +ds.add.task.definition.success=添加任务定义成功 +alert.group.exist=告警组已存在 +git.sort.failed=排序失败 +query.failed=查询失败 +save.success=保存成功 +cluster.instance.kill=已杀死该进程/集群 +cluster.not.exist=集群不存在 +operate.failed=操作失败 +test.connection.failed=测试连接失败 +switching.tenant.success=选择租户成功 +tenant.name.not.exist=租户不存在 +job.instance.not.exist=作业实例不存在 +modify.success=修改成功 +user.old.password.incorrect=用户旧密码不正确 +ldap.user.incorrect=LDAP用户名(DN)不正确 +role.binding.row.permission=该角色已绑定行权限,无法删除 # dinky-admin -unknown.i18n=\u672A\u77E5 i18n \u4FE1\u606F,\u8BF7\u68C0\u67E5. . . +unknown.i18n=未知 i18n 信息,请检查. . . 
-file.upload.failed=\u6587\u4EF6\u4E0A\u4F20\u5931\u8D25, \u539F\u56E0: {0} +file.upload.failed=文件上传失败, 原因: {0} -daemon.task.config.not.exist=\u7EBF\u7A0B\u4EFB\u52A1\u914D\u7F6E\u4E0D\u80FD\u4E3A\u7A7A -daemon.task.not.support=\u4E0D\u652F\u6301\u7EBF\u7A0B\u4EFB\u52A1\u7C7B\u578B\uFF1A +daemon.task.config.not.exist=线程任务配置不能为空 +daemon.task.not.support=不支持线程任务类型: # dinky-alert -alert.rule.jobFail=\u4F5C\u4E1A\u5931\u8D25 -alert.rule.getJobInfoFail=\u83B7\u53D6\u4F5C\u4E1A\u4FE1\u606F\u5931\u8D25 -alert.rule.jobRestart=\u4F5C\u4E1A\u91CD\u542F -alert.rule.checkpointFail=checkpoint\u5931\u8D25 -alert.rule.jobRunException=\u4F5C\u4E1A\u8FD0\u884C\u5F02\u5E38 -alert.rule.checkpointTimeout=checkpoint\u8D85\u65F6 +alert.rule.jobFail=作业失败 +alert.rule.getJobInfoFail=获取作业信息失败 +alert.rule.jobRestart=作业重启 +alert.rule.checkpointFail=checkpoint失败 +alert.rule.jobRunException=作业运行异常 +alert.rule.checkpointTimeout=checkpoint超时 # system config -sys.flink.settings.useRestAPI=\u4F7F\u7528 Rest API -sys.flink.settings.useRestAPI.note=\u5728\u8FD0\u7EF4 Flink \u4EFB\u52A1\u65F6\u662F\u5426\u4F7F\u7528 RestAPI -sys.flink.settings.sqlSeparator=SQL \u5206\u9694\u7B26 -sys.flink.settings.sqlSeparator.note=SQL \u5206\u9694\u7B26 -sys.flink.settings.jobIdWait=Job \u63D0\u4EA4\u7B49\u5F85\u65F6\u95F4 -sys.flink.settings.jobIdWait.note=\u63D0\u4EA4 Application \u6216 PerJob \u4EFB\u52A1\u65F6\u83B7\u53D6 Job ID \u7684\u6700\u5927\u7B49\u5F85\u65F6\u95F4\uFF08\u79D2\uFF09 -sys.maven.settings.settingsFilePath=Maven \u914D\u7F6E\u6587\u4EF6\u8DEF\u5F84 -sys.maven.settings.settingsFilePath.note=Maven \u914D\u7F6E\u6587\u4EF6\u8DEF\u5F84, eg -sys.maven.settings.repository=Maven \u4ED3\u5E93\u5730\u5740 -sys.maven.settings.repository.note=Maven \u4ED3\u5E93\u5730\u5740 -sys.maven.settings.repositoryUser=Maven \u4ED3\u5E93\u7528\u6237\u540D -sys.maven.settings.repositoryUser.note=Maven \u79C1\u670D\u8BA4\u8BC1\u7528\u6237\u540D -sys.maven.settings.repositoryPassword=Maven \u4ED3\u5E93\u5BC6\u7801 -sys.maven.settings.repositoryPassword.note=Maven \u79C1\u670D\u8BA4\u8BC1\u5BC6\u7801,\u8BF7\u6CE8\u610F -sys.env.settings.pythonHome=Python \u73AF\u5883\u53D8\u91CF -sys.env.settings.pythonHome.note=Python \u73AF\u5883\u53D8\u91CF -sys.env.settings.dinkyAddr=Dinky \u5730\u5740 -sys.env.settings.dinkyAddr.note=\u8BE5\u5730\u5740\u5FC5\u987B\u4E0EDinky Application\u540E\u53F0url\u4E2D\u914D\u7F6E\u7684\u5730\u5740\u76F8\u540C -sys.dolphinscheduler.settings.enable=\u662F\u5426\u542F\u7528 DolphinScheduler -sys.dolphinscheduler.settings.enable.note=\u662F\u5426\u542F\u7528 DolphinScheduler ,\u542F\u7528\u540E\u624D\u80FD\u4F7F\u7528 DolphinScheduler \u7684\u76F8\u5173\u529F\u80FD, \u8BF7\u786E\u4FDD DolphinScheduler \u7684\u76F8\u5173\u914D\u7F6E\u6B63\u786E -sys.dolphinscheduler.settings.url=DolphinScheduler \u5730\u5740 -sys.dolphinscheduler.settings.url.note=\u5730\u5740\u5FC5\u987B\u548CDolphinScheduler\u540E\u53F0\u914D\u7F6E\u7684\u5730\u5740\u4E00\u81F4\uFF0Ceg: http://127.0.0.1:12345/dolphinscheduler +sys.flink.settings.useRestAPI=使用 Rest API +sys.flink.settings.useRestAPI.note=在运维 Flink 任务时是否使用 RestAPI +sys.flink.settings.sqlSeparator=SQL 分隔符 +sys.flink.settings.sqlSeparator.note=请注意: 默认分隔符为 ';' ,如果您的 SQL 中包含 ';' ,请修改此项配置为其他字符,例如: ';\\n', 请忽略单引号!!!! 
+sys.flink.settings.jobIdWait=Job 提交等待时间 +sys.flink.settings.jobIdWait.note=提交 Application 或 PerJob 任务时获取 Job ID 的最大等待时间(秒) +sys.maven.settings.settingsFilePath=Maven 配置文件路径 +sys.maven.settings.settingsFilePath.note=Maven 配置文件路径, eg: /opt/maven-3.9.2/conf/settings.xml ,请注意: 默认情况下会获取所在主机的 MAVEN_HOME 环境变量,则此处无需填写,如果没有配置请填写绝对路径 +sys.maven.settings.repository=Maven 仓库地址 +sys.maven.settings.repository.note=Maven 仓库地址 +sys.maven.settings.repositoryUser=Maven 仓库用户名 +sys.maven.settings.repositoryUser.note=Maven 私服认证用户名,如果需要配置 Maven 私服仓库认证信息,请填写此项 +sys.maven.settings.repositoryPassword=Maven 仓库密码 +sys.maven.settings.repositoryPassword.note=Maven 私服认证密码,如果需要配置 Maven 私服仓库认证信息,请填写此项 +sys.env.settings.pythonHome=Python 环境变量 +sys.env.settings.pythonHome.note=Python 环境变量,用于提交 Python 任务以及构建 Python Udf +sys.env.settings.dinkyAddr=Dinky 地址 +sys.env.settings.dinkyAddr.note=该地址必须与Dinky Application后台url中配置的地址相同 +sys.dolphinscheduler.settings.enable=是否启用 DolphinScheduler +sys.dolphinscheduler.settings.enable.note=是否启用 DolphinScheduler ,启用后才能使用 DolphinScheduler 的相关功能,请先填写下列配置项,完成后再开启此项配置, 另:请确保 DolphinScheduler 的相关配置正确 +sys.dolphinscheduler.settings.url=DolphinScheduler 地址 +sys.dolphinscheduler.settings.url.note=地址必须和DolphinScheduler后台配置的地址一致,eg: http://127.0.0.1:12345/dolphinscheduler sys.dolphinscheduler.settings.token=DolphinScheduler Token -sys.dolphinscheduler.settings.token.note=DolphinScheduler\u7684Token\uFF0C\u8BF7\u5728DolphinScheduler\u7684\u5B89\u5168\u4E2D\u5FC3->\u4EE4\u724C\u7BA1\u7406\u4E2D\u521B\u5EFA\u4E00\u4E2Atoken\uFF0C\u5E76\u586B\u5165\u8BE5\u914D\u7F6E\u4E2D -sys.dolphinscheduler.settings.projectName=DolphinScheduler \u9879\u76EE\u540D -sys.dolphinscheduler.settings.projectName.note=DolphinScheduler \u4E2D\u6307\u5B9A\u7684\u9879\u76EE\u540D\u79F0\uFF0C\u4E0D\u533A\u5206\u5927\u5C0F\u5199 -sys.ldap.settings.url=ldap\u670D\u52A1\u5730\u5740 -sys.ldap.settings.url.note=ldap\u8BA4\u8BC1\u670D\u52A1\u5730\u5740\uFF0C\u4F8B\u5982\uFF1Aldap://192.168.111.1:389 -sys.ldap.settings.userDn=\u767B\u5F55\u7528\u6237\u540D\uFF08DN\uFF09 -sys.ldap.settings.userDn.note=\u7528\u4E8E\u8FDE\u63A5ldap\u670D\u52A1\u7684\u7528\u6237\u540D\uFF0C\u6216\u8005\u7BA1\u7406\u5458DN -sys.ldap.settings.userPassword=\u767B\u5F55\u5BC6\u7801 -sys.ldap.settings.userPassword.note=\u7528\u4E8E\u8FDE\u63A5ldap\u670D\u52A1\u7684\u5BC6\u7801 -sys.ldap.settings.timeLimit=\u8FDE\u63A5\u8D85\u65F6 -sys.ldap.settings.timeLimit.note=\u8FDE\u63A5ldap\u670D\u52A1\u7684\u6700\u5927\u65F6\u95F4\uFF0C\u8D85\u8FC7\u5219\u65AD\u5F00 -sys.ldap.settings.baseDn=\u7528\u6237\u57FA\u7840DN -sys.ldap.settings.baseDn.note=Dinky\u4F1A\u5728\u6B64\u57FA\u7840dn\u4E0B\u8FDB\u884C\u7528\u6237\u641C\u7D22,\u4F8B\u5982\uFF1Aou=users,dc=dinky,dc=com -sys.ldap.settings.filter=\u7528\u6237\u8FC7\u6EE4\u89C4\u5219 -sys.ldap.settings.filter.note=\u4F7F\u7528ldap\u7684filter\u8BED\u6CD5\u8FDB\u884C\u7528\u6237\u8FC7\u6EE4\uFF0C\u4F8B\u5982 -sys.ldap.settings.autoload=\u767B\u5F55\u65F6\u81EA\u52A8\u6620\u5C04\u7528\u6237 -sys.ldap.settings.autoload.note=\u5F00\u542F\u540E\uFF0C\u5F53\u7528\u6237\u4F7F\u7528LDAP\u767B\u5F55\u65F6\uFF0C\u5982\u679C\u6CA1\u6709\u76F8\u5E94\u7684Dinky\u7528\u6237\u6620\u5C04\uFF0C\u5219\u4F1A\u81EA\u52A8\u62C9\u53D6LDAP\u4FE1\u606F\u521B\u5EFA\u4E00\u4E2ADinky\u7528\u6237\u4E0E\u4E4B\u6620\u5C04\uFF0C\u5982\u679C\u5173\u95ED\u6B64\u529F\u80FD\uFF0C\u5BF9\u4E8E\u672A\u5BFC\u5165\u7684LDAP\u7528\u6237\u5C06\u65E0\u6CD5\u767B\u5F55 -sys.ldap.settings.defaultTeant=LDAP\u5BFC\u5165\u9ED8\u8BA4\u79DF\u6237\u7F16\u7801 
-sys.ldap.settings.defaultTeant.note=\u5F00\u542F\u81EA\u52A8\u5BFC\u5165\u7528\u6237\u540E\uFF0C\u65B0\u7528\u6237\u767B\u5F55\u9700\u8981\u4E00\u4E2A\u9ED8\u8BA4\u7684\u79DF\u6237\u7F16\u7801\uFF0C\u5426\u5219\u65E0\u6CD5\u767B\u5F55 -sys.ldap.settings.castUsername=LDAP\u7528\u6237\u540D\u5B57\u6BB5 -sys.ldap.settings.castUsername.note=\u9700\u8981\u586B\u5199\u4E00\u4E2ALDAP\u4E2D\u7528\u6237\u7684\u5C5E\u6027\u5B57\u6BB5\u6765\u4E0EDinky\u7528\u6237\u5BF9\u5E94\uFF0C\u5FC5\u987B\u586B\u5199\uFF0C\u4E00\u822C\u53EF\u9009\u4E3A cn \u6216\u8005 uid\u8868\u793A\u7528\u6237\u552F\u4E00\u6807\u8BC6 -sys.ldap.settings.castNickname=LDAP\u6635\u79F0\u5B57\u6BB5 -sys.ldap.settings.castNickname.note=\u9700\u8981\u586B\u5199\u4E00\u4E2ALDAP\u4E2D\u7528\u6237\u7684\u5C5E\u6027\u5B57\u6BB5\u6765\u4E0EDinky\u6635\u79F0\u5BF9\u5E94\uFF0C\u5FC5\u987B\u586B\u5199\uFF0C\u4E00\u822C\u53EF\u9009\u4E3A sn \u6216\u8005\u5176\u4ED6\u6807\u8BC6\uFF0C\u4E0D\u8981\u6C42\u552F\u4E00 -sys.ldap.settings.enable=\u662F\u5426\u542F\u7528ldap -sys.ldap.settings.enable.note=\u5F00\u542FLDAP\u767B\u5F55\u529F\u80FD -sys.metrics.settings.sys.enable=Dinky JVM Monitor \u5F00\u5173 -sys.metrics.settings.sys.enable.note=\u6B64\u5F00\u5173\u4F1A\u5173\u7CFB\u5230Dinky JVM Monitor\uFF0C\u51B3\u5B9A\u76D1\u63A7\u9875\u9762\u4E2D\u7684Dinky Server\u663E\u793A\uFF0C\u4EE5\u53CAJVM Metrics\u91C7\u96C6 -sys.metrics.settings.sys.gatherTiming=Dinky JVM Metrics \u91C7\u96C6\u65F6\u95F4\u7C92\u5EA6 -sys.metrics.settings.sys.gatherTiming.note=Dinky JVM Metrics \u91C7\u96C6\u65F6\u95F4\u7C92\u5EA6\uFF0C\u5B9A\u65F6\u4EFB\u52A1\u95F4\u9694\u89E6\u53D1 -sys.metrics.settings.flink.gatherTiming=Flink Metrics \u91C7\u96C6\u65F6\u95F4\u7C92\u5EA6 -sys.metrics.settings.flink.gatherTiming.note=Flink Metrics \u91C7\u96C6\u65F6\u95F4\u7C92\u5EA6\uFF0C\u5B9A\u65F6\u4EFB\u52A1\u95F4\u9694\u89E6\u53D1 -sys.metrics.settings.flink.gatherTimeout=Flink Metrics \u91C7\u96C6\u65F6\u95F4\u7C92\u5EA6\uFF0C\u5B9A\u65F6\u4EFB\u52A1\u95F4\u9694\u89E6\u53D1 -sys.metrics.settings.flink.gatherTimeout.note=Flink Metrics \u91C7\u96C6\u8D85\u65F6\u65F6\u957F\uFF0C\u5B9A\u65F6\u4EFB\u52A1\u95F4\u9694\u89E6\u53D1\uFF08\u6B64\u914D\u7F6E\u9879\u5E94\u5C0F\u4E8EFlink Metrics \u91C7\u96C6\u65F6\u95F4\u7C92\u5EA6\uFF09 -sys.resource.settings.base.enable=\u662F\u5426\u542F\u7528Resource -sys.resource.settings.base.enable.note=\u542F\u7528\u8D44\u6E90\u7BA1\u7406\u529F\u80FD\uFF0C\u5982\u679C\u5207\u6362\u5B58\u50A8\u6A21\u5F0F\u65F6\uFF0C\u9700\u5173\u95ED\u6B64\u5F00\u5173\uFF0C\u76F8\u5173\u914D\u7F6E\u5B8C\u6210\u540E\uFF0C\u518D\u5F00\u542F -sys.resource.settings.base.upload.base.path=\u4E0A\u4F20\u76EE\u5F55\u7684\u6839\u8DEF\u5F84 -sys.resource.settings.base.upload.base.path.note=\u8D44\u6E90\u5B58\u50A8\u5728HDFS/OSS\u8DEF\u5F84\u4E0A\uFF0C\u8D44\u6E90\u6587\u4EF6\u5C06\u5B58\u50A8\u5230\u6B64\u57FA\u672C\u8DEF\u5F84\uFF0C\u81EA\u884C\u914D\u7F6E\uFF0C\u8BF7\u786E\u4FDD\u8BE5\u76EE\u5F55\u5B58\u5728\u4E8E\u76F8\u5173\u5B58\u50A8\u7CFB\u7EDF\u4E0A\u5E76\u5177\u6709\u8BFB\u5199\u6743\u9650\u3002\u63A8\u8350 -sys.resource.settings.base.model=\u5B58\u50A8\u6A21\u5F0F -sys.resource.settings.base.model.note=\u652F\u6301HDFS\u3001OSS\uFF0C\u5207\u6362\u9009\u9879\u540E\u5373\u53EF\u751F\u6548\uFF0C\u540C\u65F6\u5E76\u8FC1\u79FB\u8D44\u6E90\u6587\u4EF6 -sys.resource.settings.oss.endpoint=\u5BF9\u8C61\u5B58\u50A8\u670D\u52A1\u7684URL -sys.resource.settings.oss.endpoint.note=\u4F8B\u5982\uFF1Ahttps://oss-cn-hangzhou.aliyuncs.com 
+sys.dolphinscheduler.settings.token.note=DolphinScheduler的Token,请在DolphinScheduler的安全中心->令牌管理中创建一个token,并填入该配置中 +sys.dolphinscheduler.settings.projectName=DolphinScheduler 项目名 +sys.dolphinscheduler.settings.projectName.note=DolphinScheduler 中指定的项目名称,不区分大小写 +sys.ldap.settings.url=Ldap 服务地址 +sys.ldap.settings.url.note=Ldap 认证服务地址,例如:ldap://192.168.111.1:389 +sys.ldap.settings.userDn=登录用户名(DN) +sys.ldap.settings.userDn.note=用于连接 Ldap 服务的用户名,或者管理员DN +sys.ldap.settings.userPassword=登录密码 +sys.ldap.settings.userPassword.note=用于连接ldap服务的密码 +sys.ldap.settings.timeLimit=连接超时 +sys.ldap.settings.timeLimit.note=连接 Ldap 服务的最大时间,超过则断开 +sys.ldap.settings.baseDn=用户基础 DN +sys.ldap.settings.baseDn.note=Dinky 会在此基础 DN 下进行用户搜索,例如:ou=users,dc=dinky,dc=com +sys.ldap.settings.filter=用户过滤规则 +sys.ldap.settings.filter.note=使用 Ldap 的 filter 语法进行用户过滤 +sys.ldap.settings.autoload=登录时自动映射用户 +sys.ldap.settings.autoload.note=开启后,当用户使用 LDAP 登录时,如果没有相应的 Dinky 用户映射,则会自动拉取 LDAP 信息创建一个 Dinky 用户与之映射,如果关闭此功能,对于未导入的 LDAP 用户将无法登录 +sys.ldap.settings.defaultTeant=LDAP 导入默认租户编码 +sys.ldap.settings.defaultTeant.note=开启自动导入用户后,新用户登录需要一个默认的租户编码,否则无法登录,例如: DefaultTenant +sys.ldap.settings.castUsername=LDAP 用户名字段 +sys.ldap.settings.castUsername.note=需要填写一个 LDAP 中用户的属性字段来与 Dinky 用户对应,必须填写,一般可选为 cn 或者 uid表示用户唯一标识 +sys.ldap.settings.castNickname=LDAP 昵称字段 +sys.ldap.settings.castNickname.note=需要填写一个 LDAP 中用户的属性字段来与 Dinky 昵称对应,必须填写,一般可选为 sn 或者其他标识,不要求唯一 +sys.ldap.settings.enable=是否启用 Ldap +sys.ldap.settings.enable.note=开启 LDAP 登录功能 +sys.metrics.settings.sys.enable=Dinky JVM Monitor 开关 +sys.metrics.settings.sys.enable.note=此开关会关系到Dinky JVM Monitor,决定监控页面中的Dinky Server显示,以及JVM Metrics采集 +sys.metrics.settings.sys.gatherTiming=Dinky JVM Metrics 采集时间粒度 +sys.metrics.settings.sys.gatherTiming.note=Dinky JVM Metrics 采集时间粒度,定时任务间隔触发 +sys.metrics.settings.flink.gatherTiming=Flink Metrics 采集时间粒度 +sys.metrics.settings.flink.gatherTiming.note=Flink Metrics 采集时间粒度,定时任务间隔触发 +sys.metrics.settings.flink.gatherTimeout=Flink Metrics 采集时间粒度,定时任务间隔触发 +sys.metrics.settings.flink.gatherTimeout.note=Flink Metrics 采集超时时长,定时任务间隔触发(此配置项应小于Flink Metrics 采集时间粒度) +sys.resource.settings.base.enable=是否启用Resource +sys.resource.settings.base.enable.note=启用资源管理功能,如果切换存储模式时,需关闭此开关,相关配置完成后,再开启 +sys.resource.settings.base.upload.base.path=上传目录的根路径 +sys.resource.settings.base.upload.base.path.note=资源存储在HDFS/OSS路径上,资源文件将存储到此基本路径,自行配置,请确保该目录存在于相关存储系统上并具有读写权限。 +sys.resource.settings.base.model=存储模式 +sys.resource.settings.base.model.note=支持HDFS、OSS,切换选项后即可生效,同时并迁移资源文件 +sys.resource.settings.oss.endpoint=对象存储服务的 URL +sys.resource.settings.oss.endpoint.note=例如:https://oss-cn-hangzhou.aliyuncs.com sys.resource.settings.oss.accessKey=Access key -sys.resource.settings.oss.accessKey.note=Access key\u5C31\u50CF\u7528\u6237ID\uFF0C\u53EF\u4EE5\u552F\u4E00\u6807\u8BC6\u4F60\u7684\u8D26\u6237 +sys.resource.settings.oss.accessKey.note=Access key就像用户ID,可以唯一标识你的账户 sys.resource.settings.oss.secretKey=Secret key -sys.resource.settings.oss.secretKey.note=Secret key\u662F\u4F60\u8D26\u6237\u7684\u5BC6\u7801 -sys.resource.settings.oss.bucketName=\u5B58\u50A8\u6876\u540D\u79F0 -sys.resource.settings.oss.bucketName.note=\u9ED8\u8BA4\u7684\u5B58\u50A8\u6876\u540D\u79F0 -sys.resource.settings.oss.region=\u533A\u57DF -sys.resource.settings.oss.region.note=\u533A\u57DF -sys.resource.settings.hdfs.root.user=HDFS\u64CD\u4F5C\u7528\u6237\u540D -sys.resource.settings.hdfs.root.user.note=HDFS\u64CD\u4F5C\u7528\u6237\u540D +sys.resource.settings.oss.secretKey.note=Secret key是你账户的密码 
+sys.resource.settings.oss.bucketName=存储桶名称 +sys.resource.settings.oss.bucketName.note=默认的存储桶名称 +sys.resource.settings.oss.region=区域 +sys.resource.settings.oss.region.note=区域,例如:oss-cn-hangzhou +sys.resource.settings.hdfs.root.user=HDFS 操作用户名 +sys.resource.settings.hdfs.root.user.note=HDFS 操作用户名 sys.resource.settings.hdfs.fs.defaultFS=HDFS defaultFS -sys.resource.settings.hdfs.fs.defaultFS.note=fs.defaultFS \u914D\u7F6E\u9879\uFF0C\u4F8B\u5982\u8FDC\u7A0B\uFF1Ahdfs://localhost:9000\uFF0C\u672C\u5730\uFF1Afile:/// +sys.resource.settings.hdfs.fs.defaultFS.note=fs.defaultFS 配置项,例如: 远程 HDFS:hdfs://localhost:9000,本地:file:/// #Dinky Gateway -gateway.kubernetes.test.failed=\u6D4B\u8BD5 Flink \u914D\u7F6E\u5931\u8D25\uFF1A +gateway.kubernetes.test.failed=测试 Flink 配置失败: # Task -task.status.is.not.done=\u5F53\u524D\u4F5C\u4E1A\u72B6\u6001\u672A\u505C\u6B62\uFF0C\u8BF7\u505C\u6B62\u540E\u64CD\u4F5C -task.sql.explain.failed=sql\u89E3\u6790\u5931\u8D25\uFF0C\u8BF7\u68C0\u67E5 -task.update.failed=Task\u66F4\u65B0\u5931\u8D25 +task.status.is.not.done=当前作业状态未停止,请停止后操作 +task.sql.explain.failed=sql解析失败,请检查 +task.update.failed=Task更新失败 # process -process.submit.submitTask=\u63D0\u4EA4\u4F5C\u4E1A -process.submit.checkSql=\u68C0\u67E5\u4F5C\u4E1A -process.submit.execute=\u6267\u884C\u4F5C\u4E1A -process.submit.buildConfig=\u6784\u5EFA\u914D\u7F6E\u4FE1\u606F -process.submit.execute.commSql=\u6267\u884C\u666E\u901Asql -process.submit.execute.flinkSql=\u6267\u884CflinkSql -process.register.exits=\u5F53\u524D\u4EFB\u52A1\u6B63\u5728\u6267\u884C\uFF0C\u8BF7\u52FF\u91CD\u590D\u63D0\u4EA4\uFF0C\u5982\u6709\u95EE\u9898\u8BF7\u524D\u5F80\u914D\u7F6E\u4E2D\u5FC3\u67E5\u770B +process.submit.submitTask=提交作业 +process.submit.checkSql=检查作业 +process.submit.execute=执行作业 +process.submit.buildConfig=构建配置信息 +process.submit.execute.commSql=执行普通sql +process.submit.execute.flinkSql=执行flinkSql +process.register.exits=当前任务正在执行,请勿重复提交,如有问题请前往配置中心查看 # resource -resource.root.dir.not.allow.delete=\u6839\u76EE\u5F55\u4E0D\u5141\u8BB8\u5220\u9664 -resource.dir.or.file.not.exist=\u8D44\u6E90\u76EE\u5F55\u6216\u6587\u4EF6\u4E0D\u5B58\u5728 \ No newline at end of file +resource.root.dir.not.allow.delete=根目录不允许删除 +resource.dir.or.file.not.exist=资源目录或文件不存在 \ No newline at end of file diff --git a/dinky-core/src/main/java/org/dinky/job/JobContextHolder.java b/dinky-core/src/main/java/org/dinky/job/JobContextHolder.java deleted file mode 100644 index bed303a5a7..0000000000 --- a/dinky-core/src/main/java/org/dinky/job/JobContextHolder.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- *
- */
-
-package org.dinky.job;
-
-/**
- * JobContextHolder
- *
- * @since 2021/6/26 23:29
- */
-public class JobContextHolder {
-
-    private static final ThreadLocal<Job> CONTEXT = new ThreadLocal<>();
-
-    public static void setJob(Job job) {
-        CONTEXT.set(job);
-    }
-
-    public static Job getJob() {
-        return CONTEXT.get();
-    }
-
-    public static void clear() {
-        CONTEXT.remove();
-    }
-}
diff --git a/dinky-scheduler/src/main/java/org/dinky/scheduler/client/TaskClient.java b/dinky-scheduler/src/main/java/org/dinky/scheduler/client/TaskClient.java
index fdb9e71ac6..feea21fc78 100644
--- a/dinky-scheduler/src/main/java/org/dinky/scheduler/client/TaskClient.java
+++ b/dinky-scheduler/src/main/java/org/dinky/scheduler/client/TaskClient.java
@@ -46,6 +46,7 @@
 import cn.hutool.core.util.StrUtil;
 import cn.hutool.http.HttpRequest;
 import cn.hutool.json.JSONObject;
+import cn.hutool.json.JSONUtil;
 
 /** 任务定义 */
 @Component
@@ -61,8 +62,8 @@ public class TaskClient {
      * @param taskName 任务定义名称
      * @return {@link TaskMainInfo}
      */
-    public TaskMainInfo getTaskMainInfo(Long projectCode, String processName, String taskName) {
-        List<TaskMainInfo> lists = getTaskMainInfos(projectCode, processName, taskName);
+    public TaskMainInfo getTaskMainInfo(Long projectCode, String processName, String taskName, String taskType) {
+        List<TaskMainInfo> lists = getTaskMainInfos(projectCode, processName, taskName, taskType);
         for (TaskMainInfo list : lists) {
             if (list.getTaskName().equalsIgnoreCase(taskName)) {
                 return list;
@@ -79,7 +80,7 @@ public TaskMainInfo getTaskMainInfo(Long projectCode, String processName, String
      * @param taskName 任务定义名称
      * @return {@link List}
      */
-    public List<TaskMainInfo> getTaskMainInfos(Long projectCode, String processName, String taskName) {
+    public List<TaskMainInfo> getTaskMainInfos(Long projectCode, String processName, String taskName, String taskType) {
         Map<String, Object> map = new HashMap<>();
         map.put("projectCode", projectCode);
         String format = StrUtil.format(
@@ -90,7 +91,7 @@ public List<TaskMainInfo> getTaskMainInfos(Long projectCode, String processName,
         Map<String, Object> pageParams = ParamUtil.getPageParams();
         pageParams.put("searchTaskName", taskName);
         pageParams.put("searchWorkflowName", processName);
-        pageParams.put("taskType", "DINKY");
+        pageParams.put("taskType", taskType);
         String content = HttpRequest.get(format)
                 .header(
@@ -110,9 +111,7 @@ public List<TaskMainInfo> getTaskMainInfos(Long projectCode, String processName,
         }
 
         for (JSONObject jsonObject : data.getTotalList()) {
-            if (processName.equalsIgnoreCase(jsonObject.getStr("processDefinitionName"))) {
-                lists.add(MyJSONUtil.toBean(jsonObject, TaskMainInfo.class));
-            }
+            lists.add(JSONUtil.toBean(jsonObject, TaskMainInfo.class));
         }
         return lists;
     }
diff --git a/dinky-scheduler/src/main/java/org/dinky/scheduler/model/DinkyTaskRequest.java b/dinky-scheduler/src/main/java/org/dinky/scheduler/model/DinkyTaskRequest.java
new file mode 100644
index 0000000000..d2750222be
--- /dev/null
+++ b/dinky-scheduler/src/main/java/org/dinky/scheduler/model/DinkyTaskRequest.java
@@ -0,0 +1,104 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.scheduler.model; + +import org.dinky.data.model.SystemConfiguration; +import org.dinky.scheduler.enums.TaskExecuteType; + +import java.util.Arrays; +import java.util.List; + +import javax.validation.constraints.NotNull; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +@Data +public class DinkyTaskRequest { + + @ApiModelProperty(value = "自定义参数") + private List localParams; + + @ApiModelProperty(value = "dinky地址") + @NotNull + private String address = SystemConfiguration.getInstances().getDinkyAddr().getValue(); + + @ApiModelProperty(value = "上游任务编号") + List upstreamCodes = Arrays.asList(); + + @ApiModelProperty(value = "dinky任务id", required = true) + @NotNull + private String taskId; + + @ApiModelProperty(value = "编号") + private Long code; + + @ApiModelProperty(value = "名称") + private String name; + + @ApiModelProperty(value = "描述") + private String description; + + @ApiModelProperty(value = "环境编号") + private Long environmentCode; + + @ApiModelProperty(value = "延迟执行时间") + private Integer delayTime; + + @ApiModelProperty(value = "重试间隔") + private Integer failRetryInterval; + + @ApiModelProperty(value = "重试次数") + private Integer failRetryTimes; + + @ApiModelProperty(value = "运行标志 yes 正常/no 禁止执行") + private String flag; + + @ApiModelProperty(value = "任务参数 默认DINKY参数") + private String taskParams; + + @NotNull + @ApiModelProperty(value = "优先级") + private String taskPriority; + + @ApiModelProperty(value = "任务类型 默认DINKY") + private String taskType = "DINKY"; + + @ApiModelProperty(value = "超时时间(分钟)") + private Integer timeout; + + @ApiModelProperty(value = "超时告警") + private String timeoutFlag; + + @ApiModelProperty(value = "超时通知策略") + private String timeoutNotifyStrategy; + + @ApiModelProperty(value = "worker分组 默认default") + private String workerGroup = "default"; + + @ApiModelProperty(value = "cpu 配额 默认-1") + private Integer cpuQuota = -1; + + @ApiModelProperty(value = "最大内存 默认-1") + private Integer memoryMax = -1; + + @ApiModelProperty(value = "执行类型 默认BATCH") + private TaskExecuteType taskExecuteType = TaskExecuteType.BATCH; +} diff --git a/dinky-scheduler/src/main/java/org/dinky/scheduler/model/TaskRequest.java b/dinky-scheduler/src/main/java/org/dinky/scheduler/model/TaskRequest.java index 492daa1b1e..4e1dc395b9 100644 --- a/dinky-scheduler/src/main/java/org/dinky/scheduler/model/TaskRequest.java +++ b/dinky-scheduler/src/main/java/org/dinky/scheduler/model/TaskRequest.java @@ -19,11 +19,7 @@ package org.dinky.scheduler.model; -import org.dinky.scheduler.enums.Flag; -import org.dinky.scheduler.enums.Priority; import org.dinky.scheduler.enums.TaskExecuteType; -import org.dinky.scheduler.enums.TaskTimeoutStrategy; -import org.dinky.scheduler.enums.TimeoutFlag; import javax.validation.constraints.NotNull; @@ -55,14 +51,14 @@ public class TaskRequest { private Integer failRetryTimes; @ApiModelProperty(value = "运行标志 yes 正常/no 禁止执行") - private Flag flag; + private String flag; @ApiModelProperty(value = "任务参数 默认DINKY参数") private String taskParams; @NotNull @ApiModelProperty(value = "优先级") - private Priority taskPriority; + private 
String taskPriority; @ApiModelProperty(value = "任务类型 默认DINKY") private String taskType = "DINKY"; @@ -71,10 +67,10 @@ public class TaskRequest { private Integer timeout; @ApiModelProperty(value = "超时告警") - private TimeoutFlag timeoutFlag; + private String timeoutFlag; @ApiModelProperty(value = "超时通知策略") - private TaskTimeoutStrategy timeoutNotifyStrategy; + private String timeoutNotifyStrategy; @ApiModelProperty(value = "worker分组 默认default") private String workerGroup = "default"; diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index 2132dfbf93..dd11a2af4f 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -86,6 +86,28 @@ export default { 'datastudio.middle.qg.gitprojects': 'Git Projects', 'datastudio.middle.qg.resource': 'Resource', 'datastudio.middle.qg.udf': 'UDF', + 'datastudio.header.pushdolphin.title': 'Push task [ {name} ] to DolphinScheduler', + 'datastudio.header.pushdolphin.taskId': 'Dinky task encoding', + 'datastudio.header.pushdolphin.taskName': 'Task name: {name}', + 'datastudio.header.pushdolphin.taskNameExt': 'Task type: {type} Process definition: {processDefinitionName}', + 'datastudio.header.pushdolphin.upstreamCodes': 'pre-task', + 'datastudio.header.pushdolphin.upstreamCodesTip': 'After selecting the pre-task, the task will not be executed until the pre-task is successfully executed. Please choose wisely to avoid task circular dependencies. This platform does not do dependency checking', + 'datastudio.header.pushdolphin.taskPriority': 'Task Priority', + 'datastudio.header.pushdolphin.failRetryTimes': 'Number of retries', + 'datastudio.header.pushdolphin.failRetryInterval': 'Failure retry interval (minutes)', + 'datastudio.header.pushdolphin.failRetryIntervalPlaceholder': 'Please enter the failure retry interval (minutes)', + 'datastudio.header.pushdolphin.delayTime': 'Delayed execution time (minutes)', + 'datastudio.header.pushdolphin.delayTimePlaceholder': 'Please enter the delay execution time (minutes)', + 'datastudio.header.pushdolphin.timeoutFlag': 'Timeout alarm', + 'datastudio.header.pushdolphin.timeoutFlag.warn': 'Timeout warning', + 'datastudio.header.pushdolphin.timeoutFlag.failed': 'Timeout failed', + 'datastudio.header.pushdolphin.timeoutFlagTip': 'Please select a timeout warning', + 'datastudio.header.pushdolphin.flag': 'Run flag', + 'datastudio.header.pushdolphin.flagTip': 'Please select the run flag', + 'datastudio.header.pushdolphin.timeoutNotifyStrategy': 'Timeout notification strategy', + 'datastudio.header.pushdolphin.timeoutNotifyStrategyTip': 'Please select a timeout notification strategy', + 'datastudio.header.pushdolphin.timeout': 'Timeout (minutes)', + 'datastudio.header.pushdolphin.timeoutPlaceholder': 'Please enter the timeout time (minutes)', 'datastudio.project.create.folder.name': 'Folder Name', 'datastudio.project.create.folder.name.placeholder': 'Please enter the folder name', 'datastudio.project.create.folder.tip': diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 96a5a95f45..d306274948 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -81,6 +81,28 @@ export default { 'datastudio.middle.qg.gitprojects': 'Git 项目', 'datastudio.middle.qg.resource': '资源', 'datastudio.middle.qg.udf': 'UDF', + 'datastudio.header.pushdolphin.title': '将任务 [ {name} ]推送至 DolphinScheduler', + 'datastudio.header.pushdolphin.taskId': 'Dinky任务编码', + 'datastudio.header.pushdolphin.taskName': 
'Task名称: {name}', + 'datastudio.header.pushdolphin.taskNameExt': 'Task类型: {type} 所属进程定义: {processDefinitionName}', + 'datastudio.header.pushdolphin.upstreamCodes': '前置任务', + 'datastudio.header.pushdolphin.upstreamCodesTip': '选择前置任务后,任务将会在前置任务执行成功后才会执行,请合理选择,避免任务循环依赖,本平台不做依赖检查', + 'datastudio.header.pushdolphin.taskPriority': '任务优先级', + 'datastudio.header.pushdolphin.failRetryTimes': '重试次数', + 'datastudio.header.pushdolphin.failRetryInterval': '失败重试间隔(分钟)', + 'datastudio.header.pushdolphin.failRetryIntervalPlaceholder': '请输入失败重试间隔(分钟)', + 'datastudio.header.pushdolphin.delayTime': '延时执行时间(分钟)', + 'datastudio.header.pushdolphin.delayTimePlaceholder': '请输入延时执行时间(分钟)', + 'datastudio.header.pushdolphin.timeoutFlag': '超时告警', + 'datastudio.header.pushdolphin.timeoutFlag.warn': '超时告警', + 'datastudio.header.pushdolphin.timeoutFlag.failed': '超时失败', + 'datastudio.header.pushdolphin.timeoutFlagTip': '请选择超时警告', + 'datastudio.header.pushdolphin.flag': '运行标志', + 'datastudio.header.pushdolphin.flagTip': '请选择运行标志', + 'datastudio.header.pushdolphin.timeoutNotifyStrategy': '超时通知策略', + 'datastudio.header.pushdolphin.timeoutNotifyStrategyTip': '请选择超时通知策略', + 'datastudio.header.pushdolphin.timeout': '超时时间(分钟)', + 'datastudio.header.pushdolphin.timeoutPlaceholder': '请输入超时时间(分钟)', 'datastudio.project.create.folder.name': '目录名称', 'datastudio.project.create.folder.name.placeholder': '请输入目录名称', 'datastudio.project.create.folder.tip': '暂无作业,请点击左上角新建目录', diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/constants.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/constants.tsx index c266dfb432..3c87113021 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/constants.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/constants.tsx @@ -17,51 +17,74 @@ * */ -import {Badge, Space} from "antd"; -import React from "react"; -import {DefaultOptionType} from "antd/es/select"; -import {CheckboxOptionType} from "antd/es/checkbox/Group"; - +import { Badge, Space } from 'antd'; +import { CheckboxOptionType } from 'antd/es/checkbox/Group'; +import { DefaultOptionType } from 'antd/es/select'; +import {l} from "@/utils/intl"; /** * priority list for select | 优先级列表 */ -export const PriorityList :DefaultOptionType[] = [ +export const PriorityList: DefaultOptionType[] = [ { - label: Highest, - value: 0, - key: 0, + label: ( + + + Highest + + ), + value: 'HIGHEST', + key: 'HIGHEST' }, { - label: High, - value: 1, - key: 1, + label: ( + + + High + + ), + value: 'HIGH', + key: 'HIGH' }, { - label: Medium, - value: 2, - key: 2, + label: ( + + + Medium + + ), + value: 'MEDIUM', + key: 'MEDIUM' }, { - label: Low, - value: 3, - key: 3, + label: ( + + + Low + + ), + value: 'LOW', + key: 'LOW' }, { - label: Lowest, - value: 4, - key: 4, + label: ( + + + Lowest + + ), + value: 'LOWEST', + key: 'LOWEST' } -] - +]; -export const TimeoutNotifyStrategy:CheckboxOptionType[] = [ +export const TimeoutNotifyStrategy: CheckboxOptionType[] = [ { - label: '失败告警', - value: 'WARN', + label: l('datastudio.header.pushdolphin.timeoutFlag.warn'), + value: 'WARN' }, { - label: '失败报错', - value: 'FAILED', - }, -] + label: l('datastudio.header.pushdolphin.timeoutFlag.failed'), + value: 'FAILED' + } +]; diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/function.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/function.tsx new file mode 100644 index 0000000000..e560718ce1 --- /dev/null +++ 
b/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/function.tsx @@ -0,0 +1,50 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { DolphinTaskDefinition, PushDolphinParams } from '@/types/Studio/data.d'; + +export const transformPushDolphinParams = ( + dolphinTaskDefinition: DolphinTaskDefinition, + pushDolphinParams: PushDolphinParams, + toFormValues: boolean +) => { + + + if (toFormValues && dolphinTaskDefinition) { + const transformValue : PushDolphinParams = { + ...pushDolphinParams, + description: dolphinTaskDefinition.description, + timeoutFlag: dolphinTaskDefinition.timeoutFlag === 'OPEN', + flag: dolphinTaskDefinition.flag === 'YES', + upstreamCodes: dolphinTaskDefinition.upstreamTaskMap ? Object.keys(dolphinTaskDefinition.upstreamTaskMap) : [], + timeoutNotifyStrategy: dolphinTaskDefinition.timeoutNotifyStrategy === 'WARNFAILED' ? ['WARN', 'FAILED'] : [dolphinTaskDefinition.timeoutNotifyStrategy] + }; + return transformValue; + } else { + const falseTransformValue : DolphinTaskDefinition = { + ...dolphinTaskDefinition, + ...pushDolphinParams, + description: pushDolphinParams.description, + timeoutFlag: pushDolphinParams.timeoutFlag ? 'OPEN' : 'CLOSE', + flag: pushDolphinParams.flag ? 
'YES' : 'NO', + timeoutNotifyStrategy: (pushDolphinParams.timeoutNotifyStrategy as string[]).join('') + }; + return falseTransformValue; + } +}; diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/index.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/index.tsx index 8aec72e150..2f661ad5e7 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/index.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/PushDolphin/index.tsx @@ -17,66 +17,43 @@ * */ -import { FormContextValue } from "@/components/Context/FormContext"; -import { NORMAL_MODAL_OPTIONS,SWITCH_OPTIONS } from "@/services/constants"; -import { l } from "@/utils/intl"; +import { FormContextValue } from '@/components/Context/FormContext'; +import { NORMAL_MODAL_OPTIONS,SWITCH_OPTIONS } from '@/services/constants'; +import { l } from '@/utils/intl'; import { - ModalForm, ProFormCheckbox, - ProFormDigit, - ProFormGroup, - ProFormRadio, - ProFormSelect, - ProFormSwitch, - ProFormTextArea -} from "@ant-design/pro-components"; - -import { PriorityList,TimeoutNotifyStrategy } from "@/pages/DataStudio/HeaderContainer/PushDolphin/constants"; -import { queryDataByParams } from "@/services/BusinessCrud"; -import {DolphinTaskDefinition, DolphinTaskMinInfo} from "@/types/Studio/data.d"; -import { Button,Form,Space } from "antd"; -import { DefaultOptionType } from "antd/es/select"; -import React,{ useEffect } from "react"; +ModalForm, +ProFormCheckbox, +ProFormDigit, +ProFormGroup, +ProFormSelect, +ProFormSwitch, +ProFormText, +ProFormTextArea +} from '@ant-design/pro-components'; + +import { PriorityList,TimeoutNotifyStrategy } from '@/pages/DataStudio/HeaderContainer/PushDolphin/constants'; +import { transformPushDolphinParams } from "@/pages/DataStudio/HeaderContainer/PushDolphin/function"; +import { DolphinTaskDefinition,DolphinTaskMinInfo,PushDolphinParams } from '@/types/Studio/data.d'; +import {Button, Form, Tag} from 'antd'; +import { DefaultOptionType } from 'antd/es/select'; +import React from 'react'; +import {InitPushDolphinParams} from "@/types/Studio/init.d"; +import {TaskDataType} from "@/pages/DataStudio/model"; type PushDolphinProps = { onCancel: () => void; - value: any; + dolphinTaskList: DolphinTaskMinInfo[]; + dolphinDefinitionTask : Partial; modalVisible: boolean; + currentDinkyTaskValue: Partial; loading: boolean; + onSubmit: (values: DolphinTaskDefinition) => void; }; -interface PushDolphinParams { - upstreamCodes: string[]; - taskPriority: number; - failRetryTimes: number; - failRetryInterval: number; - delayTime: number; - timeout: number; - timeoutFlag: boolean; - flag: boolean; - timeoutNotifyStrategy: string[]; - description: string; - timeoutNotifyStrategyType: string; -} - export const PushDolphin: React.FC = (props) => { - const { onCancel, value, modalVisible, loading } = props; - - const [dolphinTaskList, setDolphinTaskList] = React.useState([]); - - const [formValues, setFormValues] = React.useState({ - upstreamCodes: [], - taskPriority: 0, - failRetryTimes: 0, - failRetryInterval: 0, - delayTime: 0, - timeout: 0, - timeoutFlag: false, - flag: false, - timeoutNotifyStrategy: [], - description: '', - timeoutNotifyStrategyType: 'WARN', - }) + const { onCancel,onSubmit, modalVisible,dolphinTaskList,dolphinDefinitionTask,currentDinkyTaskValue, loading } = props; + const [formValues, setFormValues] = React.useState(transformPushDolphinParams(dolphinDefinitionTask as DolphinTaskDefinition,{...InitPushDolphinParams,taskId: currentDinkyTaskValue?.id ?? 
''},true) as PushDolphinParams); /** * init form @@ -101,46 +78,55 @@ export const PushDolphin: React.FC = (props) => { formContext.resetForm(); }; + const handlePushDolphinSubmit = async () => { + const values = form.validateFields(); + if (!values) { + return; + } + const transformPushDolphinParamsValue : DolphinTaskDefinition = transformPushDolphinParams(dolphinDefinitionTask as DolphinTaskDefinition, formValues, false) as DolphinTaskDefinition; + onSubmit(transformPushDolphinParamsValue); + console.log('transformPushDolphinParamsValue',transformPushDolphinParamsValue) + }; + const renderFooter = () => { return [ , - ]; }; - useEffect(() => { - queryDataByParams('/api/scheduler/upstream/tasks', { - dinkyTaskId: value.id - }).then((res) => setDolphinTaskList(res as DolphinTaskMinInfo[])); - queryDataByParams('/api/scheduler/task', { - dinkyTaskId: value.id - }).then((res) => { - console.log(res) - }); - - }, [modalVisible]); - const buildUpstreamTaskOptions = ( data: DolphinTaskMinInfo[] | undefined ): DefaultOptionType[] => { if (data && data.length > 0) { return data.map((item) => { const label = ( - - {item.taskName} {item.taskType} - - {item.processDefinitionName} [{item.taskVersion}] + <> + + {l('datastudio.header.pushdolphin.taskName','',{name: item.taskName})} + + + {l('datastudio.header.pushdolphin.taskNameExt','',{ + type: item.taskType, + processDefinitionName: item.processDefinitionName + })} - + ); return { label: label, - value: item.id, - key: item.id + value: item.taskCode.toString(), + key: item.taskCode }; }); } @@ -148,95 +134,127 @@ export const PushDolphin: React.FC = (props) => { }; const handleValueChange = (changedValues: any, allValues: any) => { - setFormValues({...formValues,...allValues}); - console.log(changedValues, allValues); + if (allValues) { + setFormValues({...formValues,...allValues}); + } }; + const pushDolphinForm = () => { + return ( + <> + + + + + + + + + + + + + + + + + {/*如果是失败告警,则需要设置告警策略*/} + {formValues.timeoutFlag && ( + <> + + + + + + )} + + + + ); + }; return ( {...NORMAL_MODAL_OPTIONS} - title={`将任务 [ ${value.name} ]推送至 DolphinScheduler`} + title={l('datastudio.header.pushdolphin.title','', {name : currentDinkyTaskValue?.name ?? 
''})} open={modalVisible} form={form} initialValues={formValues} - modalProps={{ onCancel: handleCancel, ...NORMAL_MODAL_OPTIONS }} + modalProps={{ + onCancel: handleCancel, + destroyOnClose: true + }} submitter={{ render: () => [...renderFooter()] }} - syncToInitialValues onValuesChange={handleValueChange} loading={loading} > - - - - - - - - - - - - - - - - - - {/*如果是失败告警,则需要设置告警策略*/} - - - + {pushDolphinForm()} ); }; diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx index 01d3cf3a86..7b81c2769d 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx @@ -18,31 +18,35 @@ */ import { LoadingBtn } from '@/components/CallBackButton/LoadingBtn'; +import { PushpinIcon } from '@/components/Icons/CustomIcons'; import { FlexCenterDiv } from '@/components/StyledComponents'; import { getCurrentData, getCurrentTab, mapDispatchToProps } from '@/pages/DataStudio/function'; import Explain from '@/pages/DataStudio/HeaderContainer/Explain'; import FlinkGraph from '@/pages/DataStudio/HeaderContainer/FlinkGraph'; import { - buildBreadcrumbItems, isCanPushDolphin, + buildBreadcrumbItems, + isCanPushDolphin, isOnline, isRunning, projectCommonShow } from '@/pages/DataStudio/HeaderContainer/function'; +import PushDolphin from '@/pages/DataStudio/HeaderContainer/PushDolphin'; import { cancelTask, changeTaskLife, debugTask, executeSql, - getJobPlan + getJobPlan, + isSql } from '@/pages/DataStudio/HeaderContainer/service'; -import { DataStudioTabsItemType, StateType, TabsPageType, VIEW } from '@/pages/DataStudio/model'; +import {DataStudioTabsItemType, StateType, TabsPageType, TaskDataType, VIEW} from '@/pages/DataStudio/model'; import { JOB_LIFE_CYCLE, JOB_STATUS } from '@/pages/DevOps/constants'; -import {SysConfigStateType} from '@/pages/SettingCenter/GlobalSetting/model'; +import { SysConfigStateType } from '@/pages/SettingCenter/GlobalSetting/model'; import { SettingConfigKeyEnum } from '@/pages/SettingCenter/GlobalSetting/SettingOverView/constants'; -import { handlePutDataJson } from '@/services/BusinessCrud'; +import {handleOption, handlePutDataJson, queryDataByParams} from '@/services/BusinessCrud'; import { DIALECT } from '@/services/constants'; import { l } from '@/utils/intl'; -import { SuccessMessageAsync } from '@/utils/messages'; +import {SuccessMessageAsync} from '@/utils/messages'; import { ApartmentOutlined, BugOutlined, @@ -54,14 +58,13 @@ import { PauseOutlined, RotateRightOutlined, SaveOutlined, - ScheduleOutlined, + ScheduleOutlined } from '@ant-design/icons'; +import { connect } from '@umijs/max'; import { Breadcrumb, Descriptions, Modal, Space } from 'antd'; import { ButtonProps } from 'antd/es/button/button'; -import React, {memo, useEffect, useState} from 'react'; -import {PushpinIcon} from "@/components/Icons/CustomIcons"; -import PushDolphin from "@/pages/DataStudio/HeaderContainer/PushDolphin"; -import {connect} from "@umijs/max"; +import React, { memo, useEffect, useState } from 'react'; +import {DolphinTaskDefinition, DolphinTaskMinInfo} from "@/types/Studio/data.d"; const headerStyle: React.CSSProperties = { display: 'inline-flex', @@ -91,21 +94,25 @@ const HeaderContainer = (props: connect) => { saveTabs, updateJobRunningMsg, queryDsConfig, - enabledDs, + enabledDs } = props; - const [modal, contextHolder] = Modal.useModal(); const [pushDolphinState, setPushDolphinState] = useState<{ modalVisible: boolean; - loading: boolean; - 
value: any; - + buttonLoading: boolean; + confirmLoading: boolean; + dolphinTaskList: DolphinTaskMinInfo[]; + dolphinDefinitionTask: Partial; + currentDinkyTaskValue: Partial; }>({ modalVisible: false, - loading: false, - value: {}, + buttonLoading: false, + confirmLoading: false, + dolphinTaskList: [], + dolphinDefinitionTask: {}, + currentDinkyTaskValue: {} }); useEffect(() => { @@ -113,17 +120,40 @@ const HeaderContainer = (props: connect) => { }, []); + + const currentData = getCurrentData(panes, activeKey); const currentTab = getCurrentTab(panes, activeKey) as DataStudioTabsItemType; + const handlePushDolphinOpen = async () => { + const dinkyTaskId = currentData?.id + const dolphinTaskList: DolphinTaskMinInfo[] | undefined = await queryDataByParams('/api/scheduler/queryUpstreamTasks', {dinkyTaskId}); + const dolphinTaskDefinition: DolphinTaskDefinition | undefined = await queryDataByParams('/api/scheduler/queryTaskDefinition', {dinkyTaskId}); + setPushDolphinState((prevState) => ({ + ...prevState, + buttonLoading: true, + confirmLoading: false, + modalVisible: true, + dolphinTaskList: dolphinTaskList ?? [], + dolphinDefinitionTask: dolphinTaskDefinition ?? {}, + currentDinkyTaskValue: currentData as TaskDataType, + })); + }; + + + + const handlePushDolphinCancel = async () => { - setPushDolphinState(prevState => ({ + setPushDolphinState((prevState) => ({ ...prevState, modalVisible: false, - loading: false, - value: {} - })) + buttonLoading: false, + dolphinTaskList: [], + confirmLoading: false, + dolphinDefinitionTask: {}, + currentDinkyTaskValue: {} + })); }; const handleSave = async () => { @@ -192,6 +222,11 @@ const HeaderContainer = (props: connect) => { }); await SuccessMessageAsync(l('pages.datastudio.editor.exec.success')); currentData.status = JOB_STATUS.RUNNING; + // Common sql task is synchronized, so it needs to automatically update the status to finished. 
+ if (isSql(currentData.dialect)) { + currentData.status = JOB_STATUS.FINISHED; + } + if (currentTab) currentTab.console.result = res.data.result; saveTabs({ ...props.tabs }); }; @@ -280,17 +315,11 @@ const HeaderContainer = (props: connect) => { }, { // 推送海豚, 此处需要将系统设置中的 ds 的配置拿出来做判断 启用才展示 - icon: , + icon: , title: l('button.push'), hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's', isShow: enabledDs && isCanPushDolphin(currentData), - click: () => { - setPushDolphinState(prevState => ({ - ...prevState, - modalVisible: true, - value: currentData - })) - }, + click: () => handlePushDolphinOpen() }, { // 发布按钮 @@ -433,6 +462,17 @@ const HeaderContainer = (props: connect) => { ); }; + + const handlePushDolphinSubmit = async (value : DolphinTaskDefinition) => { + setPushDolphinState((prevState) => ({ ...prevState, loading: true })); + await handleOption( + '/api/scheduler/createOrUpdateTaskDefinition', + `推送任务[${currentData?.name}]至 DolphinScheduler`, + value, + ); + await handlePushDolphinCancel(); + }; + /** * render */ @@ -441,7 +481,17 @@ const HeaderContainer = (props: connect) => { {renderBreadcrumbItems()} {renderRightButtons()} - {pushDolphinState.modalVisible && handlePushDolphinCancel()} value={pushDolphinState.value} modalVisible={pushDolphinState.modalVisible} loading={pushDolphinState.loading} />} + {pushDolphinState.modalVisible && ( + handlePushDolphinCancel()} + currentDinkyTaskValue={pushDolphinState.currentDinkyTaskValue} + modalVisible={pushDolphinState.modalVisible} + loading={pushDolphinState.confirmLoading} + dolphinDefinitionTask={pushDolphinState.dolphinDefinitionTask} + dolphinTaskList={pushDolphinState.dolphinTaskList} + onSubmit={(values) => handlePushDolphinSubmit(values)} + /> + )} ); diff --git a/dinky-web/src/types/Studio/data.d.ts b/dinky-web/src/types/Studio/data.d.ts index c8e1d88d0f..befd454d1e 100644 --- a/dinky-web/src/types/Studio/data.d.ts +++ b/dinky-web/src/types/Studio/data.d.ts @@ -248,7 +248,6 @@ export interface DolphinTaskMinInfo { upstreamTaskName: string; } - export interface TaskParamProperty { prop: string; direct: string; @@ -265,19 +264,19 @@ export interface DolphinTaskDefinition { projectCode: number; userId: number; taskType: string; - taskParams: string; + taskParams: Map; taskParamList: TaskParamProperty[]; taskParamMap: Map; - flag: number; // 0 no 1 yes - taskPriority: number; // 0 highest 1 high 2 medium 3 low 4 lowest + flag: string; // 0 no 1 yes + taskPriority: string; // 0 highest 1 high 2 medium 3 low 4 lowest userName: string; projectName: string; workerGroup: string; environmentCode: number; failRetryTimes: number; failRetryInterval: number; - timeoutFlag: number; // 0 close 1 open - timeoutNotifyStrategy: number; // 0 warning 1 failure 2 warning and failure + timeoutFlag: string; // 0 close 1 open + timeoutNotifyStrategy: string; // 0 warning 1 failure 2 warning and failure timeout: number; delayTime: number; resourceIds: string; @@ -288,9 +287,23 @@ export interface DolphinTaskDefinition { taskGroupPriority: number; cpuQuota: number; memoryMax: number; - taskExecuteType: number;// 0 batch 1 stream + taskExecuteType: number; // 0 batch 1 stream processDefinitionCode: number; processDefinitionVersion: number; processDefinitionName: string; upstreamTaskMap: Map; } + +export interface PushDolphinParams { + taskId: number | string; + upstreamCodes: string[]; + taskPriority: string; + failRetryTimes: number; + failRetryInterval: number; + delayTime: number; + timeout: number; + timeoutFlag: boolean | string; + flag: boolean 
| string; + timeoutNotifyStrategy: string[] | string; + description: string; +}
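
Taken together, the frontend changes in this patch reduce the push flow to three calls: load the candidate upstream tasks and any previously pushed definition from the two new query endpoints, map the ProForm values (booleans and arrays) back onto DolphinScheduler's string enums, and post the merged payload to createOrUpdateTaskDefinition. The sketch below is only an illustration of that sequence under stated assumptions: it uses plain fetch with a made-up BASE_URL instead of Dinky's queryDataByParams/handleOption helpers, omits authentication headers, and declares a trimmed-down PushParams type in place of the full PushDolphinParams/DolphinTaskDefinition interfaces from data.d.ts.

```typescript
// Illustrative only: the endpoint paths and the OPEN/CLOSE, YES/NO, join('') mappings come
// from this patch; BASE_URL, PushParams and the plain-fetch wiring are assumptions.
const BASE_URL = 'http://127.0.0.1:8888'; // hypothetical Dinky address

interface PushParams {
  taskId: number | string;
  upstreamCodes: string[];          // DolphinScheduler task codes, as strings
  taskPriority: string;             // 'HIGHEST' | 'HIGH' | 'MEDIUM' | 'LOW' | 'LOWEST'
  failRetryTimes: number;
  failRetryInterval: number;        // minutes
  delayTime: number;                // minutes
  timeout: number;                  // minutes
  timeoutFlag: boolean;             // switch in the form, 'OPEN' / 'CLOSE' on the wire
  flag: boolean;                    // switch in the form, 'YES' / 'NO' on the wire
  timeoutNotifyStrategy: string[];  // subset of ['WARN', 'FAILED']
  description: string;
}

// Small helper: GET a Dinky endpoint and unwrap the Result envelope (auth omitted).
async function getData<T>(path: string, params: Record<string, string>): Promise<T> {
  const res = await fetch(`${BASE_URL}${path}?${new URLSearchParams(params)}`);
  const body = await res.json();
  return body.data as T;
}

export async function pushTaskToDolphin(dinkyTaskId: number, form: PushParams) {
  // 1. The same queries the PushDolphin modal issues when it opens.
  const upstream = await getData<unknown[] | undefined>('/api/scheduler/queryUpstreamTasks', {
    dinkyTaskId: String(dinkyTaskId)
  });
  const existing = await getData<Record<string, unknown> | null>(
    '/api/scheduler/queryTaskDefinition',
    { dinkyTaskId: String(dinkyTaskId) }
  );
  console.info(`found ${upstream?.length ?? 0} candidate upstream tasks`);

  // 2. Map the form values back to DolphinScheduler's string enums, i.e. the
  //    toFormValues === false direction of transformPushDolphinParams.
  const payload = {
    ...(existing ?? {}),
    ...form,
    taskId: String(dinkyTaskId),
    timeoutFlag: form.timeoutFlag ? 'OPEN' : 'CLOSE',
    flag: form.flag ? 'YES' : 'NO',
    timeoutNotifyStrategy: form.timeoutNotifyStrategy.join('') // e.g. 'WARNFAILED'
  };

  // 3. Create or update the DINKY task definition on the DolphinScheduler side.
  const res = await fetch(`${BASE_URL}/api/scheduler/createOrUpdateTaskDefinition`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload)
  });
  return res.json();
}
```

Keeping booleans and arrays in the form state and converting to DolphinScheduler's string enums only at submit time keeps the ProForm switches and checkbox groups simple, and transformPushDolphinParams remains the single place where that conversion lives.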