From b4b18f73d63bcf793b4c060f38c26156483b6b34 Mon Sep 17 00:00:00 2001
From: gaoyan
Date: Wed, 11 Oct 2023 09:12:57 +0800
Subject: [PATCH 1/2] Refactor task submit (#2368)

* fix dag npe
* refactor task submit
* merge dev
* merge dev and format code
* fix some problems
* merge dev
* remove useSession
* format code
* format code
* fix task version time
* change task version create
* format code
---
 .../org/dinky/controller/APIController.java  |  284 +----
 .../dinky/controller/StudioController.java   |  122 +-
 .../org/dinky/controller/TaskController.java |  289 ++---
 .../org/dinky/data/dto/APIExecuteSqlDTO.java |   39 +-
 .../org/dinky/data/dto/APIExplainSqlDTO.java |   10 +-
 .../dinky/data/dto/AbstractStatementDTO.java |   34 +-
 .../java/org/dinky/data/dto/StudioDDLDTO.java |    5 +-
 .../dinky/data/dto/StudioMetaStoreDTO.java   |   32 +-
 .../{StudioExecuteDTO.java => TaskDTO.java}  |  236 ++--
 .../NotSupportExplainExcepition.java         |   26 +
 .../data/exception/SqlExplainExcepition.java |   28 +
 .../data/exception/TaskNotDoneException.java |   28 +
 .../main/java/org/dinky/data/model/Task.java |  144 +--
 .../org/dinky/data/model/TaskVersion.java    |    4 +-
 .../dinky/job/handler/JobAlertHandler.java   |    4 +-
 .../java/org/dinky/service/APIService.java   |   28 +-
 .../org/dinky/service/DataBaseService.java   |    8 +
 .../org/dinky/service/SavepointsService.java |    3 +
 .../java/org/dinky/service/StudioService.java |   19 -
 .../java/org/dinky/service/TaskService.java  |   56 +-
 .../org/dinky/service/TaskVersionService.java |    3 +
 .../java/org/dinky/service/UserService.java  |    2 +
 .../dinky/service/impl/APIServiceImpl.java   |   96 +-
 .../service/impl/DataBaseServiceImpl.java    |   60 +
 .../service/impl/SavepointsServiceImpl.java  |   21 +
 .../dinky/service/impl/StudioServiceImpl.java |  333 +----
 .../dinky/service/impl/TaskServiceImpl.java  | 1049 ++++------------
 .../service/impl/TaskVersionServiceImpl.java |   46 +
 .../dinky/service/impl/UserServiceImpl.java  |   17 +
 .../utils/TaskOneClickOperatingUtil.java     |  194 ---
 .../main/java/org/dinky/config/Dialect.java  |    2 +-
 .../org/dinky/data/enums/JobLifeCycle.java   |    8 +-
 .../java/org/dinky/data/enums/Status.java    |    5 +
 .../resources/i18n/messages_en_US.properties |    8 +-
 .../resources/i18n/messages_zh_CN.properties |    7 +
 .../org/dinky/data/result/ResultPool.java    |   12 +
 .../java/org/dinky/explainer/Explainer.java  |   17 +-
 .../main/java/org/dinky/job/JobConfig.java   |  275 +----
 .../main/java/org/dinky/job/JobManager.java  |    2 +-
 .../java/org/dinky/core/JobManagerTest.java  |   40 +-
 .../process/exception/ExcuteException.java   |   48 +
 .../dinky/process/model/ProcessEntity.java   |    9 +
 .../components/CallBackButton/LoadingBtn.tsx |   26 +
 dinky-web/src/locales/en-US/global.ts        |    2 +
 dinky-web/src/locales/en-US/pages.ts         |    2 +-
 dinky-web/src/locales/zh-CN/global.ts        |    4 +-
 dinky-web/src/locales/zh-CN/pages.ts         |    6 +-
 .../HeaderContainer/Explain/index.tsx        |    2 +-
 .../DataStudio/HeaderContainer/function.tsx  |   19 +-
 .../DataStudio/HeaderContainer/index.tsx     |  380 +++---
 .../DataStudio/HeaderContainer/service.tsx   |   37 +-
 dinky-web/src/pages/DevOps/constants.tsx     |    8 +-
 52 files changed, 1288 insertions(+), 2851 deletions(-)
 rename dinky-admin/src/main/java/org/dinky/data/dto/{StudioExecuteDTO.java => TaskDTO.java} (53%)
 create mode 100644 dinky-admin/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java
 create mode 100644 dinky-admin/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java
 create mode 100644 dinky-admin/src/main/java/org/dinky/data/exception/TaskNotDoneException.java
 delete mode 100644
dinky-admin/src/main/java/org/dinky/utils/TaskOneClickOperatingUtil.java create mode 100644 dinky-process/src/main/java/org/dinky/process/exception/ExcuteException.java create mode 100644 dinky-web/src/components/CallBackButton/LoadingBtn.tsx diff --git a/dinky-admin/src/main/java/org/dinky/controller/APIController.java b/dinky-admin/src/main/java/org/dinky/controller/APIController.java index b5dc6eeedbc..5f33d090af8 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/APIController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/APIController.java @@ -19,25 +19,22 @@ package org.dinky.controller; -import org.dinky.data.dto.APICancelDTO; -import org.dinky.data.dto.APIExecuteJarDTO; -import org.dinky.data.dto.APIExecuteSqlDTO; -import org.dinky.data.dto.APIExplainSqlDTO; -import org.dinky.data.dto.APISavePointDTO; -import org.dinky.data.dto.APISavePointTaskDTO; +import org.dinky.data.annotation.Log; +import org.dinky.data.dto.TaskDTO; +import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; +import org.dinky.data.exception.NotSupportExplainExcepition; import org.dinky.data.model.JobInstance; -import org.dinky.data.result.APIJobResult; -import org.dinky.data.result.ExplainResult; import org.dinky.data.result.Result; -import org.dinky.data.result.SelectResult; +import org.dinky.data.result.SqlExplainResult; import org.dinky.gateway.result.SavePointResult; import org.dinky.job.JobResult; -import org.dinky.service.APIService; +import org.dinky.process.exception.ExcuteException; import org.dinky.service.JobInstanceService; -import org.dinky.service.StudioService; import org.dinky.service.TaskService; +import java.util.List; + import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; @@ -49,15 +46,12 @@ import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; /** * APIController - * - * @since 2021/12/11 21:44 */ @SuppressWarnings("AlibabaClassNamingShouldBeCamel") @Slf4j @@ -67,237 +61,71 @@ @RequiredArgsConstructor public class APIController { - private final APIService apiService; - private final StudioService studioService; private final TaskService taskService; private final JobInstanceService jobInstanceService; - @GetMapping("/submitTask") + @PostMapping("/submitTask") @ApiOperation("Submit Task") // @Log(title = "Submit Task", businessType = BusinessType.SUBMIT) - @ApiImplicitParam(name = "id", value = "Task Id", required = true, dataType = "Integer") - public Result submitTask(@RequestParam Integer id) { - taskService.initTenantByTaskId(id); - return Result.succeed(taskService.submitTask(id), Status.EXECUTE_SUCCESS); - } - - @PostMapping("/executeSql") - @ApiOperation("Execute Sql") - // @Log(title = "Execute Sql", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "apiExecuteSqlDTO", - value = "API Execute Sql DTO", - required = true, - dataType = "APIExecuteSqlDTO", - dataTypeClass = APIExecuteSqlDTO.class) - public Result executeSql(@RequestBody APIExecuteSqlDTO apiExecuteSqlDTO) { - return Result.succeed(apiService.executeSql(apiExecuteSqlDTO), Status.EXECUTE_SUCCESS); - } - - @PostMapping("/explainSql") - @ApiOperation("Explain Sql") - // @Log(title = "Explain Sql", businessType = BusinessType.EXECUTE) - 
@ApiImplicitParam( - name = "apiExecuteSqlDTO", - value = "API Execute Sql DTO", - required = true, - dataType = "APIExecuteSqlDTO", - dataTypeClass = APIExecuteSqlDTO.class) - public Result explainSql(@RequestBody APIExplainSqlDTO apiExecuteSqlDTO) { - return Result.succeed(apiService.explainSql(apiExecuteSqlDTO), Status.EXECUTE_SUCCESS); + public Result submitTask(@RequestBody TaskDTO taskDTO) throws ExcuteException { + JobResult jobResult = taskService.submitTask(taskDTO.getId(), null); + if (jobResult.isSuccess()) { + return Result.succeed(jobResult, Status.EXECUTE_SUCCESS); + } else { + return Result.failed(jobResult, jobResult.getError()); + } } - @PostMapping("/getJobPlan") - @ApiOperation("Get Job Plan") - @ApiImplicitParam( - name = "apiExecuteSqlDTO", - value = "API Execute Sql DTO", - required = true, - dataType = "APIExecuteSqlDTO", - dataTypeClass = APIExecuteSqlDTO.class) - public Result getJobPlan(@RequestBody APIExplainSqlDTO apiExecuteSqlDTO) { - return Result.succeed(apiService.getJobPlan(apiExecuteSqlDTO), Status.EXECUTE_SUCCESS); - } - - @PostMapping("/getStreamGraph") - @ApiOperation("Get Stream Graph") - @ApiImplicitParam( - name = "apiExecuteSqlDTO", - value = "API Execute Sql DTO", - required = true, - dataType = "APIExecuteSqlDTO", - dataTypeClass = APIExecuteSqlDTO.class) - public Result getStreamGraph(@RequestBody APIExplainSqlDTO apiExecuteSqlDTO) { - return Result.succeed(apiService.getStreamGraph(apiExecuteSqlDTO), Status.EXECUTE_SUCCESS); - } - - @GetMapping("/getJobData") - @ApiOperation("Get Job Data") - @ApiImplicitParam( - name = "jobId", - value = "Job Id", - required = true, - dataType = "String", - dataTypeClass = String.class) - public Result getJobData(@RequestParam String jobId) { - return Result.succeed(studioService.getJobData(jobId)); - } - - @PostMapping("/cancel") + @GetMapping("/cancel") // @Log(title = "Cancel Flink Job", businessType = BusinessType.TRIGGER) @ApiOperation("Cancel Flink Job") - @ApiImplicitParam( - name = "apiCancelDTO", - value = "API Cancel DTO", - required = true, - dataType = "APICancelDTO", - dataTypeClass = APICancelDTO.class) - public Result cancel(@RequestBody APICancelDTO apiCancelDTO) { - return Result.succeed(apiService.cancel(apiCancelDTO), Status.EXECUTE_SUCCESS); + public Result cancel(@RequestParam Integer id) { + return Result.succeed(taskService.cancelTaskJob(taskService.getTaskInfoById(id)), Status.EXECUTE_SUCCESS); + } + + /** + * 重启任务 + */ + @GetMapping(value = "/restartTask") + @ApiOperation("Restart Task") + // @Log(title = "Restart Task", businessType = BusinessType.REMOTE_OPERATION) + public Result restartTask(@RequestParam Integer id, @RequestParam String savePointPath) + throws ExcuteException { + return Result.succeed(taskService.restartTask(id, savePointPath)); } @PostMapping("/savepoint") // @Log(title = "Savepoint Trigger", businessType = BusinessType.TRIGGER) @ApiOperation("Savepoint Trigger") - @ApiImplicitParam( - name = "apiSavePointDTO", - value = "API SavePoint DTO", - required = true, - dataType = "APISavePointDTO", - dataTypeClass = APISavePointDTO.class) - public Result savepoint(@RequestBody APISavePointDTO apiSavePointDTO) { - return Result.succeed(apiService.savepoint(apiSavePointDTO), Status.EXECUTE_SUCCESS); - } - - @PostMapping("/executeJar") - @ApiOperation("Execute Jar") - // @Log(title = "Execute Jar", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "apiExecuteJarDTO", - value = "API Execute Jar DTO", - required = true, - dataType = "APIExecuteJarDTO", - 
dataTypeClass = APIExecuteJarDTO.class) - public Result executeJar(@RequestBody APIExecuteJarDTO apiExecuteJarDTO) { - return Result.succeed(apiService.executeJar(apiExecuteJarDTO), Status.EXECUTE_SUCCESS); - } - - @PostMapping("/savepointTask") - @ApiOperation("Savepoint Task") - // @Log(title = "Savepoint Task", businessType = BusinessType.TRIGGER) - @ApiImplicitParam( - name = "apiSavePointTaskDTO", - value = "API SavePoint Task DTO", - required = true, - dataType = "APISavePointTaskDTO", - dataTypeClass = APISavePointTaskDTO.class) - public Result savepointTask(@RequestBody APISavePointTaskDTO apiSavePointTaskDTO) { + public Result savepoint(@RequestParam Integer taskId, @RequestParam String savePointType) { return Result.succeed( - taskService.savepointTask(apiSavePointTaskDTO.getTaskId(), apiSavePointTaskDTO.getType()), "执行成功"); + taskService.savepointTaskJob(taskService.getTaskInfoById(taskId), savePointType), + Status.EXECUTE_SUCCESS); } - /** 重启任务 */ - @GetMapping("/restartTask") - @ApiOperation("Restart Task") - // @Log(title = "Restart Task", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - dataTypeClass = Integer.class) - public Result restartTask(@RequestParam Integer id) { - taskService.initTenantByTaskId(id); - return Result.succeed(taskService.restartTask(id, null), Status.RESTART_SUCCESS); - } - - /** 选择保存点重启任务 */ - @GetMapping("/selectSavePointRestartTask") - @ApiOperation("Select SavePoint Restart Task") - // @Log(title = "Select SavePoint Restart Task", businessType = BusinessType.EXECUTE) - @ApiImplicitParams({ - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - dataTypeClass = Integer.class), - @ApiImplicitParam( - name = "savePointPath", - value = "SavePoint Path", - required = true, - dataType = "String", - dataTypeClass = String.class) - }) - public Result restartTask(@RequestParam Integer id, @RequestParam String savePointPath) { - taskService.initTenantByTaskId(id); - return Result.succeed(taskService.restartTask(id, savePointPath), Status.RESTART_SUCCESS); - } - - /** 上线任务 */ - @GetMapping("/onLineTask") - @ApiOperation("Online Task") - // @Log(title = "Online Task", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - dataTypeClass = Integer.class) - public Result onLineTask(@RequestParam Integer id) { - taskService.initTenantByTaskId(id); - return taskService.onLineTask(id); - } - - /** 下线任务 */ - @GetMapping("/offLineTask") - @ApiOperation("Offline Task") - // @Log(title = "Offline Task", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - dataTypeClass = Integer.class) - public Result offLineTask(@RequestParam Integer id) { - taskService.initTenantByTaskId(id); - return taskService.offLineTask(id, null); + @PostMapping("/explainSql") + @ApiOperation("Explain Sql") + public Result> explainSql(@RequestBody TaskDTO taskDTO) throws NotSupportExplainExcepition { + return Result.succeed(taskService.explainTask(taskDTO), Status.EXECUTE_SUCCESS); } - /** 重新上线任务 */ - @GetMapping("/reOnLineTask") - @ApiOperation("ReOnline Task") - // @Log(title = "ReOnline Task", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - dataTypeClass = Integer.class) - public Result 
reOnLineTask(@RequestParam Integer id) { - taskService.initTenantByTaskId(id); - return taskService.reOnLineTask(id, null); + @PostMapping("/getJobPlan") + @ApiOperation("Get Job Plan") + public Result getJobPlan(@RequestBody TaskDTO taskDTO) { + return Result.succeed(taskService.getJobPlan(taskDTO), Status.EXECUTE_SUCCESS); } - /** 选择保存点重新上线任务 */ - @GetMapping("/selectSavePointReOnLineTask") - @ApiOperation("Select SavePoint ReOnline Task") - // @Log(title = "Select SavePoint ReOnline Task", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - dataTypeClass = Integer.class) - public Result selectSavePointReOnLineTask(@RequestParam Integer id, @RequestParam String savePointPath) { - taskService.initTenantByTaskId(id); - return taskService.reOnLineTask(id, savePointPath); + @PostMapping("/getStreamGraph") + @ApiOperation("Get Stream Graph") + public Result getStreamGraph(@RequestBody TaskDTO taskDTO) { + return Result.succeed(taskService.getStreamGraph(taskDTO), Status.EXECUTE_SUCCESS); } - /** 获取Job实例的信息 */ + /** + * 获取Job实例的信息 + */ @GetMapping("/getJobInstance") @ApiOperation("Get Job Instance") - // @Log(title = "Get Job Instance", businessType = BusinessType.QUERY) @ApiImplicitParam( name = "id", value = "Job Instance Id", @@ -309,10 +137,8 @@ public Result getJobInstance(@RequestParam Integer id) { return Result.succeed(jobInstanceService.getById(id)); } - /** 通过 taskId 获取 Task 对应的 Job 实例的信息 */ @GetMapping("/getJobInstanceByTaskId") @ApiOperation("Get Job Instance By Task Id") - // @Log(title = "Get Job Instance By Task Id", businessType = BusinessType.QUERY) @ApiImplicitParam( name = "id", value = "Task Id", @@ -323,4 +149,18 @@ public Result getJobInstanceByTaskId(@RequestParam Integer id) { taskService.initTenantByTaskId(id); return Result.succeed(jobInstanceService.getJobInstanceByTaskId(id)); } + + @GetMapping(value = "/exportSql") + @ApiOperation("Export Sql") + @Log(title = "Export Sql", businessType = BusinessType.EXPORT) + @ApiImplicitParam( + name = "id", + value = "Task Id", + required = true, + dataType = "Integer", + paramType = "query", + dataTypeClass = Integer.class) + public Result exportSql(@RequestParam Integer id) { + return Result.succeed(taskService.exportSql(id)); + } } diff --git a/dinky-admin/src/main/java/org/dinky/controller/StudioController.java b/dinky-admin/src/main/java/org/dinky/controller/StudioController.java index d8298b1d859..cd755893432 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/StudioController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/StudioController.java @@ -23,7 +23,6 @@ import org.dinky.data.annotation.Log; import org.dinky.data.dto.StudioCADTO; import org.dinky.data.dto.StudioDDLDTO; -import org.dinky.data.dto.StudioExecuteDTO; import org.dinky.data.dto.StudioMetaStoreDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; @@ -33,9 +32,7 @@ import org.dinky.data.result.IResult; import org.dinky.data.result.Result; import org.dinky.data.result.SelectResult; -import org.dinky.data.result.SqlExplainResult; import org.dinky.explainer.lineage.LineageResult; -import org.dinky.job.JobResult; import org.dinky.metadata.result.JdbcSelectResult; import org.dinky.service.StudioService; @@ -49,7 +46,6 @@ import org.springframework.web.bind.annotation.RestController; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import io.swagger.annotations.Api; import 
io.swagger.annotations.ApiImplicitParam; @@ -72,75 +68,6 @@ public class StudioController { private final StudioService studioService; - /** 执行Sql */ - @PostMapping("/executeSql") - @ApiOperation("Execute Sql") - @Log(title = "Execute Sql", businessType = BusinessType.EXECUTE) - @ApiImplicitParam( - name = "studioExecuteDTO", - value = "Execute Sql", - required = true, - dataType = "StudioExecuteDTO", - paramType = "body") - public Result executeSql(@RequestBody StudioExecuteDTO studioExecuteDTO) { - try { - JobResult jobResult = studioService.executeSql(studioExecuteDTO); - return Result.succeed(jobResult, Status.EXECUTE_SUCCESS); - } catch (Exception ex) { - JobResult jobResult = new JobResult(); - jobResult.setJobConfig(studioExecuteDTO.getJobConfig()); - jobResult.setSuccess(false); - jobResult.setStatement(studioExecuteDTO.getStatement()); - jobResult.setError(ex.toString()); - return Result.failed(jobResult, ex.toString()); - } - } - - /** 解释Sql */ - @PostMapping("/explainSql") - @ApiOperation("Explain Sql") - @ApiImplicitParam( - name = "studioExecuteDTO", - value = "Explain Sql", - required = true, - dataType = "StudioExecuteDTO", - paramType = "body") - public Result> explainSql(@RequestBody StudioExecuteDTO studioExecuteDTO) { - return Result.succeed(studioService.explainSql(studioExecuteDTO), "解释成功"); - } - - /** 获取执行图 */ - @PostMapping("/getStreamGraph") - @ApiOperation("Get Stream Graph") - @ApiImplicitParam( - name = "studioExecuteDTO", - value = "Get Stream Graph", - required = true, - dataType = "StudioExecuteDTO", - paramType = "body") - public Result getStreamGraph(@RequestBody StudioExecuteDTO studioExecuteDTO) { - return Result.succeed(studioService.getStreamGraph(studioExecuteDTO)); - } - - /** 获取sql的jobplan */ - @PostMapping("/getJobPlan") - @ApiOperation("Get Job Execute Plan") - @ApiImplicitParam( - name = "studioExecuteDTO", - value = "Get Job Execute Plan", - required = true, - dataType = "StudioExecuteDTO", - paramType = "body") - public Result getJobPlan(@RequestBody StudioExecuteDTO studioExecuteDTO) { - try { - return Result.succeed(studioService.getJobPlan(studioExecuteDTO)); - } catch (Exception e) { - e.printStackTrace(); - return Result.failed(e.getMessage()); - } - } - - /** 进行DDL操作 */ @PostMapping("/executeDDL") @ApiOperation("Execute SQL DDL") @Log(title = "Execute SQL DDL", businessType = BusinessType.EXECUTE) @@ -162,6 +89,7 @@ public Result executeDDL(@RequestBody StudioDDLDTO studioDDLDTO) { public Result getJobData(@RequestParam String jobId) { return Result.succeed(studioService.getJobData(jobId)); } + /** 根据jobId获取数据 */ @GetMapping("/getCommonSqlData") @ApiOperation("Get Common Sql Data") @@ -203,54 +131,6 @@ public Result listFlinkJobs(@RequestParam Integer clusterId) { return Result.succeed(jobs.toArray(new JsonNode[0])); } - /** 停止任务 */ - @GetMapping("/cancel") - @ApiOperation("Cancel Flink Job") - @Log(title = "Cancel Flink Job", businessType = BusinessType.REMOTE_OPERATION) - @ApiImplicitParams({ - @ApiImplicitParam( - name = "clusterId", - value = "clusterId", - required = true, - dataType = "Integer", - paramType = "query"), - @ApiImplicitParam(name = "jobId", value = "jobId", required = true, dataType = "String", paramType = "query") - }) - public Result cancelFlinkJob(@RequestParam Integer clusterId, @RequestParam String jobId) { - return Result.succeed(studioService.cancelFlinkJob(clusterId, jobId), Status.STOP_SUCCESS); - } - - /** savepoint */ - @GetMapping("/savepoint") - @ApiOperation("Savepoint Trigger") - @Log(title = "Savepoint 
Trigger", businessType = BusinessType.REMOTE_OPERATION) - @ApiImplicitParams({ - @ApiImplicitParam( - name = "clusterId", - value = "clusterId", - required = true, - dataType = "Integer", - paramType = "query"), - @ApiImplicitParam(name = "jobId", value = "jobId", required = true, dataType = "String", paramType = "query"), - @ApiImplicitParam( - name = "savePointType", - value = "savePointType", - required = true, - dataType = "String", - paramType = "query"), - @ApiImplicitParam(name = "name", value = "name", required = true, dataType = "String", paramType = "query"), - @ApiImplicitParam(name = "taskId", value = "taskId", required = true, dataType = "Integer", paramType = "query") - }) - public Result savepointTrigger( - @RequestParam Integer clusterId, - @RequestParam String jobId, - @RequestParam String savePointType, - @RequestParam String name, - @RequestParam Integer taskId) { - return Result.succeed( - studioService.savepointTrigger(taskId, clusterId, jobId, savePointType, name), "savepoint 成功"); - } - /** 获取 Meta Store Catalog 和 Database */ @PostMapping("/getMSCatalogs") @ApiOperation("Get Catalog List") diff --git a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java index d67846df582..096909e9d27 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java @@ -20,21 +20,21 @@ package org.dinky.controller; import org.dinky.data.annotation.Log; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.dto.TaskRollbackVersionDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.JobLifeCycle; -import org.dinky.data.enums.JobStatus; import org.dinky.data.enums.Status; -import org.dinky.data.enums.TaskOperatingSavepointSelect; +import org.dinky.data.exception.NotSupportExplainExcepition; import org.dinky.data.model.Task; import org.dinky.data.result.ProTableResult; import org.dinky.data.result.Result; +import org.dinky.data.result.SqlExplainResult; +import org.dinky.gateway.result.SavePointResult; import org.dinky.job.JobResult; +import org.dinky.process.exception.ExcuteException; import org.dinky.service.TaskService; -import org.dinky.utils.TaskOneClickOperatingUtil; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; import org.springframework.web.bind.annotation.GetMapping; @@ -47,8 +47,8 @@ import org.springframework.web.multipart.MultipartFile; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; -import cn.hutool.core.lang.Dict; import cn.hutool.core.lang.tree.Tree; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -56,11 +56,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -/** - * 任务 Controller - * - * @since 2021-05-24 - */ @Slf4j @RestController @Api(tags = "Task Controller") @@ -70,7 +65,69 @@ public class TaskController { private final TaskService taskService; - /** 新增或者更新 */ + @GetMapping("/submitTask") + @ApiOperation("Submit Task") + @Log(title = "Submit Task", businessType = BusinessType.SUBMIT) + public Result submitTask(@RequestParam Integer id) throws ExcuteException { + JobResult jobResult = taskService.submitTask(id, null); + if (jobResult.isSuccess()) { + return Result.succeed(jobResult, Status.EXECUTE_SUCCESS); + } else { + return Result.failed(jobResult, jobResult.getError()); + } + } + + @GetMapping("/cancel") + @Log(title 
= "Cancel Flink Job", businessType = BusinessType.TRIGGER) + @ApiOperation("Cancel Flink Job") + public Result cancel(@RequestParam Integer id) { + return Result.succeed(taskService.cancelTaskJob(taskService.getTaskInfoById(id)), Status.EXECUTE_SUCCESS); + } + + /** 重启任务 */ + @GetMapping(value = "/restartTask") + @ApiOperation("Restart Task") + @Log(title = "Restart Task", businessType = BusinessType.REMOTE_OPERATION) + public Result restartTask(@RequestParam Integer id, @RequestParam String savePointPath) + throws ExcuteException { + return Result.succeed(taskService.restartTask(id, savePointPath)); + } + + @GetMapping("/savepoint") + @Log(title = "Savepoint Trigger", businessType = BusinessType.TRIGGER) + @ApiOperation("Savepoint Trigger") + public Result savepoint(@RequestParam Integer taskId, @RequestParam String savePointType) { + return Result.succeed( + taskService.savepointTaskJob(taskService.getTaskInfoById(taskId), savePointType), + Status.EXECUTE_SUCCESS); + } + + @GetMapping("/onLineTask") + @Log(title = "onLineTask", businessType = BusinessType.TRIGGER) + @ApiOperation("onLineTask") + public Result onLineTask(@RequestParam Integer taskId) { + return Result.succeed(taskService.changeTaskLifeRecyle(taskId, JobLifeCycle.ONLINE)); + } + + @GetMapping("/offLineTask") + @Log(title = "offLineTask", businessType = BusinessType.TRIGGER) + @ApiOperation("offLineTask") + public Result offLineTask(@RequestParam Integer taskId) { + return Result.succeed(taskService.changeTaskLifeRecyle(taskId, JobLifeCycle.DEVELOP)); + } + + @PostMapping("/explainSql") + @ApiOperation("Explain Sql") + public Result> explainSql(@RequestBody TaskDTO taskDTO) throws NotSupportExplainExcepition { + return Result.succeed(taskService.explainTask(taskDTO), Status.EXECUTE_SUCCESS); + } + + @PostMapping("/getJobPlan") + @ApiOperation("Get Job Plan") + public Result getJobPlan(@RequestBody TaskDTO taskDTO) { + return Result.succeed(taskService.getJobPlan(taskDTO), Status.EXECUTE_SUCCESS); + } + @PutMapping @ApiOperation("Insert Or Update Task") @Log(title = "Insert Or Update Task", businessType = BusinessType.INSERT_OR_UPDATE) @@ -89,7 +146,6 @@ public Result saveOrUpdateTask(@RequestBody Task task) { } } - /** 动态查询列表 */ @PostMapping @ApiOperation("Query Task List") @ApiImplicitParam( @@ -103,41 +159,6 @@ public ProTableResult listTasks(@RequestBody JsonNode para) { return taskService.selectForProTable(para); } - /** 批量执行 */ - @PostMapping(value = "/submit") - @ApiOperation("Batch Execute Task") - @Log(title = "Batch Execute Task", businessType = BusinessType.UPDATE) - @ApiImplicitParam( - name = "para", - value = "Task Id List", - required = true, - dataType = "JsonNode", - paramType = "body", - dataTypeClass = JsonNode.class) - public Result> submit(@RequestBody JsonNode para) { - // todo: 没有批量提交 此处需要重构 - if (para.size() > 0) { - List results = new ArrayList<>(); - List error = new ArrayList<>(); - for (final JsonNode item : para) { - Integer id = item.asInt(); - JobResult result = taskService.submitTask(id); - if (!result.isSuccess()) { - error.add(id); - } - results.add(result); - } - if (error.size() == 0) { - return Result.succeed(results, "执行成功"); - } else { - return Result.succeed(results, "执行部分成功,但" + error + "执行失败,共" + error.size() + "次失败。"); - } - } else { - return Result.failed("请选择要执行的记录"); - } - } - - /** 获取指定ID的信息 */ @GetMapping @ApiOperation("Get Task Info By Id") @ApiImplicitParam( @@ -147,41 +168,16 @@ public Result> submit(@RequestBody JsonNode para) { dataType = "Integer", paramType = "query", 
dataTypeClass = Integer.class) - public Result getOneById(@RequestParam Integer id) { - Task task = taskService.getTaskInfoById(id); - return Result.succeed(task); + public Result getOneById(@RequestParam Integer id) { + return Result.succeed(taskService.getTaskInfoById(id)); } - /** 获取所有可用的 FlinkSQLEnv */ @GetMapping(value = "/listFlinkSQLEnv") @ApiOperation("Get All FlinkSQLEnv") public Result> listFlinkSQLEnv() { return Result.succeed(taskService.listFlinkSQLEnv()); } - /** 导出 sql */ - @GetMapping(value = "/exportSql") - @ApiOperation("Export Sql") - @Log(title = "Export Sql", businessType = BusinessType.EXPORT) - @ApiImplicitParam( - name = "id", - value = "Task Id", - required = true, - dataType = "Integer", - paramType = "query", - dataTypeClass = Integer.class) - public Result exportSql(@RequestParam Integer id) { - return Result.succeed(taskService.exportSql(id)); - } - - /** 发布任务 */ - @GetMapping(value = "/releaseTask") - @ApiOperation("Release Task") - @Log(title = "Release Task", businessType = BusinessType.UPDATE) - public Result releaseTask(@RequestParam Integer id) { - return taskService.releaseTask(id); - } - @PostMapping("/rollbackTask") @ApiOperation("Rollback Task") @Log(title = "Rollback Task", businessType = BusinessType.UPDATE) @@ -189,79 +185,12 @@ public Result rollbackTask(@RequestBody TaskRollbackVersionDTO dto) { return taskService.rollbackTask(dto); } - /** 维护任务 */ - @GetMapping(value = "/developTask") - @ApiOperation("Develop Task") - @Log(title = "Develop Task", businessType = BusinessType.UPDATE) - public Result developTask(@RequestParam Integer id) { - return Result.succeed(taskService.developTask(id), Status.OPERATE_SUCCESS); - } - - /** 上线任务 */ - @GetMapping(value = "/onLineTask") - @ApiOperation("OnLine Task") - @Log(title = "OnLine Task", businessType = BusinessType.REMOTE_OPERATION) - public Result onLineTask(@RequestParam Integer id) { - return taskService.onLineTask(id); - } - - /** 下线任务 */ - @GetMapping(value = "/offLineTask") - @ApiOperation("OffLine Task") - @Log(title = "OffLine Task", businessType = BusinessType.REMOTE_OPERATION) - public Result offLineTask(@RequestParam Integer id, @RequestParam String type) { - return taskService.offLineTask(id, type); - } - - /** 注销任务 */ - @GetMapping(value = "/cancelTask") - @ApiOperation("Cancel Task") - @Log(title = "Cancel Task", businessType = BusinessType.REMOTE_OPERATION) - public Result cancelTask(@RequestParam Integer id) { - return taskService.cancelTask(id); - } - - /** 恢复任务 */ - @GetMapping(value = "/recoveryTask") - @ApiOperation("Recovery Task") - @Log(title = "Recovery Task", businessType = BusinessType.REMOTE_OPERATION) - public Result recoveryTask(@RequestParam Integer id) { - return Result.succeed(taskService.recoveryTask(id), Status.OPERATE_SUCCESS); - } - - /** 重启任务 */ - @GetMapping(value = "/restartTask") - @ApiOperation("Restart Task") - @Log(title = "Restart Task", businessType = BusinessType.REMOTE_OPERATION) - public Result restartTask(@RequestParam Integer id, @RequestParam Boolean isOnLine) { - if (isOnLine) { - return taskService.reOnLineTask(id, null); - } else { - return Result.succeed(taskService.restartTask(id, null), Status.RESTART_SUCCESS); - } - } - - /** 选择保存点重启任务 */ - @GetMapping(value = "/selectSavePointRestartTask") - @ApiOperation("Select Save Point Restart Task") - @Log(title = "Select Save Point Restart Task", businessType = BusinessType.REMOTE_OPERATION) - public Result selectSavePointRestartTask( - @RequestParam Integer id, @RequestParam Boolean isOnLine, 
@RequestParam String savePointPath) { - if (isOnLine) { - return taskService.reOnLineTask(id, savePointPath); - } else { - return Result.succeed(taskService.restartTask(id, savePointPath), Status.RESTART_SUCCESS); - } - } - - /** 获取当前的 API 的地址 */ @GetMapping(value = "/getTaskAPIAddress") @ApiOperation("Get Task API Address") public Result getTaskAPIAddress() { return Result.succeed(taskService.getTaskAPIAddress(), Status.RESTART_SUCCESS); } - /** 导出json */ @GetMapping(value = "/exportJsonByTaskId") @ApiOperation("Export Task To Sign Json") @Log(title = "Export Task To Sign Json", businessType = BusinessType.EXPORT) @@ -269,7 +198,6 @@ public Result exportJsonByTaskId(@RequestParam Integer id) { return Result.succeed(taskService.exportJsonByTaskId(id)); } - /** 导出json数组 */ @PostMapping(value = "/exportJsonByTaskIds") @ApiOperation("Export Task To Array Json") @Log(title = "Export Task To Array Json", businessType = BusinessType.EXPORT) @@ -277,7 +205,6 @@ public Result exportJsonByTaskIds(@RequestBody JsonNode para) { return Result.succeed(taskService.exportJsonByTaskIds(para)); } - /** json文件上传 导入task */ @PostMapping(value = "/uploadTaskJson") @ApiOperation("Upload Task Json") @Log(title = "Upload Task Json", businessType = BusinessType.UPLOAD) @@ -285,83 +212,9 @@ public Result uploadTaskJson(@RequestParam("file") MultipartFile file) thr return taskService.uploadTaskJson(file); } - /** - * 查询所有目录 - * - * @return {@link Result}<{@link Tree}<{@link Integer}>> - */ @GetMapping("/queryAllCatalogue") @ApiOperation("Query All Catalogue") public Result> queryAllCatalogue() { return taskService.queryAllCatalogue(); } - - /** - * 查询对应操作的任务列表 - * - * @param operating 操作 - * @param catalogueId 目录id - * @return {@link Result}<{@link List}<{@link Task}>> - */ - @GetMapping("/queryOnClickOperatingTask") - @ApiOperation("Query On Click Operating Task") - public Result> queryOnClickOperatingTask( - @RequestParam("operating") Integer operating, @RequestParam("catalogueId") Integer catalogueId) { - if (operating == null) { - return Result.failed(Status.OPERATE_FAILED); - } - switch (operating) { - case 1: - return taskService.queryOnLineTaskByDoneStatus( - Collections.singletonList(JobLifeCycle.RELEASE), - JobStatus.getAllDoneStatus(), - true, - catalogueId); - case 2: - return taskService.queryOnLineTaskByDoneStatus( - Collections.singletonList(JobLifeCycle.ONLINE), - Collections.singletonList(JobStatus.RUNNING), - false, - catalogueId); - default: - return Result.failed(Status.OPERATE_FAILED); - } - } - - /** - * 一键操作任务 - * - * @param operating 操作 - * @return {@link Result}<{@link Void}> - */ - @PostMapping("/onClickOperatingTask") - @ApiOperation("On Click Operating Task") - public Result onClickOperatingTask(@RequestBody JsonNode operating) { - if (operating == null || operating.get("operating") == null) { - return Result.failed(Status.OPERATE_FAILED); - } - switch (operating.get("operating").asInt()) { - case 1: - final JsonNode savepointSelect = operating.get("taskOperatingSavepointSelect"); - return TaskOneClickOperatingUtil.oneClickOnline( - TaskOneClickOperatingUtil.parseJsonNode(operating), - TaskOperatingSavepointSelect.valueByCode( - savepointSelect == null ? 
0 : savepointSelect.asInt())); - case 2: - return TaskOneClickOperatingUtil.onClickOffline(TaskOneClickOperatingUtil.parseJsonNode(operating)); - default: - return Result.failed(Status.OPERATE_FAILED); - } - } - - /** - * 查询一键操作任务状态 - * - * @return {@link Result}<{@link Dict}> - */ - @GetMapping("/queryOneClickOperatingTaskStatus") - @ApiOperation("Query One Click Operating Task Status") - public Result queryOneClickOperatingTaskStatus() { - return TaskOneClickOperatingUtil.queryOneClickOperatingTaskStatus(); - } } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/APIExecuteSqlDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/APIExecuteSqlDTO.java index 78208cdd617..d3c5c3e95fe 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/APIExecuteSqlDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/APIExecuteSqlDTO.java @@ -21,6 +21,7 @@ import org.dinky.assertion.Asserts; import org.dinky.gateway.config.GatewayConfig; +import org.dinky.gateway.enums.SavePointStrategy; import org.dinky.job.JobConfig; import java.util.Map; @@ -119,25 +120,23 @@ public class APIExecuteSqlDTO extends AbstractStatementDTO { public JobConfig getJobConfig() { int savePointStrategy = Asserts.isNotNullString(savePointPath) ? 3 : 0; - - return new JobConfig( - type, - useResult, - useChangeLog, - useAutoCancel, - false, - null, - true, - address, - jobName, - isFragment(), - useStatementSet, - maxRowNum, - checkPoint, - parallelism, - savePointStrategy, - savePointPath, - configuration, - gatewayConfig); + return JobConfig.builder() + .type(type) + .useResult(useResult) + .useChangeLog(useChangeLog) + .useAutoCancel(useAutoCancel) + .useRemote(true) + .address(address) + .jobName(jobName) + .fragment(isFragment()) + .statementSet(useStatementSet) + .maxRowNum(maxRowNum) + .checkpoint(checkPoint) + .parallelism(parallelism) + .savePointStrategy(SavePointStrategy.get(savePointStrategy)) + .savePointPath(savePointPath) + .configJson(configuration) + .gatewayConfig(gatewayConfig) + .build(); } } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/APIExplainSqlDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/APIExplainSqlDTO.java index 1797e72e4ab..df361e63835 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/APIExplainSqlDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/APIExplainSqlDTO.java @@ -19,6 +19,7 @@ package org.dinky.data.dto; +import org.dinky.gateway.enums.GatewayType; import org.dinky.job.JobConfig; import java.util.Map; @@ -55,6 +56,13 @@ public class APIExplainSqlDTO extends AbstractStatementDTO { private Map configuration; public JobConfig getJobConfig() { - return new JobConfig("local", false, false, isFragment(), useStatementSet, parallelism, configuration); + return JobConfig.builder() + .type(GatewayType.LOCAL.getLongValue()) + .useRemote(false) + .fragment(isFragment()) + .statementSet(useStatementSet) + .parallelism(parallelism) + .configJson(configuration) + .build(); } } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/AbstractStatementDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/AbstractStatementDTO.java index ef0850455d4..bd45bd60992 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/AbstractStatementDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/AbstractStatementDTO.java @@ -23,6 +23,7 @@ import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; +import lombok.Data; /** * AbstractStatementDTO @@ -30,6 +31,7 @@ * @since 2021/12/29 */ @ApiModel(value = 
"AbstractStatementDTO", description = "Abstract Statement Data Transfer Object") +@Data public class AbstractStatementDTO { @ApiModelProperty(value = "Statement", dataType = "String", example = "SELECT * FROM table", notes = "SQL语句") @@ -47,36 +49,4 @@ public class AbstractStatementDTO { example = "{\"key\": \"value\"}", notes = "变量集合") private Map variables; - - public String getStatement() { - return statement; - } - - public void setStatement(String statement) { - this.statement = statement; - } - - public Integer getEnvId() { - return envId; - } - - public void setEnvId(Integer envId) { - this.envId = envId; - } - - public boolean isFragment() { - return fragment; - } - - public void setFragment(boolean fragment) { - this.fragment = fragment; - } - - public Map getVariables() { - return variables; - } - - public void setVariables(Map variables) { - this.variables = variables; - } } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java index af314d2f3b1..c034107a34c 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java @@ -21,6 +21,7 @@ import org.dinky.job.JobConfig; +import cn.hutool.core.bean.BeanUtil; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import lombok.Getter; @@ -85,6 +86,8 @@ public class StudioDDLDTO { private Integer maxRowNum = 10000; public JobConfig getJobConfig() { - return new JobConfig(type, useResult, useSession, session, useRemote, clusterId, maxRowNum); + JobConfig jobConfig = new JobConfig(); + BeanUtil.copyProperties(this, jobConfig); + return jobConfig; } } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/StudioMetaStoreDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/StudioMetaStoreDTO.java index 0196e49c648..ee934e0cb03 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/StudioMetaStoreDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/StudioMetaStoreDTO.java @@ -65,27 +65,15 @@ public class StudioMetaStoreDTO extends AbstractStatementDTO { private Integer databaseId; public JobConfig getJobConfig() { - return new JobConfig( - GatewayType.LOCAL.getLongValue(), - true, - false, - false, - false, - null, - null, - null, - null, - null, - null, - isFragment(), - false, - false, - 0, - null, - null, - null, - null, - null, - null); + return JobConfig.builder() + .type(GatewayType.LOCAL.getLongValue()) + .useResult(true) + .useChangeLog(false) + .useAutoCancel(false) + .fragment(isFragment()) + .statementSet(false) + .batchModel(false) + .maxRowNum(0) + .build(); } } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/StudioExecuteDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java similarity index 53% rename from dinky-admin/src/main/java/org/dinky/data/dto/StudioExecuteDTO.java rename to dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java index 045597dd28a..914736f6500 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/StudioExecuteDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java @@ -19,12 +19,20 @@ package org.dinky.data.dto; +import org.dinky.config.Dialect; +import org.dinky.data.model.Task; import org.dinky.data.model.TaskExtConfig; +import org.dinky.data.typehandler.TaskExtConfigTypeHandler; import org.dinky.job.JobConfig; +import org.apache.ibatis.type.JdbcType; + import java.util.HashMap; import java.util.Map; +import 
com.baomidou.mybatisplus.annotation.TableField; + +import cn.hutool.core.bean.BeanUtil; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import lombok.Getter; @@ -34,13 +42,18 @@ /** * StudioExecuteDTO * - * @since 2021/5/30 11:09 */ @Getter @Setter @Slf4j @ApiModel(value = "StudioExecuteDTO", description = "DTO for executing SQL queries") -public class StudioExecuteDTO extends AbstractStatementDTO { +public class TaskDTO extends AbstractStatementDTO { + + @ApiModelProperty(value = "Name", required = true, dataType = "String", example = "Name") + private String name; + + @ApiModelProperty(value = "Dialect", dataType = "String", notes = "Dialect for the task") + private String dialect; @ApiModelProperty( value = "Run Mode", @@ -49,33 +62,32 @@ public class StudioExecuteDTO extends AbstractStatementDTO { notes = "The execution mode for the SQL query") private String type; - @ApiModelProperty( - value = "Dialect", - dataType = "String", - example = "MySQL", - notes = "The SQL dialect for the query") - private String dialect; + @ApiModelProperty(value = "Check Point", dataType = "Integer", example = "1", notes = "Check point for the task") + private Integer checkPoint; @ApiModelProperty( - value = "Use Result", - dataType = "boolean", - example = "true", - notes = "Flag indicating whether to use the query result") - private boolean useResult; + value = "Save Point Strategy", + dataType = "Integer", + example = "1", + notes = "The save point strategy") + private Integer savePointStrategy; @ApiModelProperty( - value = "Use Change Log", - dataType = "boolean", - example = "false", - notes = "Flag indicating whether to use change logs") - private boolean useChangeLog; + value = "Save Point Path", + dataType = "String", + example = "/savepoints", + notes = "The path for save points") + private String savePointPath; + + @ApiModelProperty(value = "Parallelism", dataType = "Integer", example = "4", notes = "The parallelism level") + private Integer parallelism; @ApiModelProperty( - value = "Use Auto Cancel", - dataType = "boolean", - example = "false", - notes = "Flag indicating whether to use auto-canceling") - private boolean useAutoCancel; + value = "Fragment", + dataType = "Boolean", + example = "true", + notes = "Fragment option for the task") + private Boolean fragment; @ApiModelProperty( value = "Use Statement Set", @@ -91,16 +103,6 @@ public class StudioExecuteDTO extends AbstractStatementDTO { notes = "Flag indicating whether to use batch processing") private boolean batchModel; - @ApiModelProperty( - value = "Use Session", - dataType = "boolean", - example = "false", - notes = "Flag indicating whether to use a session") - private boolean useSession; - - @ApiModelProperty(value = "Session", dataType = "String", example = "session_id", notes = "The session identifier") - private String session; - @ApiModelProperty( value = "Cluster ID", dataType = "Integer", @@ -125,79 +127,143 @@ public class StudioExecuteDTO extends AbstractStatementDTO { @ApiModelProperty(value = "JAR ID", dataType = "Integer", example = "4", notes = "The identifier of the JAR") private Integer jarId; - @ApiModelProperty(value = "Job Name", dataType = "String", example = "MyJob", notes = "The name of the job") - private String jobName; - - @ApiModelProperty(value = "Task ID", dataType = "Integer", example = "5", notes = "The identifier of the task") - private Integer taskId; - - @ApiModelProperty(value = "ID", dataType = "Integer", example = "6", notes = "The identifier of the 
execution") - private Integer id; - @ApiModelProperty( - value = "Max Row Number", + value = "Alert Group ID", dataType = "Integer", - example = "100", - notes = "The maximum number of rows to return") - private Integer maxRowNum; + example = "7001", + notes = "ID of the alert group associated with the task") + private Integer alertGroupId; - @ApiModelProperty(value = "Check Point", dataType = "Integer", example = "0", notes = "The check point value") - private Integer checkPoint; + @ApiModelProperty(value = "Note", dataType = "String", notes = "Additional notes for the task") + private String note; - @ApiModelProperty(value = "Parallelism", dataType = "Integer", example = "4", notes = "The parallelism level") - private Integer parallelism; + @ApiModelProperty(value = "Step", dataType = "Integer", example = "1", notes = "Step for the task") + private Integer step; @ApiModelProperty( - value = "Save Point Strategy", + value = "Job Instance ID", dataType = "Integer", - example = "1", - notes = "The save point strategy") - private Integer savePointStrategy; + example = "8001", + notes = "ID of the job instance associated with the task") + private Integer jobInstanceId; @ApiModelProperty( - value = "Save Point Path", + value = "Job status", dataType = "String", - example = "/savepoints", - notes = "The path for save points") - private String savePointPath; + example = "RUNNING", + notes = "THE_RUNNING_STATUS_OF_THE_CURRENT_TASK") + private String status; + + @ApiModelProperty( + value = "Version ID", + dataType = "Integer", + example = "9001", + notes = "ID of the version associated with the task") + private Integer versionId; + + @ApiModelProperty(value = "Enabled", required = true, dataType = "Boolean", example = "true") + private Boolean enabled; + + @ApiModelProperty(value = "Statement", dataType = "String", notes = "SQL statement for the task") + private String statement; + + @ApiModelProperty(value = "Cluster Name", dataType = "String", notes = "Name of the associated cluster") + @TableField(exist = false) + private String clusterName; @ApiModelProperty( value = "Configuration JSON", - dataType = "Object", - example = "{}", - notes = "The JSON configuration for the query") + dataType = "TaskExtConfig", + notes = "Extended configuration in JSON format for the task") + @TableField(typeHandler = TaskExtConfigTypeHandler.class, jdbcType = JdbcType.VARCHAR) private TaskExtConfig configJson; + @ApiModelProperty(value = "Path", dataType = "String", notes = "Path associated with the task") + @TableField(exist = false) + private String path; + + @ApiModelProperty(value = "JAR Name", dataType = "String", notes = "Name of the associated JAR") + @TableField(exist = false) + private String jarName; + + @ApiModelProperty( + value = "Cluster Configuration Name", + dataType = "String", + notes = "Name of the associated cluster configuration") + @TableField(exist = false) + private String clusterConfigurationName; + + @ApiModelProperty(value = "Database Name", dataType = "String", notes = "Name of the associated database") + @TableField(exist = false) + private String databaseName; + + @ApiModelProperty(value = "Environment Name", dataType = "String", notes = "Name of the associated environment") + @TableField(exist = false) + private String envName; + + @ApiModelProperty(value = "Alert Group Name", dataType = "String", notes = "Name of the associated alert group") + @TableField(exist = false) + private String alertGroupName; + + @ApiModelProperty( + value = "UseResult", + dataType = "boolean", + example 
= "true", + notes = "Flagindicatingwhethertousethequeryresult") + private boolean useResult; + + @ApiModelProperty( + value = "UseChangeLog", + dataType = "boolean", + example = "false", + notes = "Flagindicatingwhethertousechangelogs") + private boolean useChangeLog; + + @ApiModelProperty( + value = "Use Auto Cancel", + dataType = "boolean", + example = "false", + notes = "Flag indicating whether to use auto-canceling") + private boolean useAutoCancel; + + @ApiModelProperty(value = "Session", dataType = "String", example = "session_id", notes = "The session identifier") + private String session; + + @ApiModelProperty(value = "Job Name", dataType = "String", example = "MyJob", notes = "The name of the job") + private String jobName; + + @ApiModelProperty(value = "ID", dataType = "Integer", example = "6", notes = "The identifier of the execution") + private Integer id; + + @ApiModelProperty( + value = "Max Row Number", + dataType = "Integer", + example = "100", + notes = "The maximum number of rows to return") + private Integer maxRowNum; + public JobConfig getJobConfig() { Map parsedConfig = this.configJson == null ? new HashMap<>(0) : this.configJson.getCustomConfigMaps(); - return new JobConfig( - type, - useResult, - useChangeLog, - useAutoCancel, - useSession, - session, - clusterId, - clusterConfigurationId, - jarId, - taskId, - jobName, - isFragment(), - statementSet, - batchModel, - maxRowNum, - checkPoint, - parallelism, - savePointStrategy, - savePointPath, - getVariables(), - parsedConfig); + JobConfig jobConfig = new JobConfig(); + BeanUtil.copyProperties(this, jobConfig); + jobConfig.setConfigJson(parsedConfig); + jobConfig.setJarTask(isJarTask()); + jobConfig.setTaskId(id); + jobConfig.setJobName(name); + + return jobConfig; + } + + public Task buildTask() { + Task task = new Task(); + BeanUtil.copyProperties(this, task); + return task; } - public Integer getTaskId() { - return taskId == null ? getId() : taskId; + public boolean isJarTask() { + return Dialect.isJarDialect(dialect); } } diff --git a/dinky-admin/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java b/dinky-admin/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java new file mode 100644 index 00000000000..6ea8e31b210 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java @@ -0,0 +1,26 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+
+package org.dinky.data.exception;
+
+public class NotSupportExplainExcepition extends SqlExplainExcepition {
+    public NotSupportExplainExcepition(String message) {
+        super(message);
+    }
+}
diff --git a/dinky-admin/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java b/dinky-admin/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java
new file mode 100644
index 00000000000..4cac0bf9ac1
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java
@@ -0,0 +1,28 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * + */ + +package org.dinky.data.exception; + +import org.dinky.process.exception.ExcuteException; + +public class TaskNotDoneException extends ExcuteException { + public TaskNotDoneException(String message) { + super(message); + } +} diff --git a/dinky-admin/src/main/java/org/dinky/data/model/Task.java b/dinky-admin/src/main/java/org/dinky/data/model/Task.java index 4e76e8a211d..f51314cc4b3 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/Task.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/Task.java @@ -19,23 +19,13 @@ package org.dinky.data.model; -import org.dinky.assertion.Asserts; -import org.dinky.config.Dialect; import org.dinky.data.typehandler.TaskExtConfigTypeHandler; -import org.dinky.job.JobConfig; import org.dinky.mybatis.model.SuperEntity; import org.apache.ibatis.type.JdbcType; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableName; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @@ -73,11 +63,7 @@ public class Task extends SuperEntity { @ApiModelProperty(value = "Check Point", dataType = "Integer", example = "1", notes = "Check point for the task") private Integer checkPoint; - @ApiModelProperty( - value = "Save Point Strategy", - dataType = "Integer", - example = "2", - notes = "Save point strategy for the task") + @ApiModelProperty(value = "Save point strategy", dataType = "SavePointStrategy", notes = "Save point strategy") private Integer savePointStrategy; @ApiModelProperty(value = "Save Point Path", dataType = "String", notes = "Save point path for the task") @@ -149,6 +135,13 @@ public class Task extends SuperEntity { notes = "ID of the alert group associated with the task") private Integer alertGroupId; + @ApiModelProperty( + value = "Configuration JSON", + dataType = "TaskExtConfig", + notes = "Extended configuration in JSON format for the task") + @TableField(typeHandler = TaskExtConfigTypeHandler.class, jdbcType = JdbcType.VARCHAR) + private TaskExtConfig configJson; + @ApiModelProperty(value = "Note", dataType = "String", notes = "Additional notes for the task") private String note; @@ -170,126 +163,5 @@ public class Task extends SuperEntity { private Integer versionId; @ApiModelProperty(value = "Statement", dataType = "String", notes = "SQL statement for the task") - @TableField(exist = false) private String statement; - - @ApiModelProperty(value = "Cluster Name", dataType = "String", notes = "Name of the associated cluster") - @TableField(exist = false) - private String clusterName; - - @ApiModelProperty( - value = "Save Points", - dataType = "List", - notes = "List of save points associated with the task") - @TableField(exist = false) - private List savePoints; - - @ApiModelProperty( - value = "Configuration JSON", - dataType = "TaskExtConfig", - notes = "Extended configuration in JSON format for the task") - @TableField(typeHandler = TaskExtConfigTypeHandler.class, jdbcType = JdbcType.VARCHAR) - private TaskExtConfig configJson; - - @ApiModelProperty(value = "Path", dataType = "String", notes = "Path associated with the task") - @TableField(exist = false) - private String path; - - @ApiModelProperty(value = "JAR Name", dataType = "String", notes = "Name of the associated JAR") - @TableField(exist = false) 
-    private String jarName;
-
-    @ApiModelProperty(
-            value = "Cluster Configuration Name",
-            dataType = "String",
-            notes = "Name of the associated cluster configuration")
-    @TableField(exist = false)
-    private String clusterConfigurationName;
-
-    @ApiModelProperty(value = "Database Name", dataType = "String", notes = "Name of the associated database")
-    @TableField(exist = false)
-    private String databaseName;
-
-    @ApiModelProperty(value = "Environment Name", dataType = "String", notes = "Name of the associated environment")
-    @TableField(exist = false)
-    private String envName;
-
-    @ApiModelProperty(value = "Alert Group Name", dataType = "String", notes = "Name of the associated alert group")
-    @TableField(exist = false)
-    private String alertGroupName;
-
-    public JobConfig buildSubmitConfig() {
-        boolean useRemote = clusterId != null && clusterId != 0;
-
-        List<ConfigItem> extCustomConfig = this.configJson.getCustomConfig();
-
-        Map<String, String> parsedConfig =
-                extCustomConfig.stream().collect(Collectors.toMap(ConfigItem::getKey, ConfigItem::getValue));
-
-        int jid = Asserts.isNull(jarId) ? 0 : jarId;
-        boolean fg = Asserts.isNotNull(fragment) && fragment;
-        boolean sts = Asserts.isNotNull(statementSet) && statementSet;
-        return new JobConfig(
-                type,
-                step,
-                false,
-                false,
-                useRemote,
-                clusterId,
-                clusterConfigurationId,
-                jid,
-                getId(),
-                getName(),
-                fg,
-                sts,
-                batchModel,
-                checkPoint,
-                parallelism,
-                savePointStrategy,
-                savePointPath,
-                parsedConfig,
-                isJarTask());
-    }
-
-    /**
-     * Get the value of a custom configuration item by its key.
-     * @param key
-     * @return String
-     */
-    public String findCustomConfigKeyOfValue(String key) {
-        return this.configJson.containsKey(key) ? this.configJson.getCustomConfigValue(key) : null;
-    }
-
-    /**
-     * Check whether a custom configuration item exists for the given key.
-     * @param key
-     * @return
-     */
-    public boolean hasCustomConfigKey(String key) {
-        return this.configJson.containsKey(key);
-    }
-
-    public JsonNode parseJsonNode(ObjectMapper mapper) {
-        ObjectNode jsonNode = mapper.createObjectNode();
-        jsonNode.put("name", this.getName());
-        jsonNode.put("dialect", this.dialect);
-        jsonNode.put("type", this.type);
-        jsonNode.put("statement", this.statement);
-        jsonNode.put("checkPoint", this.checkPoint);
-        jsonNode.put("savePointStrategy", this.savePointStrategy);
-        jsonNode.put("savePointPath", this.savePointPath);
-        jsonNode.put("parallelism", this.parallelism);
-        jsonNode.put("fragment", this.fragment);
-        jsonNode.put("statementSet", this.statementSet);
-        jsonNode.put("batchModel", this.batchModel);
-        jsonNode.put("clusterName", this.clusterName);
-        jsonNode.put("note", this.note);
-        jsonNode.put("step", this.step);
-        jsonNode.put("enabled", this.getEnabled());
-        return jsonNode;
-    }
-
-    public boolean isJarTask() {
-        return Dialect.isJarDialect(dialect);
-    }
 }
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java b/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java
index 1c5902ff2da..34efe5ac5da 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java
@@ -22,7 +22,7 @@
 import org.dinky.data.dto.TaskVersionConfigureDTO;
 
 import java.io.Serializable;
-import java.util.Date;
+import java.time.LocalDateTime;
 
 import com.baomidou.mybatisplus.annotation.TableField;
 import com.baomidou.mybatisplus.annotation.TableName;
@@ -83,5 +83,5 @@ public class TaskVersion implements Serializable {
 
     @ApiModelProperty(value = "Create Time", dataType = "Date", notes = "Timestamp when the version was created")
     @TableField(value = "create_time")
-
private Date createTime; + private LocalDateTime createTime; } diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java index 4ae9b6ae59a..dc645a7ed79 100644 --- a/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java +++ b/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java @@ -29,6 +29,7 @@ import org.dinky.context.SpringContextUtils; import org.dinky.daemon.pool.DefaultThreadPool; import org.dinky.data.dto.AlertRuleDTO; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.enums.Status; import org.dinky.data.model.AlertGroup; import org.dinky.data.model.AlertHistory; @@ -36,7 +37,6 @@ import org.dinky.data.model.JobInfoDetail; import org.dinky.data.model.JobInstance; import org.dinky.data.model.SystemConfiguration; -import org.dinky.data.model.Task; import org.dinky.data.options.AlertRuleOptions; import org.dinky.service.AlertGroupService; import org.dinky.service.AlertHistoryService; @@ -214,7 +214,7 @@ private Rule buildRule(AlertRuleDTO alertRuleDTO) { private void executeAlertAction(Facts facts, AlertRuleDTO alertRuleDTO) { JobInfoDetail jobInfoDetail = facts.get(AlertRuleOptions.JOB_ALERT_RULE_JOB_DETAIL); JobInstance jobInstance = jobInfoDetail.getInstance(); - Task task = taskService.getById(jobInfoDetail.getInstance().getTaskId()); + TaskDTO task = taskService.getTaskInfoById(jobInfoDetail.getInstance().getTaskId()); String taskUrl = StrFormatter.format( "{}/#/devops/job-detail?id={}", diff --git a/dinky-admin/src/main/java/org/dinky/service/APIService.java b/dinky-admin/src/main/java/org/dinky/service/APIService.java index 00a2349ebb9..0c362b6b7e4 100644 --- a/dinky-admin/src/main/java/org/dinky/service/APIService.java +++ b/dinky-admin/src/main/java/org/dinky/service/APIService.java @@ -19,35 +19,9 @@ package org.dinky.service; -import org.dinky.data.dto.APICancelDTO; -import org.dinky.data.dto.APIExecuteJarDTO; -import org.dinky.data.dto.APIExecuteSqlDTO; -import org.dinky.data.dto.APIExplainSqlDTO; -import org.dinky.data.dto.APISavePointDTO; -import org.dinky.data.result.APIJobResult; -import org.dinky.data.result.ExplainResult; -import org.dinky.gateway.result.SavePointResult; - -import com.fasterxml.jackson.databind.node.ObjectNode; - /** * APIService * * @since 2021/12/11 21:45 */ -public interface APIService { - - APIJobResult executeSql(APIExecuteSqlDTO apiExecuteSqlDTO); - - ExplainResult explainSql(APIExplainSqlDTO apiExplainSqlDTO); - - ObjectNode getJobPlan(APIExplainSqlDTO apiExplainSqlDTO); - - ObjectNode getStreamGraph(APIExplainSqlDTO apiExplainSqlDTO); - - boolean cancel(APICancelDTO apiCancelDTO); - - SavePointResult savepoint(APISavePointDTO apiSavePointDTO); - - APIJobResult executeJar(APIExecuteJarDTO apiExecuteJarDTO); -} +public interface APIService {} diff --git a/dinky-admin/src/main/java/org/dinky/service/DataBaseService.java b/dinky-admin/src/main/java/org/dinky/service/DataBaseService.java index a53a55e1e81..db444e3b7b8 100644 --- a/dinky-admin/src/main/java/org/dinky/service/DataBaseService.java +++ b/dinky-admin/src/main/java/org/dinky/service/DataBaseService.java @@ -19,11 +19,15 @@ package org.dinky.service; +import org.dinky.data.dto.SqlDTO; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.model.Column; import org.dinky.data.model.DataBase; import org.dinky.data.model.QueryData; import org.dinky.data.model.Schema; import org.dinky.data.model.SqlGeneration; +import org.dinky.data.result.SqlExplainResult; 
+import org.dinky.job.JobResult; import org.dinky.metadata.result.JdbcSelectResult; import org.dinky.mybatis.service.ISuperService; @@ -136,4 +140,8 @@ public interface DataBaseService extends ISuperService { * @return {@link Boolean} */ Boolean copyDatabase(DataBase database); + + List explainCommonSql(TaskDTO task); + + JobResult executeCommonSql(SqlDTO sqlDTO); } diff --git a/dinky-admin/src/main/java/org/dinky/service/SavepointsService.java b/dinky-admin/src/main/java/org/dinky/service/SavepointsService.java index a9b91b0bade..983dfa1fbc3 100644 --- a/dinky-admin/src/main/java/org/dinky/service/SavepointsService.java +++ b/dinky-admin/src/main/java/org/dinky/service/SavepointsService.java @@ -19,6 +19,7 @@ package org.dinky.service; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.model.Savepoints; import org.dinky.mybatis.service.ISuperService; @@ -36,4 +37,6 @@ public interface SavepointsService extends ISuperService { Savepoints getLatestSavepointByTaskId(Integer taskId); Savepoints getEarliestSavepointByTaskId(Integer taskId); + + Savepoints getSavePointWithStrategy(TaskDTO task); } diff --git a/dinky-admin/src/main/java/org/dinky/service/StudioService.java b/dinky-admin/src/main/java/org/dinky/service/StudioService.java index be3eba454a3..a08bcecef7b 100644 --- a/dinky-admin/src/main/java/org/dinky/service/StudioService.java +++ b/dinky-admin/src/main/java/org/dinky/service/StudioService.java @@ -19,25 +19,20 @@ package org.dinky.service; -import org.dinky.data.dto.SqlDTO; import org.dinky.data.dto.StudioCADTO; import org.dinky.data.dto.StudioDDLDTO; -import org.dinky.data.dto.StudioExecuteDTO; import org.dinky.data.dto.StudioMetaStoreDTO; import org.dinky.data.model.Catalog; import org.dinky.data.model.FlinkColumn; import org.dinky.data.model.Schema; import org.dinky.data.result.IResult; import org.dinky.data.result.SelectResult; -import org.dinky.data.result.SqlExplainResult; import org.dinky.explainer.lineage.LineageResult; -import org.dinky.job.JobResult; import org.dinky.metadata.result.JdbcSelectResult; import java.util.List; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; /** * StudioService @@ -46,18 +41,8 @@ */ public interface StudioService { - JobResult executeSql(StudioExecuteDTO studioExecuteDTO); - - JobResult executeCommonSql(SqlDTO sqlDTO); - IResult executeDDL(StudioDDLDTO studioDDLDTO); - List explainSql(StudioExecuteDTO studioExecuteDTO); - - ObjectNode getStreamGraph(StudioExecuteDTO studioExecuteDTO); - - ObjectNode getJobPlan(StudioExecuteDTO studioExecuteDTO); - JdbcSelectResult getCommonSqlData(Integer taskId); SelectResult getJobData(String jobId); @@ -66,10 +51,6 @@ public interface StudioService { List listFlinkJobs(Integer clusterId); - boolean cancelFlinkJob(Integer clusterId, String jobId); - - boolean savepointTrigger(Integer taskId, Integer clusterId, String jobId, String savePointType, String name); - List getMSCatalogs(StudioMetaStoreDTO studioMetaStoreDTO); Schema getMSSchemaInfo(StudioMetaStoreDTO studioMetaStoreDTO); diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskService.java b/dinky-admin/src/main/java/org/dinky/service/TaskService.java index fea869d4a73..86186a7b9da 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskService.java @@ -19,24 +19,27 @@ package org.dinky.service; +import org.dinky.data.dto.AbstractStatementDTO; +import org.dinky.data.dto.TaskDTO; import 
org.dinky.data.dto.TaskRollbackVersionDTO;
 import org.dinky.data.enums.JobLifeCycle;
-import org.dinky.data.enums.JobStatus;
-import org.dinky.data.model.JobInfoDetail;
+import org.dinky.data.exception.NotSupportExplainExcepition;
 import org.dinky.data.model.JobModelOverview;
 import org.dinky.data.model.JobTypeOverView;
 import org.dinky.data.model.Task;
 import org.dinky.data.result.Result;
 import org.dinky.data.result.SqlExplainResult;
-import org.dinky.data.result.TaskOperatingResult;
+import org.dinky.gateway.result.SavePointResult;
 import org.dinky.job.JobResult;
 import org.dinky.mybatis.service.ISuperService;
+import org.dinky.process.exception.ExcuteException;
 
 import java.util.List;
 
 import org.springframework.web.multipart.MultipartFile;
 
 import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 
 import cn.hutool.core.lang.tree.Tree;
 
@@ -47,45 +50,37 @@
  */
 public interface TaskService extends ISuperService<Task> {
 
-    JobResult submitTask(Integer id);
+    String buildEnvSql(AbstractStatementDTO task);
 
-    JobResult submitTaskToOnline(Task dtoTask, Integer id);
+    JobResult submitTask(Integer id, String savePointPath) throws ExcuteException;
 
-    JobResult restartTask(Integer id, String savePointPath);
+    JobResult restartTask(Integer id, String savePointPath) throws ExcuteException;
 
-    List<SqlExplainResult> explainTask(Integer id);
+    SavePointResult savepointTaskJob(TaskDTO task, String savePointType);
 
-    Task getTaskInfoById(Integer id);
+    List<SqlExplainResult> explainTask(TaskDTO task) throws NotSupportExplainExcepition;
 
-    void initTenantByTaskId(Integer id);
+    boolean cancelTaskJob(TaskDTO task);
 
-    boolean saveOrUpdateTask(Task task);
-
-    List<Task> listFlinkSQLEnv();
-
-    Task initDefaultFlinkSQLEnv(Integer tenantId);
+    ObjectNode getStreamGraph(TaskDTO taskDTO);
 
     String exportSql(Integer id);
 
-    Task getUDFByClassName(String className);
-
-    List<Task> getAllUDF();
-
-    Result releaseTask(Integer id);
+    ObjectNode getJobPlan(TaskDTO task);
 
-    boolean developTask(Integer id);
+    TaskDTO getTaskInfoById(Integer id);
 
-    Result onLineTask(Integer id);
+    void initTenantByTaskId(Integer id);
 
-    Result reOnLineTask(Integer id, String savePointPath);
+    boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle);
 
-    Result offLineTask(Integer id, String type);
+    boolean saveOrUpdateTask(Task task);
 
-    Result cancelTask(Integer id);
+    List<Task> listFlinkSQLEnv();
 
-    boolean recoveryTask(Integer id);
+    Task initDefaultFlinkSQLEnv(Integer tenantId);
 
-    boolean savepointTask(Integer taskId, String savePointType);
+    List<Task> getAllUDF();
 
     String getTaskAPIAddress();
 
@@ -101,17 +96,8 @@ public interface TaskService extends ISuperService<Task> {
 
     Result<Tree<Integer, String>> queryAllCatalogue();
 
-    Result<List<Task>> queryOnLineTaskByDoneStatus(
-            List<JobLifeCycle> jobLifeCycle, List<JobStatus> jobStatuses, boolean includeNull, Integer catalogueId);
-
-    void selectSavepointOnLineTask(TaskOperatingResult taskOperatingResult);
-
-    void selectSavepointOffLineTask(TaskOperatingResult taskOperatingResult);
 
     Task getTaskByNameAndTenantId(String name, Integer tenantId);
 
-    JobStatus checkJobStatus(JobInfoDetail jobInfoDetail);
-
     List<JobTypeOverView> getTaskOnlineRate();
 
     JobModelOverview getJobStreamingOrBatchModelOverview();
diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java b/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java
index f85d010951b..0bb020309b6 100644
--- a/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java
+++ b/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java
@@ -19,6 +19,7 @@
 
 package org.dinky.service;
 
+import
org.dinky.data.dto.TaskDTO; import org.dinky.data.model.TaskVersion; import org.dinky.mybatis.service.ISuperService; @@ -32,4 +33,6 @@ public interface TaskVersionService extends ISuperService { * @return java.util.List */ List getTaskVersionByTaskId(Integer taskId); + + void createTaskVersionSnapshot(TaskDTO task); } diff --git a/dinky-admin/src/main/java/org/dinky/service/UserService.java b/dinky-admin/src/main/java/org/dinky/service/UserService.java index 0556f7cc4c2..5fc499861de 100644 --- a/dinky-admin/src/main/java/org/dinky/service/UserService.java +++ b/dinky-admin/src/main/java/org/dinky/service/UserService.java @@ -149,6 +149,8 @@ public interface UserService extends ISuperService { */ List getCurrentRoleSelectPermissions(); + void buildRowPermission(); + /** user loginout */ void outLogin(); diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/APIServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/APIServiceImpl.java index fc3d6954ad0..99fdeec2611 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/APIServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/APIServiceImpl.java @@ -19,25 +19,12 @@ package org.dinky.service.impl; -import org.dinky.data.dto.APICancelDTO; -import org.dinky.data.dto.APIExecuteJarDTO; -import org.dinky.data.dto.APIExecuteSqlDTO; -import org.dinky.data.dto.APIExplainSqlDTO; -import org.dinky.data.dto.APISavePointDTO; -import org.dinky.data.result.APIJobResult; -import org.dinky.data.result.ExplainResult; -import org.dinky.gateway.result.SavePointResult; -import org.dinky.job.JobConfig; -import org.dinky.job.JobManager; -import org.dinky.job.JobResult; import org.dinky.service.APIService; -import org.dinky.utils.RunTimeUtil; import org.springframework.stereotype.Service; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * APIServiceImpl @@ -45,79 +32,6 @@ * @since 2021/12/11 21:46 */ @Service -public class APIServiceImpl implements APIService { - - @Override - public APIJobResult executeSql(APIExecuteSqlDTO apiExecuteSqlDTO) { - JobConfig config = apiExecuteSqlDTO.getJobConfig(); - JobManager jobManager = JobManager.build(config); - JobResult jobResult = jobManager.executeSql(apiExecuteSqlDTO.getStatement()); - APIJobResult apiJobResult = APIJobResult.build(jobResult); - RunTimeUtil.recovery(jobManager); - return apiJobResult; - } - - @Override - public ExplainResult explainSql(APIExplainSqlDTO apiExplainSqlDTO) { - JobConfig config = apiExplainSqlDTO.getJobConfig(); - JobManager jobManager = JobManager.buildPlanMode(config); - ExplainResult explainResult = jobManager.explainSql(apiExplainSqlDTO.getStatement()); - RunTimeUtil.recovery(jobManager); - return explainResult; - } - - @Override - public ObjectNode getJobPlan(APIExplainSqlDTO apiExplainSqlDTO) { - JobConfig config = apiExplainSqlDTO.getJobConfig(); - JobManager jobManager = JobManager.buildPlanMode(config); - String planJson = jobManager.getJobPlanJson(apiExplainSqlDTO.getStatement()); - ObjectMapper mapper = new ObjectMapper(); - ObjectNode objectNode = mapper.createObjectNode(); - try { - objectNode = (ObjectNode) mapper.readTree(planJson); - } catch (JsonProcessingException e) { - e.printStackTrace(); - } finally { - RunTimeUtil.recovery(jobManager); - return objectNode; - } - } - - @Override - public ObjectNode getStreamGraph(APIExplainSqlDTO 
apiExplainSqlDTO) { - JobConfig config = apiExplainSqlDTO.getJobConfig(); - JobManager jobManager = JobManager.buildPlanMode(config); - ObjectNode streamGraph = jobManager.getStreamGraph(apiExplainSqlDTO.getStatement()); - RunTimeUtil.recovery(jobManager); - return streamGraph; - } - - @Override - public boolean cancel(APICancelDTO apiCancelDTO) { - JobConfig jobConfig = apiCancelDTO.getJobConfig(); - JobManager jobManager = JobManager.build(jobConfig); - boolean cancel = jobManager.cancel(apiCancelDTO.getJobId()); - RunTimeUtil.recovery(jobManager); - return cancel; - } - - @Override - public SavePointResult savepoint(APISavePointDTO apiSavePointDTO) { - JobConfig jobConfig = apiSavePointDTO.getJobConfig(); - JobManager jobManager = JobManager.build(jobConfig); - SavePointResult savepoint = jobManager.savepoint( - apiSavePointDTO.getJobId(), apiSavePointDTO.getSavePointType(), apiSavePointDTO.getSavePoint()); - RunTimeUtil.recovery(jobManager); - return savepoint; - } - - @Override - public APIJobResult executeJar(APIExecuteJarDTO apiExecuteJarDTO) { - JobConfig config = apiExecuteJarDTO.getJobConfig(); - JobManager jobManager = JobManager.build(config); - JobResult jobResult = jobManager.executeJar(); - APIJobResult apiJobResult = APIJobResult.build(jobResult); - RunTimeUtil.recovery(jobManager); - return apiJobResult; - } -} +@RequiredArgsConstructor +@Slf4j +public class APIServiceImpl implements APIService {} diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/DataBaseServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/DataBaseServiceImpl.java index f6b20797ece..4ffb0dec13c 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/DataBaseServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/DataBaseServiceImpl.java @@ -21,6 +21,8 @@ import org.dinky.assertion.Asserts; import org.dinky.data.constant.CommonConstant; +import org.dinky.data.dto.SqlDTO; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.enums.Status; import org.dinky.data.model.Column; import org.dinky.data.model.DataBase; @@ -28,6 +30,8 @@ import org.dinky.data.model.Schema; import org.dinky.data.model.SqlGeneration; import org.dinky.data.model.Table; +import org.dinky.data.result.SqlExplainResult; +import org.dinky.job.JobResult; import org.dinky.mapper.DataBaseMapper; import org.dinky.metadata.driver.Driver; import org.dinky.metadata.result.JdbcSelectResult; @@ -38,6 +42,7 @@ import java.time.LocalDateTime; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.UUID; @@ -240,4 +245,59 @@ public Boolean copyDatabase(DataBase database) { database.setCreateTime(null); return this.save(database); } + + @Override + public List explainCommonSql(TaskDTO task) { + if (Asserts.isNull(task.getDatabaseId())) { + return Collections.singletonList(SqlExplainResult.fail(task.getStatement(), "please assign data source.")); + } + + DataBase dataBase = getById(task.getDatabaseId()); + if (Asserts.isNull(dataBase)) { + return Collections.singletonList(SqlExplainResult.fail(task.getStatement(), "data source not exist.")); + } + + List sqlExplainResults; + try (Driver driver = Driver.build(dataBase.getDriverConfig())) { + sqlExplainResults = driver.explain(task.getStatement()); + } + return sqlExplainResults; + } + + @Override + public JobResult executeCommonSql(SqlDTO sqlDTO) { + JobResult result = new JobResult(); + result.setStatement(sqlDTO.getStatement()); + result.setStartTime(LocalDateTime.now()); + + if 
(Asserts.isNull(sqlDTO.getDatabaseId())) { + result.setSuccess(false); + result.setError("please assign data source"); + result.setEndTime(LocalDateTime.now()); + return result; + } + + DataBase dataBase = getById(sqlDTO.getDatabaseId()); + if (Asserts.isNull(dataBase)) { + result.setSuccess(false); + result.setError("data source not exist."); + result.setEndTime(LocalDateTime.now()); + return result; + } + + JdbcSelectResult selectResult; + try (Driver driver = Driver.build(dataBase.getDriverConfig())) { + selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum()); + } + + result.setResult(selectResult); + if (selectResult.isSuccess()) { + result.setSuccess(true); + } else { + result.setSuccess(false); + result.setError(selectResult.getError()); + } + result.setEndTime(LocalDateTime.now()); + return result; + } } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/SavepointsServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/SavepointsServiceImpl.java index 50961605957..32bcd38c3dc 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/SavepointsServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/SavepointsServiceImpl.java @@ -19,7 +19,9 @@ package org.dinky.service.impl; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.model.Savepoints; +import org.dinky.gateway.enums.SavePointStrategy; import org.dinky.mapper.SavepointsMapper; import org.dinky.mybatis.service.impl.SuperServiceImpl; import org.dinky.service.SavepointsService; @@ -52,4 +54,23 @@ public Savepoints getLatestSavepointByTaskId(Integer taskId) { public Savepoints getEarliestSavepointByTaskId(Integer taskId) { return baseMapper.getEarliestSavepointByTaskId(taskId); } + + @Override + public Savepoints getSavePointWithStrategy(TaskDTO task) { + SavePointStrategy savePointStrategy = SavePointStrategy.get(task.getSavePointStrategy()); + switch (savePointStrategy) { + case LATEST: + return getLatestSavepointByTaskId(task.getId()); + case EARLIEST: + return getEarliestSavepointByTaskId(task.getId()); + case CUSTOM: + return new Savepoints() { + { + setPath(task.getSavePointPath()); + } + }; + default: + return null; + } + } } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java index f8864bb5196..332edf04d17 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java @@ -22,179 +22,60 @@ import org.dinky.api.FlinkAPI; import org.dinky.assertion.Asserts; import org.dinky.config.Dialect; -import org.dinky.context.RowLevelPermissionsContext; -import org.dinky.data.dto.AbstractStatementDTO; -import org.dinky.data.dto.SqlDTO; import org.dinky.data.dto.StudioCADTO; import org.dinky.data.dto.StudioDDLDTO; -import org.dinky.data.dto.StudioExecuteDTO; import org.dinky.data.dto.StudioMetaStoreDTO; import org.dinky.data.model.Catalog; import org.dinky.data.model.Cluster; import org.dinky.data.model.DataBase; import org.dinky.data.model.FlinkColumn; -import org.dinky.data.model.RowPermissions; -import org.dinky.data.model.Savepoints; import org.dinky.data.model.Schema; import org.dinky.data.model.Table; -import org.dinky.data.model.Task; import org.dinky.data.result.DDLResult; import org.dinky.data.result.IResult; +import org.dinky.data.result.ResultPool; import org.dinky.data.result.SelectResult; -import org.dinky.data.result.SqlExplainResult; import 
org.dinky.explainer.lineage.LineageBuilder; import org.dinky.explainer.lineage.LineageResult; -import org.dinky.gateway.model.FlinkClusterConfig; -import org.dinky.gateway.model.JobInfo; -import org.dinky.gateway.result.SavePointResult; import org.dinky.job.JobConfig; import org.dinky.job.JobManager; -import org.dinky.job.JobResult; import org.dinky.metadata.driver.Driver; import org.dinky.metadata.result.JdbcSelectResult; import org.dinky.process.context.ProcessContextHolder; import org.dinky.process.enums.ProcessType; import org.dinky.process.model.ProcessEntity; -import org.dinky.service.ClusterConfigurationService; import org.dinky.service.ClusterInstanceService; import org.dinky.service.DataBaseService; -import org.dinky.service.FragmentVariableService; -import org.dinky.service.SavepointsService; import org.dinky.service.StudioService; import org.dinky.service.TaskService; -import org.dinky.service.UserService; import org.dinky.sql.FlinkQuery; import org.dinky.utils.RunTimeUtil; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; import cn.dev33.satoken.stp.StpUtil; -import cn.hutool.cache.Cache; -import cn.hutool.cache.impl.TimedCache; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** StudioServiceImpl */ @Service @RequiredArgsConstructor +@Slf4j public class StudioServiceImpl implements StudioService { - private static final Logger logger = LoggerFactory.getLogger(StudioServiceImpl.class); - /** Common sql query result cache */ - private static final Cache COMMON_SQL_SEARCH_CACHE = - new TimedCache<>(TimeUnit.MINUTES.toMillis(10)); - private final ClusterInstanceService clusterInstanceService; - private final ClusterConfigurationService clusterConfigurationService; - private final SavepointsService savepointsService; private final DataBaseService dataBaseService; private final TaskService taskService; - private final FragmentVariableService fragmentVariableService; - private final UserService userService; - - private void addFlinkSQLEnv(AbstractStatementDTO statementDTO) { - ProcessEntity process = ProcessContextHolder.getProcess(); - process.info("Start initialize FlinkSQLEnv:"); - if (statementDTO.isFragment()) { - process.config("Variable opened."); - - // initialize global variables - process.info("Initializing global variables..."); - statementDTO.setVariables(fragmentVariableService.listEnabledVariables()); - process.infoSuccess(); - - // initialize database variables - process.info("Initializing database variables..."); - String flinkWithSql = dataBaseService.getEnabledFlinkWithSql(); - if (Asserts.isNotNullString(flinkWithSql)) { - statementDTO.setStatement(flinkWithSql + "\n" + statementDTO.getStatement()); - process.infoSuccess(); - } else { - process.info("No variables are loaded."); - } - } - - // initialize flinksql environment, such as flink catalog - if (Asserts.isNotNull(statementDTO.getEnvId()) - && !statementDTO.getEnvId().equals(0)) { - process.config("FlinkSQLEnv opened."); - process.info("Initializing FlinkSQLEnv..."); - Task task = 
taskService.getTaskInfoById(statementDTO.getEnvId()); - if (Asserts.isNotNull(task) && Asserts.isNotNullString(task.getStatement())) { - statementDTO.setStatement(task.getStatement() + "\n" + statementDTO.getStatement()); - process.infoSuccess(); - } else { - process.info("No FlinkSQLEnv are loaded."); - } - } - - process.info("Initializing data permissions..."); - List currentRoleSelectPermissions = userService.getCurrentRoleSelectPermissions(); - if (Asserts.isNotNullCollection(currentRoleSelectPermissions)) { - ConcurrentHashMap permission = new ConcurrentHashMap<>(); - for (RowPermissions roleSelectPermissions : currentRoleSelectPermissions) { - if (Asserts.isAllNotNullString( - roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression())) { - permission.put(roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression()); - } - } - RowLevelPermissionsContext.set(permission); - } - process.info("Finish initialize FlinkSQLEnv."); - } - - private void buildSession(JobConfig config) { - // If you are using a shared session, configure the current jobManager address - if (!config.isUseSession()) { - config.setAddress( - clusterInstanceService.buildEnvironmentAddress(config.isUseRemote(), config.getClusterId())); - } - } - - @Override - public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) { - if (Dialect.notFlinkSql(studioExecuteDTO.getDialect())) { - JobResult jobResult = executeCommonSql(SqlDTO.build( - studioExecuteDTO.getStatement(), - studioExecuteDTO.getDatabaseId(), - studioExecuteDTO.getMaxRowNum())); - COMMON_SQL_SEARCH_CACHE.put(studioExecuteDTO.getTaskId(), (JdbcSelectResult) jobResult.getResult()); - return jobResult; - } else { - return executeFlinkSql(studioExecuteDTO); - } - } - - private JobResult executeFlinkSql(StudioExecuteDTO studioExecuteDTO) { - ProcessEntity process = ProcessContextHolder.registerProcess( - ProcessEntity.init(ProcessType.FLINK_EXECUTE, StpUtil.getLoginIdAsInt())); - addFlinkSQLEnv(studioExecuteDTO); - process.info("Initializing Flink job config..."); - JobConfig config = studioExecuteDTO.getJobConfig(); - buildSession(config); - JobManager jobManager = JobManager.build(config); - process.start(); - JobResult jobResult = jobManager.executeSql(studioExecuteDTO.getStatement()); - process.finish("Execute Flink SQL succeed."); - RunTimeUtil.recovery(jobManager); - return jobResult; - } private IResult executeMSFlinkSql(StudioMetaStoreDTO studioMetaStoreDTO) { - addFlinkSQLEnv(studioMetaStoreDTO); + String envSql = taskService.buildEnvSql(studioMetaStoreDTO); + studioMetaStoreDTO.setStatement(studioMetaStoreDTO.getStatement() + envSql); JobConfig config = studioMetaStoreDTO.getJobConfig(); JobManager jobManager = JobManager.build(config); IResult jobResult = jobManager.executeDDL(studioMetaStoreDTO.getStatement()); @@ -203,148 +84,17 @@ private IResult executeMSFlinkSql(StudioMetaStoreDTO studioMetaStoreDTO) { } @Override - public JobResult executeCommonSql(SqlDTO sqlDTO) { - ProcessEntity process = ProcessContextHolder.registerProcess( - ProcessEntity.init(ProcessType.SQL_EXECUTE, StpUtil.getLoginIdAsInt())); - JobResult result = new JobResult(); - result.setStatement(sqlDTO.getStatement()); - result.setStartTimeNow(); - process.info("Initializing database connection..."); - if (Asserts.isNull(sqlDTO.getDatabaseId())) { - result.setSuccess(false); - result.setError("please select a database."); - result.setEndTimeNow(); - return result; - } - DataBase dataBase = dataBaseService.getById(sqlDTO.getDatabaseId()); - if 
(Asserts.isNull(dataBase)) { - process.error("The database does not exist."); - result.setSuccess(false); - result.setError("The database does not exist."); - result.setEndTimeNow(); - return result; - } - JdbcSelectResult selectResult; - try (Driver driver = Driver.build(dataBase.getDriverConfig())) { - process.infoSuccess(); - process.start(); - selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum()); - } - process.finish("Execute sql succeed."); - result.setResult(selectResult); - if (selectResult.isSuccess()) { - result.setSuccess(true); - } else { - result.setSuccess(false); - result.setError(selectResult.getError()); - } - result.setEndTimeNow(); - return result; + public JdbcSelectResult getCommonSqlData(Integer taskId) { + return ResultPool.getCommonSqlCache(taskId); } @Override public IResult executeDDL(StudioDDLDTO studioDDLDTO) { JobConfig config = studioDDLDTO.getJobConfig(); - if (!config.isUseSession()) { - config.setAddress( - clusterInstanceService.buildEnvironmentAddress(config.isUseRemote(), studioDDLDTO.getClusterId())); - } JobManager jobManager = JobManager.build(config); return jobManager.executeDDL(studioDDLDTO.getStatement()); } - @Override - public List explainSql(StudioExecuteDTO studioExecuteDTO) { - if (Dialect.notFlinkSql(studioExecuteDTO.getDialect())) { - return explainCommonSql(studioExecuteDTO); - } else { - return explainFlinkSql(studioExecuteDTO); - } - } - - private List explainFlinkSql(StudioExecuteDTO studioExecuteDTO) { - ProcessEntity process = ProcessContextHolder.registerProcess( - ProcessEntity.init(ProcessType.FLINK_EXPLAIN, StpUtil.getLoginIdAsInt())); - addFlinkSQLEnv(studioExecuteDTO); - process.info("Initializing Flink job config..."); - JobConfig config = studioExecuteDTO.getJobConfig(); - // If you are using explainSql | getStreamGraph | getJobPlan, make the dialect change to - // local. - config.buildLocal(); - buildSession(config); - JobManager jobManager = JobManager.build(config); - process.start(); - List sqlExplainResults = - jobManager.explainSql(studioExecuteDTO.getStatement()).getSqlExplainResults(); - process.finish(); - return sqlExplainResults; - } - - private List explainCommonSql(StudioExecuteDTO studioExecuteDTO) { - ProcessEntity process = ProcessContextHolder.registerProcess( - ProcessEntity.init(ProcessType.SQL_EXPLAIN, StpUtil.getLoginIdAsInt())); - process.info("Initializing database connection..."); - if (Asserts.isNull(studioExecuteDTO.getDatabaseId())) { - process.error("The database does not exist."); - return Collections.singletonList( - SqlExplainResult.fail(studioExecuteDTO.getStatement(), "Please specify the database.")); - } - - DataBase dataBase = dataBaseService.getById(studioExecuteDTO.getDatabaseId()); - if (Asserts.isNull(dataBase)) { - process.error("The database does not exist."); - return Collections.singletonList( - SqlExplainResult.fail(studioExecuteDTO.getStatement(), "The database does not exist.")); - } - try (Driver driver = Driver.build(dataBase.getDriverConfig())) { - process.infoSuccess(); - process.start(); - List explain = driver.explain(studioExecuteDTO.getStatement()); - process.finish(); - return explain; - } - } - - @Override - public ObjectNode getStreamGraph(StudioExecuteDTO studioExecuteDTO) { - addFlinkSQLEnv(studioExecuteDTO); - JobConfig config = studioExecuteDTO.getJobConfig(); - // If you are using explainSql | getStreamGraph | getJobPlan, make the dialect change to - // local. 
-        config.buildLocal();
-        buildSession(config);
-        JobManager jobManager = JobManager.buildPlanMode(config);
-        return jobManager.getStreamGraph(studioExecuteDTO.getStatement());
-    }
-
-    @Override
-    public ObjectNode getJobPlan(StudioExecuteDTO studioExecuteDTO) {
-        addFlinkSQLEnv(studioExecuteDTO);
-        JobConfig config = studioExecuteDTO.getJobConfig();
-        // If you are using explainSql | getStreamGraph | getJobPlan, make the dialect change to
-        // local.
-        config.buildLocal();
-        buildSession(config);
-
-        JobManager jobManager = JobManager.build(config);
-
-        String planJson = jobManager.getJobPlanJson(studioExecuteDTO.getStatement());
-        ObjectMapper mapper = new ObjectMapper();
-        ObjectNode objectNode = mapper.createObjectNode();
-        try {
-            objectNode = (ObjectNode) mapper.readTree(planJson);
-        } catch (JsonProcessingException e) {
-            e.printStackTrace();
-        } finally {
-            return objectNode;
-        }
-    }
-
-    @Override
-    public JdbcSelectResult getCommonSqlData(Integer taskId) {
-        return COMMON_SQL_SEARCH_CACHE.get(taskId);
-    }
-
     @Override
     public SelectResult getJobData(String jobId) {
         return JobManager.getJobData(jobId);
@@ -373,7 +123,8 @@ public LineageResult getLineage(StudioCADTO studioCADTO) {
                         studioCADTO.getStatement(), studioCADTO.getDialect().toLowerCase(), dataBase.getDriverConfig());
             }
         } else {
-            addFlinkSQLEnv(studioCADTO);
+            String envSql = taskService.buildEnvSql(studioCADTO);
+            studioCADTO.setStatement(studioCADTO.getStatement() + envSql);
             return LineageBuilder.getColumnLineageByLogicalPlan(studioCADTO.getStatement());
         }
     }
@@ -385,73 +136,15 @@ public List listFlinkJobs(Integer clusterId) {
         try {
             return FlinkAPI.build(cluster.getJobManagerHost()).listJobs();
         } catch (Exception e) {
-            logger.info("Cluster does not exist when querying jobs.");
+            log.info("Cluster does not exist when querying jobs.");
         }
         return new ArrayList<>();
     }
 
-    @Override
-    public boolean cancelFlinkJob(Integer clusterId, String jobId) {
-        Cluster cluster = clusterInstanceService.getById(clusterId);
-        Asserts.checkNotNull(cluster, "The cluster does not exist.");
-        JobConfig jobConfig = new JobConfig();
-        jobConfig.setAddress(cluster.getJobManagerHost());
-        if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
-            FlinkClusterConfig gatewayConfig =
-                    clusterConfigurationService.getFlinkClusterCfg(cluster.getClusterConfigurationId());
-            jobConfig.buildGatewayConfig(gatewayConfig);
-        }
-        JobManager jobManager = JobManager.build(jobConfig);
-        return jobManager.cancel(jobId);
-    }
-
-    @Override
-    public boolean savepointTrigger(
-            Integer taskId, Integer clusterId, String jobId, String savePointType, String name) {
-        Cluster cluster = clusterInstanceService.getById(clusterId);
-
-        Asserts.checkNotNull(cluster, "The cluster does not exist.");
-        JobConfig jobConfig = new JobConfig();
-        jobConfig.setAddress(cluster.getJobManagerHost());
-        jobConfig.setType(cluster.getType());
-        if (Asserts.isNotNull(cluster.getClusterConfigurationId())) {
-            // If the user hosts the cluster information on the Dinky platform, the job must have been submitted from Dinky
-            FlinkClusterConfig gatewayConfig =
-                    clusterConfigurationService.getFlinkClusterCfg(cluster.getClusterConfigurationId());
-            jobConfig.buildGatewayConfig(gatewayConfig);
-            jobConfig.getGatewayConfig().getClusterConfig().setAppId(cluster.getName());
-            jobConfig.setTaskId(cluster.getTaskId());
-        } else {
-            // The user hosts the cluster information on an external platform, so jobs on the cluster were not necessarily submitted through Dinky
-            jobConfig.setTaskId(taskId);
-        }
-        JobManager jobManager = JobManager.build(jobConfig);
-
-        SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType, null);
-        if (Asserts.isNotNull(savePointResult)) {
-            if (jobConfig.getTaskId().equals(0)) {
-                return true;
-            }
-
-            for (JobInfo item :
savePointResult.getJobInfos()) { - if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId()) && Asserts.isNotNull(jobConfig.getTaskId())) { - Savepoints savepoints = new Savepoints(); - savepoints.setName(name); - savepoints.setType(savePointType); - savepoints.setPath(item.getSavePoint()); - savepoints.setTaskId(jobConfig.getTaskId()); - savepointsService.save(savepoints); - } - } - return true; - } - return false; - } - @Override public List getMSCatalogs(StudioMetaStoreDTO studioMetaStoreDTO) { List catalogs = new ArrayList<>(); - if (Dialect.notFlinkSql(studioMetaStoreDTO.getDialect())) { + if (Dialect.isCommonSql(studioMetaStoreDTO.getDialect())) { DataBase dataBase = dataBaseService.getById(studioMetaStoreDTO.getDatabaseId()); if (!Asserts.isNull(dataBase)) { Catalog defaultCatalog = Catalog.build(FlinkQuery.defaultCatalog()); @@ -496,7 +189,7 @@ public List getMSCatalogs(StudioMetaStoreDTO studioMetaStoreDTO) { public Schema getMSSchemaInfo(StudioMetaStoreDTO studioMetaStoreDTO) { Schema schema = Schema.build(studioMetaStoreDTO.getDatabase()); List tables = new ArrayList<>(); - if (Dialect.notFlinkSql(studioMetaStoreDTO.getDialect())) { + if (Dialect.isCommonSql(studioMetaStoreDTO.getDialect())) { DataBase dataBase = dataBaseService.getById(studioMetaStoreDTO.getDatabaseId()); if (Asserts.isNotNull(dataBase)) { Driver driver = Driver.build(dataBase.getDriverConfig()); @@ -539,7 +232,7 @@ public Schema getMSSchemaInfo(StudioMetaStoreDTO studioMetaStoreDTO) { @Override public List getMSFlinkColumns(StudioMetaStoreDTO studioMetaStoreDTO) { List columns = new ArrayList<>(); - if (!Dialect.notFlinkSql(studioMetaStoreDTO.getDialect())) { + if (!Dialect.isCommonSql(studioMetaStoreDTO.getDialect())) { String baseStatement = FlinkQuery.useCatalog(studioMetaStoreDTO.getCatalog()) + FlinkQuery.separator() + FlinkQuery.useDatabase(studioMetaStoreDTO.getDatabase()) diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index d49150ec066..169679cd3ef 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -19,22 +19,21 @@ package org.dinky.service.impl; -import org.dinky.assertion.Assert; import org.dinky.assertion.Asserts; import org.dinky.config.Dialect; -import org.dinky.context.RowLevelPermissionsContext; import org.dinky.context.TenantContextHolder; import org.dinky.data.constant.CommonConstant; -import org.dinky.data.dto.JobDataDto; +import org.dinky.data.dto.AbstractStatementDTO; import org.dinky.data.dto.SqlDTO; +import org.dinky.data.dto.TaskDTO; import org.dinky.data.dto.TaskRollbackVersionDTO; -import org.dinky.data.dto.TaskVersionConfigureDTO; import org.dinky.data.enums.JobLifeCycle; import org.dinky.data.enums.JobStatus; import org.dinky.data.enums.Status; -import org.dinky.data.enums.TaskOperatingSavepointSelect; -import org.dinky.data.enums.TaskOperatingStatus; import org.dinky.data.exception.BusException; +import org.dinky.data.exception.NotSupportExplainExcepition; +import org.dinky.data.exception.SqlExplainExcepition; +import org.dinky.data.exception.TaskNotDoneException; import org.dinky.data.model.AlertGroup; import org.dinky.data.model.Catalogue; import org.dinky.data.model.Cluster; @@ -45,25 +44,20 @@ import org.dinky.data.model.JobInstance; import org.dinky.data.model.JobModelOverview; import org.dinky.data.model.JobTypeOverView; -import org.dinky.data.model.RowPermissions; 
import org.dinky.data.model.Savepoints; -import org.dinky.data.model.Statement; import org.dinky.data.model.SystemConfiguration; import org.dinky.data.model.Task; import org.dinky.data.model.TaskExtConfig; import org.dinky.data.model.TaskVersion; import org.dinky.data.model.UDFTemplate; import org.dinky.data.result.Result; +import org.dinky.data.result.ResultPool; import org.dinky.data.result.SqlExplainResult; -import org.dinky.data.result.TaskOperatingResult; import org.dinky.function.compiler.CustomStringJavaCompiler; import org.dinky.function.pool.UdfCodePool; import org.dinky.function.util.UDFUtil; -import org.dinky.gateway.Gateway; -import org.dinky.gateway.config.GatewayConfig; import org.dinky.gateway.enums.GatewayType; import org.dinky.gateway.enums.SavePointStrategy; -import org.dinky.gateway.enums.SavePointType; import org.dinky.gateway.model.FlinkClusterConfig; import org.dinky.gateway.model.JobInfo; import org.dinky.gateway.result.SavePointResult; @@ -72,11 +66,11 @@ import org.dinky.job.JobManager; import org.dinky.job.JobResult; import org.dinky.mapper.TaskMapper; -import org.dinky.metadata.driver.Driver; import org.dinky.metadata.result.JdbcSelectResult; import org.dinky.mybatis.service.impl.SuperServiceImpl; import org.dinky.process.context.ProcessContextHolder; import org.dinky.process.enums.ProcessType; +import org.dinky.process.exception.ExcuteException; import org.dinky.process.model.ProcessEntity; import org.dinky.service.AlertGroupService; import org.dinky.service.CatalogueService; @@ -84,9 +78,7 @@ import org.dinky.service.ClusterInstanceService; import org.dinky.service.DataBaseService; import org.dinky.service.FragmentVariableService; -import org.dinky.service.HistoryService; import org.dinky.service.JarService; -import org.dinky.service.JobHistoryService; import org.dinky.service.JobInstanceService; import org.dinky.service.SavepointsService; import org.dinky.service.StatementService; @@ -95,6 +87,8 @@ import org.dinky.service.UDFTemplateService; import org.dinky.service.UserService; import org.dinky.utils.FragmentVariableUtils; +import org.dinky.utils.JsonUtils; +import org.dinky.utils.RunTimeUtil; import org.dinky.utils.UDFUtils; import org.apache.commons.collections4.CollectionUtils; @@ -104,19 +98,15 @@ import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; -import java.time.LocalDateTime; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.ConcurrentHashMap; +import java.util.Optional; import java.util.stream.Collectors; import javax.annotation.Resource; -import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Service; @@ -127,292 +117,305 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import cn.dev33.satoken.stp.StpUtil; import cn.hutool.core.bean.BeanUtil; +import cn.hutool.core.lang.Assert; import cn.hutool.core.lang.tree.Tree; import cn.hutool.core.lang.tree.TreeNode; import cn.hutool.core.lang.tree.TreeUtil; -import cn.hutool.core.util.StrUtil; +import cn.hutool.core.text.StrFormatter; import 
lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * TaskServiceImpl
  */
 @Service
 @RequiredArgsConstructor
+@Slf4j
 public class TaskServiceImpl extends SuperServiceImpl<TaskMapper, Task> implements TaskService {
 
+    private final SavepointsService savepointsService;
     private final StatementService statementService;
     private final ClusterInstanceService clusterInstanceService;
     private final ClusterConfigurationService clusterCfgService;
-    private final SavepointsService savepointsService;
     private final JarService jarService;
     private final DataBaseService dataBaseService;
     private final JobInstanceService jobInstanceService;
-    private final JobHistoryService jobHistoryService;
     private final AlertGroupService alertGroupService;
-    private final HistoryService historyService;
     private final TaskVersionService taskVersionService;
     private final FragmentVariableService fragmentVariableService;
     private final UDFTemplateService udfTemplateService;
-    private final DataSourceProperties dataSourceProperties;
+    private final DataSourceProperties dsProperties;
     private final UserService userService;
 
     @Resource
     @Lazy
     private CatalogueService catalogueService;
 
-    private static final ObjectMapper mapper = new ObjectMapper();
-
-    private String driver() {
-        return dataSourceProperties.getDriverClassName();
+    private String[] buildParams(int id) {
+        return String.format(
+                        "--id %d --driver %s --url %s --username %s --password %s --dinkyAddr %s",
+                        id,
+                        dsProperties.getDriverClassName(),
+                        dsProperties.getUrl(),
+                        dsProperties.getUsername(),
+                        dsProperties.getPassword(),
+                        SystemConfiguration.getInstances().getDinkyAddr())
+                .split(" ");
     }
 
-    private String url() {
-        return dataSourceProperties.getUrl();
-    }
+    private void preCheckTask(TaskDTO task) throws TaskNotDoneException, SqlExplainExcepition {
 
-    private String username() {
-        return dataSourceProperties.getUsername();
-    }
+        Assert.notNull(task, Status.TASK_NOT_EXIST.getMessage());
 
-    private String password() {
-        return dataSourceProperties.getPassword();
-    }
+        if (!Dialect.isCommonSql(task.getDialect())
+                && Asserts.isNotNull(task.getJobInstanceId())
+                && task.getJobInstanceId() > 0) {
+            String status = jobInstanceService.getById(task.getJobInstanceId()).getStatus();
+            if (!JobStatus.isDone(status)) {
+                throw new TaskNotDoneException(Status.TASK_STATUS_IS_NOT_DONE.getMessage());
+            }
+        }
 
-    @Value("server.port")
-    private String serverPort;
+        List<SqlExplainResult> sqlExplainResults = explainTask(task);
+        for (SqlExplainResult sqlExplainResult : sqlExplainResults) {
+            if (!sqlExplainResult.isParseTrue() || !sqlExplainResult.isExplainTrue()) {
+                throw new SqlExplainExcepition(StrFormatter.format(
+                        "task [{}] sql explain failed, sql [{}], error: [{}]",
+                        task.getName(),
+                        sqlExplainResult.getSql(),
+                        sqlExplainResult.getError()));
+            }
+        }
+    }
 
-    private String[] buildParas(Integer id) {
-        return buildParas(id, StrUtil.NULL);
+    public JobResult executeJob(TaskDTO task) {
+        ProcessEntity process = ProcessContextHolder.getProcess();
+        if (Dialect.isCommonSql(task.getDialect())) {
+            process.info("Preparing to execute common sql...");
+            JobResult jobResult =
+                    dataBaseService.executeCommonSql(SqlDTO.build(task.getStatement(), task.getDatabaseId(), null));
+            ResultPool.putCommonSqlCache(task.getId(), (JdbcSelectResult) jobResult.getResult());
+            return jobResult;
+        } else {
+            process.info("Initializing Flink job config...");
+            JobManager jobManager = JobManager.build(buildJobConfig(task));
+            return jobManager.executeSql(task.getStatement());
+        }
     }
 
-    private String[] buildParas(Integer id,
String dinkyAddr) {
-        return String.format(
-                        "--id %d --driver %s --url %s --username %s --password %s --dinkyAddr %s",
-                        id, driver(), url(), username(), password(), dinkyAddr)
-                .split(" ");
+    private JobConfig buildJobConfig(TaskDTO task) {
+        task.setStatement(buildEnvSql(task) + task.getStatement());
+        JobConfig config = task.getJobConfig();
+
+        Savepoints savepoints = savepointsService.getSavePointWithStrategy(task);
+        if (Asserts.isNotNull(savepoints)) {
+            config.setSavePointPath(savepoints.getPath());
+            config.getConfigJson().put("execution.savepoint.path", savepoints.getPath()); // todo: write a utility class to handle the related configuration
+        }
+        if (GatewayType.get(task.getType()).isDeployCluster()) {
+            FlinkClusterConfig flinkClusterCfg =
+                    clusterCfgService.getFlinkClusterCfg(config.getClusterConfigurationId());
+            flinkClusterCfg.getAppConfig().setUserJarParas(buildParams(config.getTaskId()));
+            config.buildGatewayConfig(flinkClusterCfg);
+        } else {
+            String address = clusterInstanceService.buildEnvironmentAddress(config.isUseRemote(), task.getClusterId());
+            config.setAddress(address);
+        }
+        return config;
     }
 
     @Override
-    public JobResult submitTask(Integer id) {
-        Task task = this.getTaskInfoById(id);
-        Asserts.checkNull(task, Status.TASK_NOT_EXIST.getMessage());
-
-        if (Dialect.notFlinkSql(task.getDialect())) {
-            return executeCommonSql(SqlDTO.build(task.getStatement(), task.getDatabaseId(), null));
+    public String buildEnvSql(AbstractStatementDTO task) {
+        ProcessEntity process = ProcessContextHolder.getProcess();
+        process.info("Start initializing FlinkSQLEnv:");
+        String sql = CommonConstant.LineSep;
+        if (task.isFragment()) {
+            String flinkWithSql = dataBaseService.getEnabledFlinkWithSql();
+            if (Asserts.isNotNullString(flinkWithSql)) {
+                sql += flinkWithSql + CommonConstant.LineSep;
+            }
+            task.setVariables(fragmentVariableService.listEnabledVariables());
         }
+        int envId = Optional.ofNullable(task.getEnvId()).orElse(-1);
+        if (envId != -1) {
+            TaskDTO envTask = this.getTaskInfoById(task.getEnvId());
+            if (Asserts.isNotNull(envTask) && Asserts.isNotNullString(envTask.getStatement())) {
+                sql += envTask.getStatement() + CommonConstant.LineSep;
+            }
+        }
+        process.info("Initializing data permissions...");
+        userService.buildRowPermission();
+        process.info("Finished initializing FlinkSQLEnv.");
+        return sql;
+    }
 
+    @Override
+    public JobResult submitTask(Integer id, String savePointPath) throws ExcuteException {
+        initTenantByTaskId(id);
         ProcessEntity process = StpUtil.isLogin() ?
                ProcessContextHolder.registerProcess(
                        ProcessEntity.init(ProcessType.FLINK_SUBMIT, StpUtil.getLoginIdAsInt()))
                : ProcessEntity.NULL_PROCESS;
+        process.start();
 
-        process.info("Initializing Flink job config...");
-        JobConfig config = buildJobConfig(task);
+        TaskDTO task = this.getTaskInfoById(id);
 
-        // if (GatewayType.KUBERNETES_APPLICATION.equalsValue(config.getType())) {
-        //     loadDocker(id, config.getClusterConfigurationId(), config.getGatewayConfig());
-        // }
+        if (StringUtils.isNotBlank(savePointPath)) {
+            task.setSavePointStrategy(SavePointStrategy.CUSTOM.getValue());
+            task.setSavePointPath(savePointPath);
+        }
 
-        JobManager jobManager = JobManager.build(config);
-        process.start();
-        JobResult jobResult;
-        if (config.isJarTask()) {
-            jobResult = jobManager.executeJar();
-            if (jobResult.isSuccess()) {
-                process.finish("Submit Flink SQL finished, JobManager Web Interface: http://"
-                        + jobResult.getJobManagerAddress());
-            } else {
-                // If the submission fails, print only the key error information
-                process.error("Submit Flink SQL " + jobResult.getStatus());
-                if (Asserts.isNotNull(jobResult.getError())) {
-                    process.error(jobResult.getError().split("\n")[0]);
-                    Arrays.stream(jobResult.getError().split("\n"))
-                            .filter(row -> row.contains("Caused by"))
-                            .forEach(row -> process.error(row));
-                }
+        preCheckTask(task);
+
+        JobResult jobResult = executeJob(task);
+        process.info("Execute job finished, status is " + jobResult.getStatus());
+
+        if (Job.JobStatus.SUCCESS == jobResult.getStatus()) {
+            process.info("Job submit success");
+            task.setJobInstanceId(jobResult.getJobInstanceId());
+            if (!this.updateById(task.buildTask())) {
+                throw new BusException(Status.TASK_UPDATE_FAILED.getMessage());
+            }
         } else {
-            jobResult = jobManager.executeSql(task.getStatement());
-            process.finish("Submit Flink SQL finished.");
+            process.error("Job submit failed, error: " + jobResult.getError());
         }
-        return jobResult;
-    }
+        process.finish();
+        return jobResult;
     }
 
-    private void loadDocker(Integer taskId, Integer clusterConfigurationId, GatewayConfig gatewayConfig) {
-        // Map dockerConfig = clusterCfgService
-        //         .getClusterConfigById(clusterConfigurationId)
-        //         .getFlinkClusterCfg()
-        //         .getKubernetesConfig()
-        //         .getDockerConfig();
-        //
-        // if (dockerConfig == null) {
-        //     return;
-        // }
-        //
-        // String[] params = buildParas(
-        //         taskId, dockerConfig.getOrDefault("dinky.remote.addr", "").toString());
-        //
-        // gatewayConfig.getAppConfig().setUserJarParas(params);
-        //
-        // Docker docker = Docker.build(dockerConfig);
-        // if (docker == null || StringUtils.isBlank(docker.getInstance())) {
-        //     return;
-        // }
-        //
-        // DockerClientUtils dockerClientUtils = new DockerClientUtils(docker);
-        // String tag = dockerClientUtils.getDocker().getTag();
-        // if (StrUtil.isNotBlank(tag)) {
-        //     gatewayConfig.getFlinkConfig().getConfiguration().put("kubernetes.container.image", tag);
-        // }
     }
 
     @Override
-    public JobResult submitTaskToOnline(Task dtoTask, Integer id) {
-        final Task task = dtoTask == null ?
this.getTaskInfoById(id) : dtoTask; + public JobResult restartTask(Integer id, String savePointPath) throws ExcuteException { + TaskDTO task = this.getTaskInfoById(id); Asserts.checkNull(task, Status.TASK_NOT_EXIST.getMessage()); - task.setStep(JobLifeCycle.ONLINE.getValue()); - - if (Dialect.notFlinkSql(task.getDialect())) { - return executeCommonSql(SqlDTO.build(task.getStatement(), task.getDatabaseId(), null)); - } - - JobConfig config = buildJobConfig(task); - JobManager jobManager = JobManager.build(config); - if (config.isJarTask()) { - return jobManager.executeJar(); + if (!Dialect.isCommonSql(task.getDialect()) && Asserts.isNotNull(task.getJobInstanceId())) { + String status = jobInstanceService.getById(task.getJobInstanceId()).getStatus(); + if (!JobStatus.isDone(status)) { + cancelTaskJob(task); + } } - return jobManager.executeSql(task.getStatement()); + return submitTask(id, savePointPath); } @Override - public JobResult restartTask(Integer id, String savePointPath) { - Task task = this.getTaskInfoById(id); - Asserts.checkNull(task, Status.TASK_NOT_EXIST.getMessage()); - if (checkJobInstanceId(task)) { - savepointJobInstance(task.getJobInstanceId(), SavePointType.CANCEL.getValue()); - } - - if (Dialect.notFlinkSql(task.getDialect())) { - return executeCommonSql(SqlDTO.build(task.getStatement(), task.getDatabaseId(), null)); - } - - if (StringUtils.isBlank(savePointPath)) { - task.setSavePointStrategy(SavePointStrategy.LATEST.getValue()); - } else { - task.setSavePointStrategy(SavePointStrategy.CUSTOM.getValue()); - task.setSavePointPath(savePointPath); - updateById(task); - } + public boolean cancelTaskJob(TaskDTO task) { + JobInstance jobInstance = jobInstanceService.getById(task.getJobInstanceId()); + Assert.notNull(jobInstance, Status.JOB_INSTANCE_NOT_EXIST.getMessage()); + Cluster cluster = clusterInstanceService.getById(jobInstance.getClusterId()); + Assert.notNull(cluster, Status.CLUSTER_NOT_EXIST.getMessage()); - JobConfig config = buildJobConfig(task); - JobManager jobManager = JobManager.build(config); - if (!config.isJarTask()) { - return jobManager.executeSql(task.getStatement()); - } else { - return jobManager.executeJar(); - } + JobManager jobManager = JobManager.build(buildJobConfig(task)); + boolean cancelled = jobManager.cancel(jobInstance.getJid()); + JobInfoDetail jobInfoDetail = jobInstanceService.refreshJobInfoDetail(jobInstance.getId()); + return cancelled; } - private JobResult executeCommonSql(SqlDTO sqlDTO) { - JobResult result = new JobResult(); - result.setStatement(sqlDTO.getStatement()); - result.setStartTime(LocalDateTime.now()); - - if (Asserts.isNull(sqlDTO.getDatabaseId())) { - result.setSuccess(false); - result.setError("please assign data source"); - result.setEndTime(LocalDateTime.now()); - return result; - } - - DataBase dataBase = dataBaseService.getById(sqlDTO.getDatabaseId()); - if (Asserts.isNull(dataBase)) { - result.setSuccess(false); - result.setError("data source not exist."); - result.setEndTime(LocalDateTime.now()); - return result; - } + @Override + public SavePointResult savepointTaskJob(TaskDTO task, String savePointType) { + JobInstance jobInstance = jobInstanceService.getById(task.getJobInstanceId()); + Assert.notNull(jobInstance, Status.JOB_INSTANCE_NOT_EXIST.getMessage()); - JdbcSelectResult selectResult; - try (Driver driver = Driver.build(dataBase.getDriverConfig())) { - selectResult = driver.executeSql(sqlDTO.getStatement(), sqlDTO.getMaxRowNum()); - } + JobManager jobManager = JobManager.build(buildJobConfig(task)); + 
+        String jobId = jobInstance.getJid();
 
-        result.setResult(selectResult);
-        if (selectResult.isSuccess()) {
-            result.setSuccess(true);
-        } else {
-            result.setSuccess(false);
-            result.setError(selectResult.getError());
+        SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType, null);
+        Assert.notNull(savePointResult.getJobInfos());
+        for (JobInfo item : savePointResult.getJobInfos()) {
+            if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId()) && Asserts.isNotNull(jobInstance.getTaskId())) {
+                Savepoints savepoints = new Savepoints();
+                savepoints.setName(savePointType);
+                savepoints.setType(savePointType);
+                savepoints.setPath(item.getSavePoint());
+                savepoints.setTaskId(task.getId());
+                savepointsService.save(savepoints);
+            }
+        }
-        result.setEndTime(LocalDateTime.now());
-        return result;
+        return savePointResult;
    }
 
    @Override
-    public List<SqlExplainResult> explainTask(Integer id) {
-        Task task = getTaskInfoById(id);
-        if (Dialect.notFlinkSql(task.getDialect())) {
-            return explainCommonSqlTask(task);
+    public List<SqlExplainResult> explainTask(TaskDTO task) throws NotSupportExplainExcepition {
+        if (Dialect.isCommonSql(task.getDialect())) {
+            return dataBaseService.explainCommonSql(task);
+        } else if (task.getDialect().equals(Dialect.FLINK_SQL.getValue())) {
+            JobConfig config = buildJobConfig(task);
+            config.buildLocal();
+            JobManager jobManager = JobManager.buildPlanMode(config);
+            return jobManager.explainSql(task.getStatement()).getSqlExplainResults();
        }
+        throw new NotSupportExplainExcepition(StrFormatter.format(
+                "task [{}] with dialect [{}] cannot be explained, skipping the SQL explain check",
+                task.getName(),
+                task.getDialect()));
+    }
 
-        return explainFlinkSqlTask(task);
+    @Override
+    public ObjectNode getJobPlan(TaskDTO task) {
+        JobManager jobManager = JobManager.buildPlanMode(buildJobConfig(task));
+        String planJson = jobManager.getJobPlanJson(task.getStatement());
+        return JsonUtils.parseObject(planJson);
    }
 
-    private List<SqlExplainResult> explainFlinkSqlTask(Task task) {
-        JobConfig config = buildJobConfig(task);
-        config.buildLocal();
+    @Override
+    public ObjectNode getStreamGraph(TaskDTO taskDTO) {
+        JobConfig config = taskDTO.getJobConfig();
        JobManager jobManager = JobManager.buildPlanMode(config);
-        return jobManager.explainSql(task.getStatement()).getSqlExplainResults();
+        ObjectNode streamGraph = jobManager.getStreamGraph(taskDTO.getStatement());
+        RunTimeUtil.recovery(jobManager);
+        return streamGraph;
    }
 
-    private List<SqlExplainResult> explainCommonSqlTask(Task task) {
-        if (Asserts.isNull(task.getDatabaseId())) {
-            return Collections.singletonList(SqlExplainResult.fail(task.getStatement(), "please assign data source."));
+    @Override
+    public String exportSql(Integer id) {
+        TaskDTO task = this.getTaskInfoById(id);
+        Asserts.checkNull(task, Status.TASK_NOT_EXIST.getMessage());
+        if (Dialect.isCommonSql(task.getDialect())) {
+            return task.getStatement();
        }
 
-        DataBase dataBase = dataBaseService.getById(task.getDatabaseId());
-        if (Asserts.isNull(dataBase)) {
-            return Collections.singletonList(SqlExplainResult.fail(task.getStatement(), "data source not exist."));
+        JobConfig config = buildJobConfig(task);
+
+        // Mask sensitive variable values before exporting
+        if (config.getVariables() != null) {
+            for (Map.Entry<String, String> entry : config.getVariables().entrySet()) {
+                if (FragmentVariableUtils.isSensitive(entry.getKey())) {
+                    entry.setValue(FragmentVariableUtils.HIDDEN_CONTENT);
+                }
+            }
+        }
 
-        List<SqlExplainResult> sqlExplainResults;
-        try (Driver driver = Driver.build(dataBase.getDriverConfig())) {
-            sqlExplainResults = driver.explain(task.getStatement());
+        JobManager jobManager = JobManager.build(config);
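+
+        // JAR tasks carry no SQL statement, so an empty string is exported for them below.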
if (config.isJarTask()) { + return ""; } - return sqlExplainResults; + + return jobManager.exportSql(task.getStatement()); } @Override - public Task getTaskInfoById(Integer id) { - Task task = this.getById(id); - if (task == null) { - return null; - } - if (task.getClusterId() != null) { - Cluster cluster = clusterInstanceService.getById(task.getClusterId()); + public TaskDTO getTaskInfoById(Integer id) { + Task mTask = this.getById(id); + Assert.notNull(mTask, Status.TASK_NOT_EXIST.getMessage()); + TaskDTO taskDTO = new TaskDTO(); + BeanUtil.copyProperties(mTask, taskDTO); + + if (taskDTO.getClusterId() != null) { + Cluster cluster = clusterInstanceService.getById(taskDTO.getClusterId()); if (cluster != null) { - task.setClusterName(cluster.getAlias()); + taskDTO.setClusterName(cluster.getAlias()); } } - - Statement statement = statementService.getById(id); - if (statement != null) { - task.setStatement(statement.getStatement()); - } - - JobInstance jobInstance = jobInstanceService.getJobInstanceByTaskId(id); - if (Asserts.isNotNull(jobInstance) && !JobStatus.isDone(jobInstance.getStatus())) { - task.setJobInstanceId(jobInstance.getId()); - } else { - task.setJobInstanceId(0); + if (taskDTO.getJobInstanceId() != null) { + JobInstance jobInstance = jobInstanceService.getById(taskDTO.getJobInstanceId()); + if (jobInstance != null) { + taskDTO.setStatus(jobInstance.getStatus()); + } } - return task; + return taskDTO; } @Override @@ -422,8 +425,23 @@ public void initTenantByTaskId(Integer id) { TenantContextHolder.set(tenantId); } + @Override + public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) { + TaskDTO taskInfoById = getTaskInfoById(taskId); + taskInfoById.setStep(lifeCycle.getValue()); + if (lifeCycle == JobLifeCycle.ONLINE) { + taskVersionService.createTaskVersionSnapshot(taskInfoById); + } + return saveOrUpdate(taskInfoById.buildTask()); + } + @Override public boolean saveOrUpdateTask(Task task) { + + if (JobLifeCycle.ONLINE.equalsValue(task.getStep())) { + throw new BusException(Status.TASK_IS_ONLINE.getMessage()); + } + if (Dialect.isUDF(task.getDialect())) { TaskExtConfig taskConfigJson = task.getConfigJson(); @@ -456,56 +474,20 @@ public boolean saveOrUpdateTask(Task task) { UdfCodePool.addOrUpdate(UDFUtils.taskToUDF(task)); } - // if modify task else create task - if (task.getId() != null) { - Task taskInfo = getById(task.getId()); - Assert.check(taskInfo); - if (JobLifeCycle.RELEASE.equalsValue(taskInfo.getStep()) - || JobLifeCycle.ONLINE.equalsValue(taskInfo.getStep()) - || JobLifeCycle.CANCEL.equalsValue(taskInfo.getStep())) { - throw new BusException( - "该作业已" + JobLifeCycle.get(taskInfo.getStep()).getLabel() + ",禁止修改!"); - } - task.setStep(JobLifeCycle.DEVELOP.getValue()); - this.updateById(task); - if (task.getStatement() != null) { - Statement statement = new Statement(); - statement.setId(task.getId()); - statement.setStatement(task.getStatement()); - statementService.updateById(statement); - } - } else { - task.setStep(JobLifeCycle.CREATE.getValue()); - if (task.getCheckPoint() == null) { - task.setCheckPoint(0); - } - if (task.getParallelism() == null) { - task.setParallelism(1); - } - if (task.getClusterId() == null) { - task.setClusterId(0); - } - this.save(task); - Statement statement = new Statement(); - statement.setId(task.getId()); - if (task.getStatement() == null) { - task.setStatement(""); - } - statement.setStatement(task.getStatement()); - statementService.insert(statement); - } - return true; + return this.saveOrUpdate(task); } 
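 
    // A minimal sketch of how the refactored life cycle is driven end to end
    // (hypothetical caller code, assuming an injected TaskService bean):
    //
    //   taskService.saveOrUpdateTask(task);                                  // edit while in DEVELOP
    //   taskService.changeTaskLifeRecyle(task.getId(), JobLifeCycle.ONLINE); // snapshot a version, go online
    //   JobResult result = taskService.submitTask(task.getId(), null);       // submit with no custom savepoint
    //
    // Once a task is ONLINE, saveOrUpdateTask rejects further edits with
    // Status.TASK_IS_ONLINE until the task is taken offline again.
 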
@Override public List listFlinkSQLEnv() { return this.list(new QueryWrapper() - .eq("dialect", Dialect.FLINK_SQL_ENV.getValue()) - .eq("enabled", 1)); + .lambda() + .eq(Task::getDialect, Dialect.FLINK_SQL_ENV.getValue()) + .eq(Task::getEnabled, 1)); } @Override public Task initDefaultFlinkSQLEnv(Integer tenantId) { + TenantContextHolder.set(tenantId); String separator = SystemConfiguration.getInstances().getSqlSeparator(); separator = separator.replace("\\r", "\r").replace("\\n", "\n"); String name = "DefaultCatalog"; @@ -520,15 +502,16 @@ public Task initDefaultFlinkSQLEnv(Integer tenantId) { + "'password' = '%s',\n" + " 'url' = '%s'\n" + ")%suse catalog my_catalog%s", - username(), password(), url(), separator, separator); + dsProperties.getUsername(), dsProperties.getPassword(), dsProperties.getUrl(), separator, separator); if (null != defaultFlinkSQLEnvTask) { - statementEquals(tenantId, defaultFlinkSQLEnvTask, sql); + defaultFlinkSQLEnvTask.setStatement(sql); + saveOrUpdateTask(defaultFlinkSQLEnvTask); return defaultFlinkSQLEnvTask; } defaultFlinkSQLEnvTask = new Task(); - defaultFlinkSQLEnvTask.setName("DefaultCatalog"); + defaultFlinkSQLEnvTask.setName(name); defaultFlinkSQLEnvTask.setDialect(Dialect.FLINK_SQL_ENV.getValue()); defaultFlinkSQLEnvTask.setStatement(sql); defaultFlinkSQLEnvTask.setFragment(true); @@ -536,59 +519,14 @@ public Task initDefaultFlinkSQLEnv(Integer tenantId) { defaultFlinkSQLEnvTask.setEnabled(true); saveOrUpdate(defaultFlinkSQLEnvTask); - Statement statement = new Statement(); - statement.setId(defaultFlinkSQLEnvTask.getId()); - statement.setTenantId(tenantId); - statement.setStatement(sql); - statementService.saveOrUpdate(statement); - return defaultFlinkSQLEnvTask; } - /** - * 数据库信息发生修改后,catalog ddl也随之改变 - * - * @param tenantId - * @param defaultFlinkSQLEnvTask - * @param sql - */ - private void statementEquals(Integer tenantId, Task defaultFlinkSQLEnvTask, String sql) { - TenantContextHolder.set(tenantId); - - // 对比catalog ddl,不相同则更新dinky_task_statement表 - boolean equals = StringUtils.equals( - sql, statementService.getById(defaultFlinkSQLEnvTask.getId()).getStatement()); - if (!equals) { - Statement statement = new Statement(); - statement.setId(defaultFlinkSQLEnvTask.getId()); - statement.setTenantId(tenantId); - statement.setStatement(sql); - statementService.saveOrUpdate(statement); - } - } - @Override public Task getTaskByNameAndTenantId(String name, Integer tenantId) { return baseMapper.getTaskByNameAndTenantId(name, tenantId); } - @Override - public JobStatus checkJobStatus(JobInfoDetail jobInfoDetail) { - if (Asserts.isNull(jobInfoDetail.getClusterConfiguration())) { - return JobStatus.UNKNOWN; - } - - Integer clusterId = jobInfoDetail.getClusterConfiguration().getId(); - String appId = jobInfoDetail.getCluster().getName(); - - FlinkClusterConfig clusterConfig = clusterCfgService.getFlinkClusterCfg(clusterId); - GatewayConfig gatewayConfig = GatewayConfig.build(clusterConfig); - gatewayConfig.getClusterConfig().setAppId(appId); - - Gateway gateway = Gateway.build(gatewayConfig); - return gateway.getJobStatusById(appId); - } - @Override public List getTaskOnlineRate() { return baseMapper.getTaskOnlineRate(); @@ -599,44 +537,6 @@ public JobModelOverview getJobStreamingOrBatchModelOverview() { return baseMapper.getJobStreamingOrBatchModelOverview(); } - @Override - public String exportSql(Integer id) { - Task task = getTaskInfoById(id); - Asserts.checkNull(task, Status.TASK_NOT_EXIST.getMessage()); - if (Dialect.notFlinkSql(task.getDialect())) { - 
return task.getStatement(); - } - - JobConfig config = buildJobConfig(task); - - // 加密敏感信息 - if (config.getVariables() != null) { - for (Map.Entry entry : config.getVariables().entrySet()) { - if (FragmentVariableUtils.isSensitive(entry.getKey())) { - entry.setValue(FragmentVariableUtils.HIDDEN_CONTENT); - } - } - } - - JobManager jobManager = JobManager.build(config); - if (config.isJarTask()) { - return ""; - } - - return jobManager.exportSql(task.getStatement()); - } - - @Override - public Task getUDFByClassName(String className) { - Task task = getOne(new QueryWrapper() - .in("dialect", Dialect.JAVA, Dialect.SCALA, Dialect.PYTHON) - .eq("enabled", 1) - .eq("save_point_path", className)); - Asserts.checkNull(task, StrUtil.format("class: {} ,not exists!", className)); - task.setStatement(statementService.getById(task.getId()).getStatement()); - return task; - } - @Override public List getAllUDF() { List tasks = list(new QueryWrapper() @@ -645,89 +545,18 @@ public List getAllUDF() { .isNotNull("save_point_path")); return tasks.stream() .peek(task -> { - Assert.check(task); + Assert.notNull(task, Status.TASK_NOT_EXIST.getMessage()); task.setStatement(statementService.getById(task.getId()).getStatement()); }) .collect(Collectors.toList()); } - @Override - public Result releaseTask(Integer id) { - Task task = getTaskInfoById(id); - Assert.check(task); - if (!JobLifeCycle.DEVELOP.equalsValue(task.getStep())) { - return Result.succeed("publish success!"); - } - - // KubernetesApplication is not sql, skip sqlExplain verify - if (!Dialect.KUBERNETES_APPLICATION.equalsVal(task.getDialect())) { - List sqlExplainResults = explainTask(id); - for (SqlExplainResult sqlExplainResult : sqlExplainResults) { - if (!sqlExplainResult.isParseTrue() || !sqlExplainResult.isExplainTrue()) { - return Result.failed("syntax or logic check error, publish failed"); - } - } - } - - task.setStep(JobLifeCycle.RELEASE.getValue()); - Task newTask = createTaskVersionSnapshot(task); - if (updateById(newTask)) { - return Result.succeed("publish success!"); - } else { - return Result.failed("publish failed, due to unknown reason"); - } - } - - public Task createTaskVersionSnapshot(Task task) { - List taskVersions = taskVersionService.getTaskVersionByTaskId(task.getId()); - List versionIds = - taskVersions.stream().map(TaskVersion::getVersionId).collect(Collectors.toList()); - Map versionMap = - taskVersions.stream().collect(Collectors.toMap(TaskVersion::getVersionId, t -> t)); - - TaskVersion taskVersion = new TaskVersion(); - BeanUtil.copyProperties(task, taskVersion); - TaskVersionConfigureDTO taskVersionConfigureDTO = new TaskVersionConfigureDTO(); - BeanUtil.copyProperties(task, taskVersionConfigureDTO); - taskVersion.setTaskConfigure(taskVersionConfigureDTO); - taskVersion.setTaskId(taskVersion.getId()); - taskVersion.setId(null); - if (Asserts.isNull(task.getVersionId())) { - // 首次发布,新增版本 - taskVersion.setVersionId(1); - task.setVersionId(1); - taskVersionService.save(taskVersion); - } else { - // 说明存在版本,需要判断是否 是回退后的老版本 - // 1、版本号存在 - // 2、md5值与上一个版本一致 - TaskVersion version = versionMap.get(task.getVersionId()); - version.setId(null); - - if (versionIds.contains(task.getVersionId()) && !taskVersion.equals(version)) { - // || !versionIds.contains(task.getVersionId()) && !taskVersion.equals(version) - taskVersion.setVersionId(Collections.max(versionIds) + 1); - task.setVersionId(Collections.max(versionIds) + 1); - taskVersionService.save(taskVersion); - } - } - return task; - } - @Override public Result 
rollbackTask(TaskRollbackVersionDTO dto) { if (Asserts.isNull(dto.getVersionId()) || Asserts.isNull(dto.getId())) { return Result.failed("the version is error"); } - Task taskInfo = getTaskInfoById(dto.getId()); - if (JobLifeCycle.RELEASE.equalsValue(taskInfo.getStep()) - || JobLifeCycle.ONLINE.equalsValue(taskInfo.getStep()) - || JobLifeCycle.CANCEL.equalsValue(taskInfo.getStep())) { - return Result.failed( - "this job had" + JobLifeCycle.get(taskInfo.getStep()).getLabel() + ", refuse to rollback!"); - } - LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper() .eq(TaskVersion::getTaskId, dto.getId()) .eq(TaskVersion::getVersionId, dto.getVersionId()); @@ -740,270 +569,9 @@ public Result rollbackTask(TaskRollbackVersionDTO dto) { updateTask.setId(taskVersion.getTaskId()); updateTask.setStep(JobLifeCycle.DEVELOP.getValue()); baseMapper.updateById(updateTask); - - Statement statement = new Statement(); - statement.setStatement(taskVersion.getStatement()); - statement.setId(taskVersion.getTaskId()); - statementService.updateById(statement); return Result.succeed("version rollback success!"); } - @Override - public boolean developTask(Integer id) { - Task task = getTaskInfoById(id); - Assert.check(task); - if (JobLifeCycle.RELEASE.equalsValue(task.getStep())) { - task.setStep(JobLifeCycle.DEVELOP.getValue()); - return updateById(task); - } - return false; - } - - @Override - public Result onLineTask(Integer id) { - final Task task = getTaskInfoById(id); - Assert.check(task); - if (JobLifeCycle.RELEASE.equalsValue(task.getStep())) { - if (checkJobInstanceId(task)) { - return Result.failed("当前发布状态下有作业正在运行,上线失败,请停止后上线"); - } - - final JobResult jobResult = submitTaskToOnline(task, id); - if (Job.JobStatus.SUCCESS == jobResult.getStatus()) { - task.setStep(JobLifeCycle.ONLINE.getValue()); - task.setJobInstanceId(jobResult.getJobInstanceId()); - if (updateById(task)) { - return Result.succeed(jobResult, "上线成功"); - } - - return Result.failed("由于未知原因,上线失败"); - } - - return Result.failed("上线失败,原因:" + jobResult.getError()); - } - - if (JobLifeCycle.ONLINE.equalsValue(task.getStep())) { - return Result.failed("上线失败,当前作业已上线。"); - } - return Result.failed("上线失败,当前作业未发布。"); - } - - private static boolean checkJobInstanceId(Task task) { - return Asserts.isNotNull(task.getJobInstanceId()) && task.getJobInstanceId() != 0; - } - - @Override - public Result reOnLineTask(Integer id, String savePointPath) { - final Task task = this.getTaskInfoById(id); - Asserts.checkNull(task, Status.TASK_NOT_EXIST.getMessage()); - if (checkJobInstanceId(task)) { - savepointJobInstance(task.getJobInstanceId(), SavePointType.CANCEL.getValue()); - } - - if (StringUtils.isNotBlank(savePointPath)) { - task.setSavePointStrategy(SavePointStrategy.CUSTOM.getValue()); - task.setSavePointPath(savePointPath); - } - - final JobResult jobResult = submitTaskToOnline(task, id); - if (Job.JobStatus.SUCCESS == jobResult.getStatus()) { - task.setStep(JobLifeCycle.ONLINE.getValue()); - task.setJobInstanceId(jobResult.getJobInstanceId()); - if (updateById(task)) { - return Result.succeed(jobResult, "重新上线成功"); - } - return Result.failed("由于未知原因,重新上线失败"); - } - return Result.failed("重新上线失败,原因:" + jobResult.getError()); - } - - @Override - public Result offLineTask(Integer id, String type) { - Task task = getTaskInfoById(id); - Assert.check(task); - - if (Asserts.isNullString(type)) { - type = SavePointType.CANCEL.getValue(); - } - - savepointTask(id, type); - if (!JobLifeCycle.ONLINE.equalsValue(task.getStep())) { - return 
Result.succeed("停止成功"); - } - - task.setStep(JobLifeCycle.RELEASE.getValue()); - updateById(task); - return Result.succeed("下线成功"); - } - - @Override - public Result cancelTask(Integer id) { - Task task = getTaskInfoById(id); - Assert.check(task); - if (JobLifeCycle.ONLINE != JobLifeCycle.get(task.getStep())) { - if (checkJobInstanceId(task)) { - return Result.failed("当前有作业正在运行,注销失败,请停止后注销"); - } - - task.setStep(JobLifeCycle.CANCEL.getValue()); - if (updateById(task)) { - return Result.succeed("注销成功"); - } - return Result.failed("由于未知原因,注销失败"); - } - return Result.failed("当前有作业已上线,无法注销,请下线后注销"); - } - - @Override - public boolean recoveryTask(Integer id) { - Task task = getTaskInfoById(id); - Assert.check(task); - if (JobLifeCycle.CANCEL == JobLifeCycle.get(task.getStep())) { - task.setStep(JobLifeCycle.DEVELOP.getValue()); - return updateById(task); - } - return false; - } - - private boolean savepointJobInstance(Integer jobInstanceId, String savePointType) { - JobInstance jobInstance = jobInstanceService.getById(jobInstanceId); - if (Asserts.isNull(jobInstance)) { - return true; - } - - Cluster cluster = clusterInstanceService.getById(jobInstance.getClusterId()); - Asserts.checkNotNull(cluster, "该集群不存在"); - - Task task = this.getTaskInfoById(jobInstance.getTaskId()); - JobConfig jobConfig = task.buildSubmitConfig(); - jobConfig.setType(cluster.getType()); - - if (Asserts.isNotNull(cluster.getClusterConfigurationId())) { - FlinkClusterConfig flinkClusterConfig = buildGatewayCfgObj(jobConfig); - jobConfig.buildGatewayConfig(flinkClusterConfig); - } - jobConfig.setAddress(cluster.getJobManagerHost()); - - JobManager jobManager = JobManager.build(jobConfig); - - String jobId = jobInstance.getJid(); - if ("canceljob".equals(savePointType)) { - return jobManager.cancel(jobId); - } - - SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType, null); - if (Asserts.isNotNull(savePointResult.getJobInfos())) { - for (JobInfo item : savePointResult.getJobInfos()) { - if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId()) && Asserts.isNotNull(jobConfig.getTaskId())) { - Savepoints savepoints = new Savepoints(); - savepoints.setName(savePointType); - savepoints.setType(savePointType); - savepoints.setPath(item.getSavePoint()); - savepoints.setTaskId(jobConfig.getTaskId()); - savepointsService.save(savepoints); - } - } - return true; - } - return false; - } - - @Override - public boolean savepointTask(Integer taskId, String savePointType) { - Task task = getTaskInfoById(taskId); - return savepointJobInstance(task.getJobInstanceId(), savePointType); - } - - private JobConfig buildJobConfig(Task task) { - if (!task.isJarTask()) { - String envSql = buildEnvSql(task); - task.setStatement(envSql + task.getStatement()); - } - - JobConfig config = task.buildSubmitConfig(); - - Savepoints savepoints = buildSavepoint(config); - if (Asserts.isNotNull(savepoints)) { - config.setSavePointPath(savepoints.getPath()); - config.getConfigJson().put("execution.savepoint.path", savepoints.getPath()); // todo: 写工具类处理相关配置 - } - - if (!GatewayType.get(task.getType()).isDeployCluster()) { - String address = clusterInstanceService.buildEnvironmentAddress(config.isUseRemote(), task.getClusterId()); - config.setAddress(address); - } else { - config.buildGatewayConfig(buildGatewayCfgObj(config)); - } - - config.setVariables(fragmentVariableService.listEnabledVariables()); - buildRowPermission(); - return config; - } - - private Savepoints buildSavepoint(JobConfig config) { - switch 
(config.getSavePointStrategy()) { - case LATEST: - return savepointsService.getLatestSavepointByTaskId(config.getTaskId()); - case EARLIEST: - return savepointsService.getEarliestSavepointByTaskId(config.getTaskId()); - case CUSTOM: - return new Savepoints() { - { - setPath(config.getSavePointPath()); - } - }; - default: - return null; - } - } - - private FlinkClusterConfig buildGatewayCfgObj(JobConfig config) { - FlinkClusterConfig flinkClusterCfg = clusterCfgService.getFlinkClusterCfg(config.getClusterConfigurationId()); - flinkClusterCfg.getAppConfig().setUserJarParas(buildParas(config.getTaskId())); - flinkClusterCfg.getFlinkConfig().getConfiguration().putAll(config.getConfigJson()); - - // if (config.isJarTask()) { - // JSONObject clusterObj = new JSONObject(flinkClusterCfg); - // JSONObject taskObj = new JSONObject(task.getStatement()); - // return JsonUtils.merge(clusterObj,taskObj).toBean(FlinkClusterConfig.class); - // } - return flinkClusterCfg; - } - - private String buildEnvSql(Task task) { - String sql = CommonConstant.LineSep; - boolean fragment = Asserts.isNotNull(task.getFragment()) ? task.getFragment() : false; - if (fragment) { - String flinkWithSql = dataBaseService.getEnabledFlinkWithSql(); - if (Asserts.isNotNullString(flinkWithSql)) { - sql += flinkWithSql + CommonConstant.LineSep; - } - } - - boolean isEnvIdValid = Asserts.isNotNull(task.getEnvId()) && task.getEnvId() != 0; - if (isEnvIdValid) { - Task envTask = getTaskInfoById(task.getEnvId()); - if (Asserts.isNotNull(envTask) && Asserts.isNotNullString(envTask.getStatement())) { - sql += envTask.getStatement() + CommonConstant.LineSep; - } - } - return sql; - } - - private void buildRowPermission() { - List currentRoleSelectPermissions = userService.getCurrentRoleSelectPermissions(); - if (Asserts.isNotNullCollection(currentRoleSelectPermissions)) { - ConcurrentHashMap permission = new ConcurrentHashMap<>(); - for (RowPermissions roleSelectPermissions : currentRoleSelectPermissions) { - if (Asserts.isAllNotNullString( - roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression())) { - permission.put(roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression()); - } - } - RowLevelPermissionsContext.set(permission); - } - } - @Override public String getTaskAPIAddress() { return SystemConfiguration.getInstances().getDinkyAddr().getValue(); @@ -1016,7 +584,7 @@ public Integer queryAllSizeByName(String name) { @Override public String exportJsonByTaskId(Integer taskId) { - Task task = getTaskInfoById(taskId); + TaskDTO task = getTaskInfoById(taskId); if (Asserts.isNotNull(task.getClusterId())) { Cluster cluster = clusterInstanceService.getById(task.getClusterId()); if (Asserts.isNotNull(cluster)) { @@ -1025,7 +593,7 @@ public String exportJsonByTaskId(Integer taskId) { } // path - ObjectNode jsonNode = (ObjectNode) task.parseJsonNode(mapper); + ObjectNode jsonNode = (ObjectNode) JsonUtils.toJsonNode(task); jsonNode.put("path", getTaskPathByTaskId(taskId)); // clusterConfigurationName @@ -1085,7 +653,7 @@ public Result uploadTaskJson(MultipartFile file) throws Exception { if (file.isEmpty() || file.getSize() <= 0 || fileName == null || "".equals(fileName)) { return Result.failed("传入的文件数据为空"); } - + ObjectMapper mapper = new ObjectMapper(); JsonNode jsonNode = mapper.readTree(getStrByJsonFile(file)); return buildTaskByJsonNode(jsonNode, mapper); } @@ -1101,9 +669,9 @@ public Result buildTaskByJsonNode(JsonNode jsonNode, ObjectMapper mapper) } int errorNumber = 0; - List tasks = new 
ArrayList<>(); + List tasks = new ArrayList<>(); for (JsonNode json : jsonNodes) { - Task task = mapper.treeToValue(json, Task.class); + TaskDTO task = mapper.treeToValue(json, TaskDTO.class); if (Asserts.isNotNull(task.getClusterName())) { Cluster cluster = clusterInstanceService.getOne(new QueryWrapper().eq("name", task.getClusterName())); @@ -1152,12 +720,7 @@ public Result buildTaskByJsonNode(JsonNode jsonNode, ObjectMapper mapper) continue; } - Integer step = task.getStep(); - this.saveOrUpdateTask(task); - if (!JobLifeCycle.CREATE.getValue().equals(step)) { - task.setStep(step); - updateById(task); - } + this.saveOrUpdateTask(task.buildTask()); if (Asserts.isNotNull(task.getEnvName())) { tasks.add(task); } @@ -1165,11 +728,11 @@ public Result buildTaskByJsonNode(JsonNode jsonNode, ObjectMapper mapper) catalogueService.saveOrUpdate(catalogue); } - for (Task task : tasks) { + for (TaskDTO task : tasks) { Task task1 = getOne(new QueryWrapper().eq("name", task.getEnvName())); if (Asserts.isNotNull(task1)) { task.setEnvId(task1.getId()); - this.saveOrUpdateTask(task); + this.saveOrUpdateTask(task.buildTask()); } } @@ -1249,116 +812,4 @@ private List> dealWithCatalogue(List catalogueList) } return treeNodes; } - - @Override - public Result> queryOnLineTaskByDoneStatus( - List jobLifeCycle, List jobStatuses, boolean includeNull, Integer catalogueId) { - final Tree node = - queryAllCatalogue().getDatas().getNode(Objects.isNull(catalogueId) ? 0 : catalogueId); - final List parentIds = new ArrayList<>(0); - parentIds.add(node.getId()); - childrenNodeParse(node, parentIds); - final List taskList = getTasks(jobLifeCycle, jobStatuses, includeNull, parentIds); - return Result.succeed(taskList); - } - - private List getTasks( - List jobLifeCycle, - List jobStatuses, - boolean includeNull, - List parentIds) { - return this.baseMapper.queryOnLineTaskByDoneStatus( - parentIds, - jobLifeCycle.stream() - .filter(Objects::nonNull) - .map(JobLifeCycle::getValue) - .collect(Collectors.toList()), - includeNull, - jobStatuses.stream().map(JobStatus::name).collect(Collectors.toList())); - } - - private void childrenNodeParse(Tree node, List parentIds) { - final List> children = node.getChildren(); - if (CollectionUtils.isEmpty(children)) { - return; - } - - for (Tree child : children) { - parentIds.add(child.getId()); - if (!child.hasChild()) { - continue; - } - childrenNodeParse(child, parentIds); - } - } - - @Override - public void selectSavepointOnLineTask(TaskOperatingResult taskOperatingResult) { - final JobInstance jobInstanceByTaskId = jobInstanceService.getJobInstanceByTaskId( - taskOperatingResult.getTask().getId()); - if (jobInstanceByTaskId == null) { - startGoingLiveTask(taskOperatingResult, null); - return; - } - - if (!JobStatus.isDone(jobInstanceByTaskId.getStatus())) { - taskOperatingResult.setStatus(TaskOperatingStatus.TASK_STATUS_NO_DONE); - return; - } - - if (taskOperatingResult.getTaskOperatingSavepointSelect().equals(TaskOperatingSavepointSelect.DEFAULT_CONFIG)) { - startGoingLiveTask(taskOperatingResult, null); - return; - } - findTheConditionSavePointToOnline(taskOperatingResult, jobInstanceByTaskId); - } - - private void findTheConditionSavePointToOnline( - TaskOperatingResult taskOperatingResult, JobInstance jobInstanceByTaskId) { - final JobDataDto jobHistory = jobHistoryService.getJobHistoryDto(jobInstanceByTaskId.getId()); - if (jobHistory != null) { - final JsonNode jsonNodes = jobHistory.getCheckpoints(); - final ArrayNode history = jsonNodes.withArray("history"); - if 
(!history.isEmpty()) {
-                startGoingLiveTask(taskOperatingResult, findTheConditionSavePoint(history));
-                return;
-            }
-        }
-        startGoingLiveTask(taskOperatingResult, null);
-    }
-
-    private void startGoingLiveTask(TaskOperatingResult taskOperatingResult, String savepointPath) {
-        taskOperatingResult.setStatus(TaskOperatingStatus.OPERATING);
-        final Result result = reOnLineTask(taskOperatingResult.getTask().getId(), savepointPath);
-        taskOperatingResult.parseResult(result);
-    }
-
-    private String findTheConditionSavePoint(ArrayNode history) {
-        JsonNode latestCompletedJsonNode = null;
-        for (JsonNode item : history) {
-            if (!"COMPLETED".equals(item.get("status").asText())) {
-                continue;
-            }
-
-            if (latestCompletedJsonNode == null) {
-                latestCompletedJsonNode = item;
-                continue;
-            }
-
-            if (latestCompletedJsonNode.get("id").asInt() < item.get("id").asInt(-1)) {
-                latestCompletedJsonNode = item;
-            }
-        }
-
-        return latestCompletedJsonNode == null
-                ? null
-                : latestCompletedJsonNode.get("external_path").asText();
-    }
-
-    @Override
-    public void selectSavepointOffLineTask(TaskOperatingResult taskOperatingResult) {
-        taskOperatingResult.setStatus(TaskOperatingStatus.OPERATING);
-        final Result result = offLineTask(taskOperatingResult.getTask().getId(), SavePointType.CANCEL.getValue());
-        taskOperatingResult.parseResult(result);
-    }
 }
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java
index 95d04530418..f7bce348ae0 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java
@@ -19,17 +19,26 @@
 
 package org.dinky.service.impl;
 
+import org.dinky.assertion.Asserts;
+import org.dinky.data.dto.TaskDTO;
+import org.dinky.data.dto.TaskVersionConfigureDTO;
 import org.dinky.data.model.TaskVersion;
 import org.dinky.mapper.TaskVersionMapper;
 import org.dinky.mybatis.service.impl.SuperServiceImpl;
 import org.dinky.service.TaskVersionService;
 
+import java.time.LocalDateTime;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
 
 import org.springframework.stereotype.Service;
 
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 
+import cn.hutool.core.bean.BeanUtil;
+
 @Service
 public class TaskVersionServiceImpl extends SuperServiceImpl<TaskVersionMapper, TaskVersion> implements TaskVersionService {
 
@@ -41,4 +50,41 @@ public List<TaskVersion> getTaskVersionByTaskId(Integer taskId) {
                .eq(TaskVersion::getTaskId, taskId)
                .orderByDesc(true, TaskVersion::getVersionId));
    }
+
+    @Override
+    public void createTaskVersionSnapshot(TaskDTO task) {
+        List<TaskVersion> taskVersions = getTaskVersionByTaskId(task.getId());
+        List<Integer> versionIds =
+                taskVersions.stream().map(TaskVersion::getVersionId).collect(Collectors.toList());
+        Map<Integer, TaskVersion> versionMap =
+                taskVersions.stream().collect(Collectors.toMap(TaskVersion::getVersionId, t -> t));
+
+        TaskVersion taskVersion = new TaskVersion();
+        BeanUtil.copyProperties(task, taskVersion);
+
+        TaskVersionConfigureDTO taskVersionConfigureDTO = new TaskVersionConfigureDTO();
+        BeanUtil.copyProperties(task, taskVersionConfigureDTO);
+
+        taskVersion.setTaskConfigure(taskVersionConfigureDTO);
+        taskVersion.setTaskId(taskVersion.getId());
+        taskVersion.setId(null);
+
+        if (Asserts.isNull(task.getVersionId())) {
+            // First release: create version 1
+            taskVersion.setVersionId(1);
+            task.setVersionId(1);
+            taskVersion.setCreateTime(LocalDateTime.now());
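+            // Persist the very first snapshot as version 1; later publishes fall into the
+            // else branch below and bump versionId to max(existing) + 1, but only when the
+            // task no longer matches the version it was previously rolled back to.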
+            save(taskVersion);
+        } else {
+            // A version already exists: check whether the task was rolled back to an
+            // older version, and only snapshot a new one when its content has changed
+            TaskVersion version = versionMap.get(task.getVersionId());
+            version.setId(null);
+            if (versionIds.contains(task.getVersionId()) && !taskVersion.equals(version)) {
+                taskVersion.setVersionId(Collections.max(versionIds) + 1);
+                task.setVersionId(Collections.max(versionIds) + 1);
+                taskVersion.setCreateTime(LocalDateTime.now());
+                save(taskVersion);
+            }
+        }
+    }
 }
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java
index 3eb9b7b2279..21569f66196 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java
@@ -20,6 +20,7 @@
 package org.dinky.service.impl;
 
 import org.dinky.assertion.Asserts;
+import org.dinky.context.RowLevelPermissionsContext;
 import org.dinky.context.TenantContextHolder;
 import org.dinky.context.UserInfoContextHolder;
 import org.dinky.data.dto.LoginDTO;
@@ -55,6 +56,7 @@
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 import org.springframework.stereotype.Service;
@@ -366,6 +368,21 @@ public List<RowPermissions> getCurrentRoleSelectPermissions() {
        return roleSelectPermissionsService.listRoleSelectPermissionsByRoleIds(roleIds);
    }
 
+    @Override
+    public void buildRowPermission() {
+        List<RowPermissions> currentRoleSelectPermissions = getCurrentRoleSelectPermissions();
+        if (Asserts.isNotNullCollection(currentRoleSelectPermissions)) {
+            ConcurrentHashMap<String, String> permission = new ConcurrentHashMap<>();
+            for (RowPermissions roleSelectPermissions : currentRoleSelectPermissions) {
+                if (Asserts.isAllNotNullString(
+                        roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression())) {
+                    permission.put(roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression());
+                }
+            }
+            RowLevelPermissionsContext.set(permission);
+        }
+    }
+
    @Override
    public void outLogin() {
        StpUtil.logout(StpUtil.getLoginIdAsInt());
diff --git a/dinky-admin/src/main/java/org/dinky/utils/TaskOneClickOperatingUtil.java b/dinky-admin/src/main/java/org/dinky/utils/TaskOneClickOperatingUtil.java
deleted file mode 100644
index 20ca0749433..00000000000
--- a/dinky-admin/src/main/java/org/dinky/utils/TaskOneClickOperatingUtil.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * - */ - -package org.dinky.utils; - -import org.dinky.context.SpringContextUtils; -import org.dinky.data.enums.CodeEnum; -import org.dinky.data.enums.JobLifeCycle; -import org.dinky.data.enums.JobStatus; -import org.dinky.data.enums.TaskOperatingSavepointSelect; -import org.dinky.data.enums.TaskOperatingStatus; -import org.dinky.data.model.Task; -import org.dinky.data.result.Result; -import org.dinky.data.result.TaskOperatingResult; -import org.dinky.service.TaskService; - -import org.apache.commons.collections4.CollectionUtils; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.fasterxml.jackson.databind.JsonNode; - -import cn.hutool.core.exceptions.ExceptionUtil; -import cn.hutool.core.lang.Dict; - -/** @version 1.0 */ -public class TaskOneClickOperatingUtil { - - private static List oneClickOnlineCache = new ArrayList<>(0); - - private static List oneClickOfflineCache = new ArrayList<>(0); - - private static final AtomicBoolean oneClickOnlineThreadStatus = new AtomicBoolean(false); - - private static final AtomicBoolean oneClickOfflineThreadStatus = new AtomicBoolean(false); - - public static synchronized Result oneClickOnline( - List tasks, TaskOperatingSavepointSelect taskOperatingSavepointSelect) { - if (oneClickOnlineThreadStatus.get() || oneClickOfflineThreadStatus.get()) { - return Result.failed("存在一键上线或者下线操作,请稍后重试"); - } - final TaskService taskService = SpringContextUtils.getBeanByClass(TaskService.class); - if (CollectionUtils.isEmpty(tasks)) { - final Result> listResult = taskService.queryOnLineTaskByDoneStatus( - Collections.singletonList(JobLifeCycle.RELEASE), JobStatus.getAllDoneStatus(), true, 0); - if (CollectionUtils.isEmpty(listResult.getDatas())) { - return Result.succeed("没有需要上线的任务"); - } - tasks = listResult.getDatas(); - } - oneClickOnlineCache = tasks.stream() - .map(task -> new TaskOperatingResult(task, taskOperatingSavepointSelect)) - .collect(Collectors.toList()); - new OneClickOperatingThread( - "oneClickOnlineThread", - oneClickOnlineCache, - oneClickOnlineThreadStatus, - taskService::selectSavepointOnLineTask) - .start(); - return Result.succeed("success"); - } - - public static synchronized Result onClickOffline(List tasks) { - if (oneClickOnlineThreadStatus.get() || oneClickOfflineThreadStatus.get()) { - return Result.failed("存在一键上线或者下线操作,请稍后重试"); - } - final TaskService taskService = SpringContextUtils.getBeanByClass(TaskService.class); - if (CollectionUtils.isEmpty(tasks)) { - final Result> listResult = taskService.queryOnLineTaskByDoneStatus( - Collections.singletonList(JobLifeCycle.ONLINE), - Collections.singletonList(JobStatus.RUNNING), - false, - 0); - if (CollectionUtils.isEmpty(listResult.getDatas())) { - return Result.succeed("没有需要下线的任务"); - } - tasks = listResult.getDatas(); - } - oneClickOfflineCache = tasks.stream().map(TaskOperatingResult::new).collect(Collectors.toList()); - new OneClickOperatingThread( - "oneClickOfflineThread", - oneClickOfflineCache, - oneClickOfflineThreadStatus, - taskService::selectSavepointOffLineTask) - .start(); - return Result.succeed("success"); - } - - public static Result queryOneClickOperatingTaskStatus() { - Dict dict = Dict.create() - .set("online", oneClickOnlineCache) - .set("onlineStatus", oneClickOnlineThreadStatus.get()) - .set("offline", oneClickOfflineCache) - 
.set("offlineStatus", oneClickOfflineThreadStatus.get()); - - return Result.succeed(dict); - } - - public static List parseJsonNode(JsonNode operating) { - final JsonNode tasksJsonNode = operating.withArray("tasks"); - if (tasksJsonNode == null || tasksJsonNode.isEmpty()) { - return null; - } - final List result = new ArrayList<>(tasksJsonNode.size()); - for (JsonNode node : tasksJsonNode) { - final Task task = new Task(); - task.setId(node.get("id").asInt()); - task.setName(node.get("name").asText()); - result.add(task); - } - return result; - } - - private static class OneClickOperatingThread extends Thread { - - private static final Logger LOGGER = LoggerFactory.getLogger(OneClickOperatingThread.class); - - private final String threadName; - - private final List taskOperatingResults; - - private final AtomicBoolean threadStatus; - - private final Consumer consumer; - - public OneClickOperatingThread( - String threadName, - List taskOperatingResults, - AtomicBoolean threadStatus, - Consumer consumer) { - super(threadName); - this.threadName = threadName; - this.threadStatus = threadStatus; - this.threadStatus.set(true); - this.taskOperatingResults = taskOperatingResults; - this.consumer = consumer; - } - - @Override - public void run() { - try { - if (CollectionUtils.isEmpty(taskOperatingResults)) { - return; - } - for (TaskOperatingResult taskOperatingResult : taskOperatingResults) { - try { - taskOperatingResult.setStatus(TaskOperatingStatus.OPERATING_BEFORE); - consumer.accept(taskOperatingResult); - } catch (Throwable e) { - exceptionDealWith(taskOperatingResult, e); - } - } - } finally { - this.threadStatus.set(false); - } - } - - private void exceptionDealWith(TaskOperatingResult taskOperatingResult, Throwable e) { - taskOperatingResult.setStatus(TaskOperatingStatus.EXCEPTION); - taskOperatingResult.setCode(CodeEnum.EXCEPTION.getCode()); - taskOperatingResult.setMessage(ExceptionUtil.stacktraceToString(e)); - LOGGER.error( - "[{}], taskId={}, taskName={}, operating exception", - threadName, - taskOperatingResult.getTask().getId(), - taskOperatingResult.getTask().getName(), - e); - } - } -} diff --git a/dinky-common/src/main/java/org/dinky/config/Dialect.java b/dinky-common/src/main/java/org/dinky/config/Dialect.java index cec34f8fb8c..1ce5c28afa7 100644 --- a/dinky-common/src/main/java/org/dinky/config/Dialect.java +++ b/dinky-common/src/main/java/org/dinky/config/Dialect.java @@ -77,7 +77,7 @@ public static Dialect get(String value) { * @param value {@link Dialect} * @return If is flink sql, return false, otherwise return true. 
 */
-    public static boolean notFlinkSql(String value) {
+    public static boolean isCommonSql(String value) {
        Dialect dialect = Dialect.get(value);
        switch (dialect) {
            case SQL:
diff --git a/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java b/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java
index 54cfe647811..73cb91d5203 100644
--- a/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java
+++ b/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java
@@ -28,12 +28,8 @@
 */
 public enum JobLifeCycle {
    UNKNOWN(0, "未知"),
-    CREATE(1, "创建"),
-    DEVELOP(2, "开发"),
-    DEBUG(3, "调试"),
-    RELEASE(4, "发布"),
-    ONLINE(5, "上线"),
-    CANCEL(6, "注销");
+    DEVELOP(1, "开发"),
+    ONLINE(2, "上线");
 
    private Integer value;
    private String label;
diff --git a/dinky-common/src/main/java/org/dinky/data/enums/Status.java b/dinky-common/src/main/java/org/dinky/data/enums/Status.java
index 010248803d3..a3f77d450a9 100644
--- a/dinky-common/src/main/java/org/dinky/data/enums/Status.java
+++ b/dinky-common/src/main/java/org/dinky/data/enums/Status.java
@@ -171,6 +171,10 @@ public enum Status {
    TASK_NOT_EXIST(12003, "task.not.exist"),
    JOB_INSTANCE_NOT_EXIST(12004, "job.instance.not.exist"),
    SAVEPOINT_IS_NULL(12005, "savepoint.is.null"),
+    TASK_STATUS_IS_NOT_DONE(12006, "task.status.is.not.done"),
+    TASK_SQL_EXPLAN_FAILED(12007, "task.sql.explain.failed"),
+    TASK_UPDATE_FAILED(12008, "task.update.failed"),
+    TASK_IS_ONLINE(12009, "task.is.online"),
 
    /**
     * alert instance
@@ -195,6 +199,7 @@ public enum Status {
    CLUSTER_INSTANCE_RECYCLE_SUCCESS(15002, "cluster.instance.recycle.success"),
    CLUSTER_INSTANCE_KILL(15003, "cluster.instance.kill"),
    CLUSTER_INSTANCE_DEPLOY(15004, "cluster.instance.deploy"),
+    CLUSTER_NOT_EXIST(15005, "cluster.not.exist"),
 
    /**
     * git
diff --git a/dinky-common/src/main/resources/i18n/messages_en_US.properties b/dinky-common/src/main/resources/i18n/messages_en_US.properties
index 8a80231200f..3fe1c46ed6e 100644
--- a/dinky-common/src/main/resources/i18n/messages_en_US.properties
+++ b/dinky-common/src/main/resources/i18n/messages_en_US.properties
@@ -92,6 +92,7 @@ tenant.name.exist=Tenant Already Exists
 failed=Failed
 added.failed=Added Failed
 task.not.exist=Task Not Exist
+task.is.online=Task is online; modification is prohibited
 cluster.instance.deploy=Deploy Success
 clear.failed=Clear Failed
 rename.success=Rename Successfully
@@ -119,6 +120,7 @@ git.sort.failed=Git Sort Failed
 query.failed=Query Failed
 save.success=Save Successfully
 cluster.instance.kill=Kill Success
+cluster.not.exist=Cluster Not Exist
 operate.failed=Operate Failed
 test.connection.failed=Test Connection Failed
 switching.tenant.success=Select Tenant Success
@@ -231,4 +233,8 @@ sys.resource.settings.hdfs.fs.defaultFS.note=fs.defaultFS configuration items, s
 
 #Dinky Gateway
-gateway.kubernetes.test.failed= failed to test the Flink configuration:
\ No newline at end of file
+gateway.kubernetes.test.failed=Failed to test the Flink configuration:
+
+task.status.is.not.done=A job is still running for this task, so it cannot go online; please stop the job first and try again
+task.sql.explain.failed=SQL parsing failed, please check the SQL statement
+task.update.failed=Task update failed
diff --git a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties
index 93f355b5b72..227ce0b0437 100644
--- a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties
+++ b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties
@@ -92,6
+92,7 @@ tenant.name.exist=租户已存在 failed=获取失败 added.failed=新增失败 task.not.exist=任务不存在 +task.is.online=任务已上线,禁止修改 cluster.instance.deploy=部署完成 clear.failed=清除失败 rename.success=重命名成功 @@ -119,6 +120,7 @@ git.sort.failed=排序失败 query.failed=查询失败 save.success=保存成功 cluster.instance.kill=已杀死该进程/集群 +cluster.not.exist=集群不存在 operate.failed=操作失败 test.connection.failed=测试连接失败 switching.tenant.success=选择租户成功 @@ -229,3 +231,8 @@ sys.resource.settings.hdfs.fs.defaultFS.note=fs.defaultFS 配置项,例如远 #Dinky Gateway gateway.kubernetes.test.failed=测试 Flink 配置失败: + +# Task +task.status.is.not.done=当前发布状态下有作业正在运行,上线失败,请停止后上线 +task.sql.explain.failed=sql解析失败,请检查 +task.update.failed=Task更新失败 \ No newline at end of file diff --git a/dinky-core/src/main/java/org/dinky/data/result/ResultPool.java b/dinky-core/src/main/java/org/dinky/data/result/ResultPool.java index b3eb895353c..fd6802b71f9 100644 --- a/dinky-core/src/main/java/org/dinky/data/result/ResultPool.java +++ b/dinky-core/src/main/java/org/dinky/data/result/ResultPool.java @@ -19,6 +19,8 @@ package org.dinky.data.result; +import org.dinky.metadata.result.JdbcSelectResult; + import java.util.concurrent.TimeUnit; import cn.hutool.cache.Cache; @@ -33,6 +35,8 @@ public final class ResultPool { private ResultPool() {} + private static final Cache COMMON_SQL_SEARCH_CACHE = + new TimedCache<>(TimeUnit.MINUTES.toMillis(10)); private static final Cache results = new TimedCache<>(TimeUnit.MINUTES.toMillis(10)); public static boolean containsKey(String key) { @@ -43,6 +47,14 @@ public static void put(SelectResult result) { results.put(result.getJobId(), result); } + public static void putCommonSqlCache(Integer taskId, JdbcSelectResult result) { + COMMON_SQL_SEARCH_CACHE.put(taskId, result); + } + + public static JdbcSelectResult getCommonSqlCache(Integer taskId) { + return COMMON_SQL_SEARCH_CACHE.get(taskId); + } + public static SelectResult get(String key) { if (containsKey(key)) { return results.get(key); diff --git a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java index a96438a2420..1448ab428e9 100644 --- a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java +++ b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java @@ -32,6 +32,7 @@ import org.dinky.explainer.printTable.PrintStatementExplainer; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; +import org.dinky.gateway.enums.GatewayType; import org.dinky.interceptor.FlinkInterceptor; import org.dinky.job.JobConfig; import org.dinky.job.JobManager; @@ -362,14 +363,14 @@ public JobPlanInfo getJobPlanInfo(String statement) { } public List getLineage(String statement) { - JobConfig jobConfig = new JobConfig( - "local", - false, - false, - true, - useStatementSet, - 1, - executor.getTableConfig().getConfiguration().toMap()); + JobConfig jobConfig = JobConfig.builder() + .type(GatewayType.LOCAL.getLongValue()) + .useRemote(false) + .fragment(true) + .statementSet(useStatementSet) + .parallelism(1) + .configJson(executor.getTableConfig().getConfiguration().toMap()) + .build(); this.initialize(JobManager.buildPlanMode(jobConfig), jobConfig, statement); List lineageRelList = new ArrayList<>(); diff --git a/dinky-core/src/main/java/org/dinky/job/JobConfig.java b/dinky-core/src/main/java/org/dinky/job/JobConfig.java index 7ac2271730c..0bbea9771a7 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobConfig.java +++ b/dinky-core/src/main/java/org/dinky/job/JobConfig.java @@ -22,6 +22,7 @@ import 
org.dinky.assertion.Asserts; import org.dinky.data.constant.NetConstant; import org.dinky.executor.ExecutorConfig; +import org.dinky.gateway.config.FlinkConfig; import org.dinky.gateway.config.GatewayConfig; import org.dinky.gateway.enums.GatewayType; import org.dinky.gateway.enums.SavePointStrategy; @@ -35,25 +36,59 @@ import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.Getter; -import lombok.Setter; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; /** * JobConfig * * @since 2021/6/27 18:45 */ -@Getter -@Setter +@Data +@Builder +@AllArgsConstructor @ApiModel(value = "JobConfig", description = "Configuration details of a job") public class JobConfig { @ApiModelProperty(value = "Flink run mode", dataType = "String", example = "batch", notes = "Flink run mode") private String type; + @ApiModelProperty(value = "Check Point", dataType = "Integer", example = "1", notes = "Check point for the task") + private Integer checkpoint; + + @ApiModelProperty(value = "Save point strategy", dataType = "SavePointStrategy", notes = "Save point strategy") + private SavePointStrategy savePointStrategy; + + @ApiModelProperty(value = "Save Point Path", dataType = "String", notes = "Save point path for the task") + private String savePointPath; + + @ApiModelProperty(value = "Parallelism level", dataType = "Integer", example = "4", notes = "Parallelism level") + private Integer parallelism; + + @ApiModelProperty(value = "Cluster ID", dataType = "Integer", example = "456", notes = "Cluster ID") + private Integer clusterId; + + @ApiModelProperty( + value = "Cluster configuration ID", + dataType = "Integer", + example = "789", + notes = "Cluster configuration ID") + private Integer clusterConfigurationId; + + @ApiModelProperty(value = "JAR file ID", dataType = "Integer", example = "101", notes = "JAR file ID") + private Integer jarId; + @ApiModelProperty(value = "Task JobLifeCycle", dataType = "Integer", example = "2", notes = "Task JobLifeCycle") private Integer step; + @ApiModelProperty( + value = "JSON configuration", + dataType = "Map", + example = "{\"config1\": \"value1\", \"config2\": \"value2\"}", + notes = "JSON configuration") + private Map configJson; + @ApiModelProperty( value = "Flag indicating whether to use the result", dataType = "boolean", @@ -75,13 +110,6 @@ public class JobConfig { notes = "Flag indicating whether to use auto-cancel") private boolean useAutoCancel; - @ApiModelProperty( - value = "Flag indicating whether to use session", - dataType = "boolean", - example = "true", - notes = "Flag indicating whether to use session") - private boolean useSession; - @ApiModelProperty( value = "Session information", dataType = "String", @@ -96,19 +124,6 @@ public class JobConfig { notes = "Flag indicating whether to use remote execution") private boolean useRemote; - @ApiModelProperty(value = "Cluster ID", dataType = "Integer", example = "456", notes = "Cluster ID") - private Integer clusterId; - - @ApiModelProperty( - value = "Cluster configuration ID", - dataType = "Integer", - example = "789", - notes = "Cluster configuration ID") - private Integer clusterConfigurationId; - - @ApiModelProperty(value = "JAR file ID", dataType = "Integer", example = "101", notes = "JAR file ID") - private Integer jarId; - @ApiModelProperty( value = "Flag indicating whether it's a JAR task", dataType = "boolean", @@ -148,21 +163,21 @@ public class JobConfig { dataType = "boolean", example = "true", notes = "Flag indicating whether to use 
SQL fragment") - private boolean useSqlFragment; + private boolean fragment; @ApiModelProperty( value = "Flag indicating whether to use statement set", dataType = "boolean", example = "false", notes = "Flag indicating whether to use statement set") - private boolean useStatementSet; + private boolean statementSet; @ApiModelProperty( value = "Flag indicating whether to use batch model", dataType = "boolean", example = "true", notes = "Flag indicating whether to use batch model") - private boolean useBatchModel; + private boolean batchModel; @ApiModelProperty( value = "Maximum number of rows", @@ -171,26 +186,6 @@ public class JobConfig { notes = "Maximum number of rows") private Integer maxRowNum; - @ApiModelProperty( - value = "Checkpoint interval", - dataType = "Integer", - example = "5000", - notes = "Checkpoint interval") - private Integer checkpoint; - - @ApiModelProperty(value = "Parallelism level", dataType = "Integer", example = "4", notes = "Parallelism level") - private Integer parallelism; - - @ApiModelProperty(value = "Save point strategy", dataType = "SavePointStrategy", notes = "Save point strategy") - private SavePointStrategy savePointStrategy; - - @ApiModelProperty( - value = "Path for save points", - dataType = "String", - example = "/savepoints", - notes = "Path for save points") - private String savePointPath; - @ApiModelProperty(value = "Gateway configuration", dataType = "GatewayConfig", notes = "Gateway configuration") private GatewayConfig gatewayConfig; @@ -201,15 +196,8 @@ public class JobConfig { notes = "Map of variables") private Map variables; - @ApiModelProperty( - value = "JSON configuration", - dataType = "Map", - example = "{\"config1\": \"value1\", \"config2\": \"value2\"}", - notes = "JSON configuration") - private Map configJson; - public JobConfig() { - this.configJson = new HashMap(); + this.configJson = new HashMap<>(); } public void setAddress(String address) { @@ -227,174 +215,14 @@ public void setAddress(String address) { } } - public JobConfig( - String type, - boolean useSession, - boolean useRemote, - boolean useSqlFragment, - boolean useStatementSet, - Integer parallelism, - Map configJson) { - this.type = type; - this.useSession = useSession; - this.useRemote = useRemote; - this.useSqlFragment = useSqlFragment; - this.useStatementSet = useStatementSet; - this.parallelism = parallelism; - this.configJson = configJson; - } - - public JobConfig( - String type, - boolean useResult, - boolean useChangeLog, - boolean useAutoCancel, - boolean useSession, - String session, - Integer clusterId, - Integer clusterConfigurationId, - Integer jarId, - Integer taskId, - String jobName, - boolean useSqlFragment, - boolean useStatementSet, - boolean useBatchModel, - Integer maxRowNum, - Integer checkpoint, - Integer parallelism, - Integer savePointStrategyValue, - String savePointPath, - Map variables, - Map configJson) { - this.type = type; - this.useResult = useResult; - this.useChangeLog = useChangeLog; - this.useAutoCancel = useAutoCancel; - this.useSession = useSession; - this.session = session; - this.useRemote = true; - this.clusterId = clusterId; - this.clusterConfigurationId = clusterConfigurationId; - this.jarId = jarId; - this.taskId = taskId; - this.jobName = jobName; - this.useSqlFragment = useSqlFragment; - this.useStatementSet = useStatementSet; - this.useBatchModel = useBatchModel; - this.maxRowNum = maxRowNum; - this.checkpoint = checkpoint; - this.parallelism = parallelism; - this.savePointStrategy = 
SavePointStrategy.get(savePointStrategyValue); - this.savePointPath = savePointPath; - this.variables = variables; - this.configJson = configJson; - } - - public JobConfig( - String type, - boolean useResult, - boolean useChangeLog, - boolean useAutoCancel, - boolean useSession, - String session, - boolean useRemote, - String address, - String jobName, - boolean useSqlFragment, - boolean useStatementSet, - Integer maxRowNum, - Integer checkpoint, - Integer parallelism, - Integer savePointStrategyValue, - String savePointPath, - Map configJson, - GatewayConfig gatewayConfig) { - this.type = type; - this.useResult = useResult; - this.useChangeLog = useChangeLog; - this.useAutoCancel = useAutoCancel; - this.useSession = useSession; - this.session = session; - this.useRemote = useRemote; - this.jobName = jobName; - this.useSqlFragment = useSqlFragment; - this.useStatementSet = useStatementSet; - this.maxRowNum = maxRowNum; - this.checkpoint = checkpoint; - this.parallelism = parallelism; - this.savePointStrategy = SavePointStrategy.get(savePointStrategyValue); - this.savePointPath = savePointPath; - this.configJson = configJson; - this.gatewayConfig = gatewayConfig; - setAddress(address); - } - - public JobConfig( - String type, - boolean useResult, - boolean useSession, - String session, - boolean useRemote, - Integer clusterId, - Integer maxRowNum) { - this.type = type; - this.useResult = useResult; - this.useSession = useSession; - this.session = session; - this.useRemote = useRemote; - this.clusterId = clusterId; - this.maxRowNum = maxRowNum; - } - - public JobConfig( - String type, - Integer step, - boolean useResult, - boolean useSession, - boolean useRemote, - Integer clusterId, - Integer clusterConfigurationId, - Integer jarId, - Integer taskId, - String jobName, - boolean useSqlFragment, - boolean useStatementSet, - boolean useBatchModel, - Integer checkpoint, - Integer parallelism, - Integer savePointStrategyValue, - String savePointPath, - Map configJson, - boolean isJarTask) { - this.type = type; - this.step = step; - this.useResult = useResult; - this.useSession = useSession; - this.useRemote = useRemote; - this.clusterId = clusterId; - this.clusterConfigurationId = clusterConfigurationId; - this.jarId = jarId; - this.taskId = taskId; - this.jobName = jobName; - this.useSqlFragment = useSqlFragment; - this.useStatementSet = useStatementSet; - this.useBatchModel = useBatchModel; - this.checkpoint = checkpoint; - this.parallelism = parallelism; - this.savePointStrategy = SavePointStrategy.get(savePointStrategyValue); - this.savePointPath = savePointPath; - this.configJson = configJson; - this.isJarTask = isJarTask; - } - public ExecutorConfig getExecutorSetting() { return ExecutorConfig.build( address, checkpoint, parallelism, - useSqlFragment, - useStatementSet, - useBatchModel, + fragment, + statementSet, + batchModel, savePointPath, jobName, configJson, @@ -402,14 +230,13 @@ public ExecutorConfig getExecutorSetting() { } public void buildGatewayConfig(FlinkClusterConfig config) { + FlinkConfig flinkConfig = config.getFlinkConfig(); + flinkConfig.getConfiguration().putAll(getConfigJson()); + flinkConfig.getConfiguration().put(CoreOptions.DEFAULT_PARALLELISM.key(), String.valueOf(parallelism)); + flinkConfig.setJobName(getJobName()); + gatewayConfig = GatewayConfig.build(config); gatewayConfig.setTaskId(getTaskId()); - gatewayConfig.getFlinkConfig().setJobName(getJobName()); - gatewayConfig - .getFlinkConfig() - .getConfiguration() - .put(CoreOptions.DEFAULT_PARALLELISM.key(), 
String.valueOf(parallelism)); - setUseRemote(false); // todo: remove } public void addGatewayConfig(Map config) { diff --git a/dinky-core/src/main/java/org/dinky/job/JobManager.java b/dinky-core/src/main/java/org/dinky/job/JobManager.java index e3a36e1ad3a..f61e856d920 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobManager.java +++ b/dinky-core/src/main/java/org/dinky/job/JobManager.java @@ -176,7 +176,7 @@ public boolean init() { useGateway = GatewayType.isDeployCluster(config.getType()); handler = JobHandler.build(); } - useStatementSet = config.isUseStatementSet(); + useStatementSet = config.isStatementSet(); useRestAPI = SystemConfiguration.getInstances().isUseRestAPI(); sqlSeparator = SystemConfiguration.getInstances().getSqlSeparator(); executorConfig = config.getExecutorSetting(); diff --git a/dinky-core/src/test/java/org/dinky/core/JobManagerTest.java b/dinky-core/src/test/java/org/dinky/core/JobManagerTest.java index 57a2ff28c74..e16a0465ccb 100644 --- a/dinky-core/src/test/java/org/dinky/core/JobManagerTest.java +++ b/dinky-core/src/test/java/org/dinky/core/JobManagerTest.java @@ -21,12 +21,11 @@ import org.dinky.data.result.ResultPool; import org.dinky.data.result.SelectResult; +import org.dinky.gateway.enums.GatewayType; import org.dinky.job.JobConfig; import org.dinky.job.JobManager; import org.dinky.job.JobResult; -import java.util.HashMap; - import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; @@ -45,29 +44,20 @@ public class JobManagerTest { @Ignore @Test public void cancelJobSelect() { - - JobConfig config = new JobConfig( - "session-yarn", - true, - true, - true, - true, - "s1", - 2, - null, - null, - null, - "测试", - false, - false, - false, - 100, - 0, - 1, - 0, - null, - new HashMap<>(), - new HashMap<>()); + JobConfig config = JobConfig.builder() + .type(GatewayType.YARN_SESSION.getLongValue()) + .useResult(true) + .useChangeLog(true) + .useAutoCancel(true) + .session("s1") + .clusterId(2) + .jobName("Test") + .fragment(false) + .statementSet(false) + .batchModel(false) + .maxRowNum(100) + .parallelism(1) + .build(); if (config.isUseRemote()) { config.setAddress("192.168.123.157:8081"); } diff --git a/dinky-process/src/main/java/org/dinky/process/exception/ExcuteException.java b/dinky-process/src/main/java/org/dinky/process/exception/ExcuteException.java new file mode 100644 index 00000000000..ba29fecbfed --- /dev/null +++ b/dinky-process/src/main/java/org/dinky/process/exception/ExcuteException.java @@ -0,0 +1,48 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+
+package org.dinky.process.exception;
+
+import org.dinky.process.context.ProcessContextHolder;
+import org.dinky.utils.LogUtil;
+
+public class ExcuteException extends Exception {
+
+    public ExcuteException() {}
+
+    public ExcuteException(String message) {
+        super(message);
+        ProcessContextHolder.getProcess().error(message);
+    }
+
+    public ExcuteException(String message, Throwable cause) {
+        super(message, cause);
+        ProcessContextHolder.getProcess().error(LogUtil.getError(cause));
+    }
+
+    public ExcuteException(Throwable cause) {
+        super(cause);
+        ProcessContextHolder.getProcess().error(cause.toString());
+    }
+
+    public ExcuteException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+        super(message, cause, enableSuppression, writableStackTrace);
+        ProcessContextHolder.getProcess().error(LogUtil.getError(cause));
+    }
+}
diff --git a/dinky-process/src/main/java/org/dinky/process/model/ProcessEntity.java b/dinky-process/src/main/java/org/dinky/process/model/ProcessEntity.java
index fc76eaebbbd..318d0a2c9d5 100644
--- a/dinky-process/src/main/java/org/dinky/process/model/ProcessEntity.java
+++ b/dinky-process/src/main/java/org/dinky/process/model/ProcessEntity.java
@@ -30,6 +30,7 @@
 import java.util.UUID;
 
 import cn.hutool.core.text.CharSequenceUtil;
+import cn.hutool.core.text.StrFormatter;
 
 /**
  * Process
@@ -163,6 +164,10 @@ public void info(String str) {
         ConsolePool.write(message, userId);
     }
 
+    public void info(String strPattern, Object... argArray) {
+        info(StrFormatter.format(strPattern, argArray));
+    }
+
     public void infoSuccess() {
         if (isNullProcess()) {
             return;
@@ -189,6 +194,10 @@ public void error(String str) {
         ConsolePool.write(message, userId);
     }
 
+    public void error(String strPattern, Object... argArray) {
+        error(StrFormatter.format(strPattern, argArray));
+    }
+
     public void nextStep() {
         if (isNullProcess()) {
             return;
diff --git a/dinky-web/src/components/CallBackButton/LoadingBtn.tsx b/dinky-web/src/components/CallBackButton/LoadingBtn.tsx
new file mode 100644
index 00000000000..71207096d5a
--- /dev/null
+++ b/dinky-web/src/components/CallBackButton/LoadingBtn.tsx
@@ -0,0 +1,26 @@
+import React, {useState} from "react";
+import {Button} from "antd";
+import {ButtonProps} from "antd/es/button/button";
+
+// Wraps the antd Button and holds it in the loading state until an async
+// onClick handler settles, preventing duplicate submits from double clicks.
+export const LoadingBtn: React.FC<ButtonProps> = (props) => {
+
+  const [loading, setLoading] = useState(false);
+
+  const handleClick = async (event: React.MouseEvent<HTMLElement, MouseEvent>) => {
+    if (props.onClick) {
+      setLoading(true);
+      await props.onClick(event);
+      setLoading(false);
+    }
+  };
+
+  return (
+    <Button {...props} loading={loading} onClick={handleClick} />
+  );
+};
diff --git a/dinky-web/src/locales/en-US/global.ts b/dinky-web/src/locales/en-US/global.ts
index 97b9792bdf7..32321aa3a87 100644
--- a/dinky-web/src/locales/en-US/global.ts
+++ b/dinky-web/src/locales/en-US/global.ts
@@ -36,6 +36,8 @@ export default {
   'button.edit': 'Edit',
   'button.start': 'Start',
   'button.stop': 'Stop',
+  'button.publish': 'Publish',
+  'button.offline': 'Offline',
   'button.assignUser': 'Assign Users',
   'button.assignRole': 'Assign Role',
   'button.changePassword': 'Change Password',
diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts
index 6ffa381b6ea..f880ce18042 100644
--- a/dinky-web/src/locales/en-US/pages.ts
+++ b/dinky-web/src/locales/en-US/pages.ts
@@ -309,7 +309,7 @@ export default {
   'pages.datastudio.editor.stop.job': 'Stop job',
   'pages.datastudio.editor.stop.jobConfirm': 'Are you sure to stop the job [{jobName}]? 
', 'pages.datastudio.editor.submitting': 'The new task [{jobName}] is executing', - 'pages.datastudio.to.jobDetail': 'Go to Job Detail', + 'pages.datastudio.to.jobDetail': 'Operation Center', 'pages.datastudio.explain.validate': 'Validating...', 'pages.datastudio.explain.validate.allright': 'All Right', 'pages.datastudio.explain.validate.error': 'There are errors, a total of {errorCount}', diff --git a/dinky-web/src/locales/zh-CN/global.ts b/dinky-web/src/locales/zh-CN/global.ts index d505563bc0d..8d04e820e0d 100644 --- a/dinky-web/src/locales/zh-CN/global.ts +++ b/dinky-web/src/locales/zh-CN/global.ts @@ -19,7 +19,7 @@ export default { 'button.check': '检查', - 'button.graph': '图', + 'button.graph': 'DAG', 'button.create': '新建', 'button.recovery': '恢复', 'button.close': '关闭', @@ -38,6 +38,8 @@ export default { 'button.edit': '编辑', 'button.start': '启动', 'button.stop': '停止', + 'button.publish': '发布', + 'button.offline': '下线', 'button.assignUser': '分配用户', 'button.assignRole': '分配角色', 'button.changePassword': '修改密码', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 506529ad601..4b454cc305c 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -290,8 +290,8 @@ export default { * * */ - 'pages.datastudio.editor.check': '检查当前的 FlinkSql', - 'pages.datastudio.editor.exec': '执行', + 'pages.datastudio.editor.check': '检查', + 'pages.datastudio.editor.exec': '运行', 'pages.datastudio.editor.exec.error': '任务【{jobName}】执行失败', 'pages.datastudio.editor.exec.success': '执行成功', 'pages.datastudio.editor.execute.warn': @@ -302,7 +302,7 @@ export default { 'pages.datastudio.editor.stop.job': '停止作业', 'pages.datastudio.editor.stop.jobConfirm': '确定停止作业【{jobName}】吗?', 'pages.datastudio.editor.submitting': '新任务【{jobName}】正在执行', - 'pages.datastudio.to.jobDetail': '跳转到作业详情', + 'pages.datastudio.to.jobDetail': '运维', 'pages.datastudio.explain.validate': '正在校验中...', 'pages.datastudio.explain.validate.allright': '全部正确', 'pages.datastudio.explain.validate.error': '存在错误,共计{errorCount}个', diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/Explain/index.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/Explain/index.tsx index 50c57942558..8f9a5ec0e33 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/Explain/index.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/Explain/index.tsx @@ -67,7 +67,7 @@ const Explain: React.FC = (props: any) => { ...current, // useSession: useSession, // session: currentSession.session, - configJson: JSON.stringify(current?.config), + configJson: current?.config, taskId: current?.id }; setResult({l('pages.datastudio.explain.validate')}); diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/function.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/function.tsx index d31eace2096..4df927768aa 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/function.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/function.tsx @@ -17,7 +17,8 @@ * */ -import { TabsPageType } from '@/pages/DataStudio/model'; +import { TabsPageType, TaskDataType } from '@/pages/DataStudio/model'; +import { JOB_LIFE_CYCLE, JOB_STATUS } from '@/pages/DevOps/constants'; import { HomeOutlined } from '@ant-design/icons'; /** @@ -35,6 +36,20 @@ export const buildBreadcrumbItems = (breadcrumb: string) => { return items; }; -export const projectCommonShow = (type?: TabsPageType, subType?: string, data?: any) => { +export const projectCommonShow = (type?: TabsPageType) => { return type === 
TabsPageType.project; }; + +export const isOnline = (data: TaskDataType | undefined) => { + if (data) { + return JOB_LIFE_CYCLE.ONLINE == data.step; + } + return false; +}; + +export const isRunning = (data: TaskDataType | undefined) => { + if (data) { + return JOB_STATUS.RUNNING == data.status; + } + return false; +}; diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx index 6b3f2c2a685..978f38d3644 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx @@ -16,48 +16,54 @@ * limitations under the License. * */ +import { LoadingBtn } from '@/components/CallBackButton/LoadingBtn'; import { FlexCenterDiv } from '@/components/StyledComponents'; import { getCurrentData, getCurrentTab, mapDispatchToProps } from '@/pages/DataStudio/function'; import Explain from '@/pages/DataStudio/HeaderContainer/Explain'; import FlinkGraph from '@/pages/DataStudio/HeaderContainer/FlinkGraph'; import { buildBreadcrumbItems, + isOnline, + isRunning, projectCommonShow } from '@/pages/DataStudio/HeaderContainer/function'; import { + cancelTask, executeSql, getJobPlan, - isOnline, - isSql, - offLineTask + offLinelTask, + onLineTask } from '@/pages/DataStudio/HeaderContainer/service'; import { - DataStudioParams, DataStudioTabsItemType, + MetadataTabsItemType, StateType, TabsPageType, TaskDataType, VIEW } from '@/pages/DataStudio/model'; +import { JOB_LIFE_CYCLE, JOB_STATUS } from '@/pages/DevOps/constants'; import { ConfigStateType } from '@/pages/SettingCenter/GlobalSetting/model'; import { SettingConfigKeyEnum } from '@/pages/SettingCenter/GlobalSetting/SettingOverView/constants'; import { handlePutDataJson } from '@/services/BusinessCrud'; import { BaseConfigProperties } from '@/types/SettingCenter/data'; import { l } from '@/utils/intl'; -import { connect, history } from '@@/exports'; +import { connect } from '@@/exports'; import { + ApartmentOutlined, + CaretRightFilled, EnvironmentOutlined, - FlagTwoTone, + FundOutlined, + MergeCellsOutlined, MoreOutlined, - PauseCircleTwoTone, - PlayCircleTwoTone, + PauseOutlined, PlayCircleTwoTone, RotateRightOutlined, - SafetyCertificateTwoTone, - SaveTwoTone, - SendOutlined, - SmileOutlined + SaveOutlined, + ScheduleOutlined, + SendOutlined } from '@ant-design/icons'; -import { Breadcrumb, Button, Descriptions, message, Modal, notification, Space } from 'antd'; +import { Breadcrumb, Descriptions, message, Modal, Space } from 'antd'; +import { ButtonProps } from 'antd/es/button/button'; import React, { useEffect, useState } from 'react'; const headerStyle: React.CSSProperties = { @@ -71,12 +77,13 @@ const headerStyle: React.CSSProperties = { }; type ButtonRoute = { - icon: React.ReactNode; - title: string; - click: () => void; + icon?: React.ReactNode; + title?: string; + click?: () => void; hotKey?: (e: KeyboardEvent) => boolean; hotKeyDesc?: string; - isShow?: (type?: TabsPageType, subType?: string, data?: any) => boolean; + isShow: boolean; + props?: ButtonProps; }; const HeaderContainer = (props: any) => { @@ -91,18 +98,26 @@ const HeaderContainer = (props: any) => { } = props; const [modal, contextHolder] = Modal.useModal(); - const [notificationApi, notificationContextHolder] = notification.useNotification(); const [messageApi, messageContextHolder] = message.useMessage(); const [enableDs, setEnableDs] = useState(false); + const [currentData, setCurrentData] = useState(undefined); + const [currentTab, 
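+    // currentTab / currentData mirror the active editor tab; the effect below keeps
+    // them in sync whenever panes or activeKey change, replacing per-render recomputation.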
setCurrentTab] = useState<
+    DataStudioTabsItemType | MetadataTabsItemType | undefined
+  >(undefined);
 
   useEffect(() => {
     queryDsConfig(SettingConfigKeyEnum.DOLPHIN_SCHEDULER.toLowerCase());
   }, []);
 
+  useEffect(() => {
+    setCurrentTab(getCurrentTab(panes, activeKey));
+    setCurrentData(getCurrentData(panes, activeKey));
+  }, [panes, activeKey]);
+
   useEffect(() => {
     // Check whether the DolphinScheduler integration is enabled; the push button is shown only when it is
     if (dsConfig) {
-      dsConfig.map((item: BaseConfigProperties) => {
+      dsConfig.forEach((item: BaseConfigProperties) => {
         if (item.key === 'dolphinscheduler.settings.enable') {
           setEnableDs(item.value === 'true');
         }
@@ -110,201 +125,171 @@ const HeaderContainer = (props: any) => {
     }
   }, [dsConfig]);
 
+  const handleSave = async () => {
+    await handlePutDataJson('/api/task', currentData);
+    saveTabs({ ...props.tabs });
+  };
+
   const handlerStop = () => {
-    const current = getCurrentData(panes, activeKey);
-    if (!current) {
-      return;
-    }
+    if (!currentData) return;
     modal.confirm({
       title: l('pages.datastudio.editor.stop.job'),
       content: l('pages.datastudio.editor.stop.jobConfirm', '', {
-        jobName: current.name
+        jobName: currentData.name
      }),
       okText: l('button.confirm'),
       cancelText: l('button.cancel'),
       onOk: async () => {
-        offLineTask(l('pages.datastudio.editor.stop.job'), current.id, 'canceljob').then(
-          (result) => {
-            (
-              getCurrentTab(panes, activeKey)?.params as DataStudioParams
-            ).taskData.jobInstanceId = 0;
-            saveTabs({ ...props.tabs });
-          }
-        );
+        cancelTask(l('pages.datastudio.editor.stop.job'), currentData.id).then(() => {
+          currentData.status = JOB_STATUS.CANCELED;
+          saveTabs({ ...props.tabs });
+        });
       }
     });
   };
 
-  const handlerExec = () => {
-    const current = getCurrentData(panes, activeKey);
-    if (!current) {
-      return;
-    }
-    if (!isSql(current.dialect) && !isOnline(current.type)) {
-      messageApi.warning(l('pages.datastudio.editor.execute.warn', '', { type: current.type }));
-      return;
-    }
-
-    const param: TaskDataType = {
-      ...current,
-      jobName: current.name,
-      taskId: current.id
-    };
+  const handlerSubmit = async () => {
+    if (!currentData) return;
 
-    const taskKey = Math.random() * 1000 + '';
+    const res = await executeSql(
+      l('pages.datastudio.editor.submitting', '', { jobName: currentData.name }),
+      currentData.id
+    );
+    if (!res) return;
 
-    notificationApi.success({
-      message: l('pages.datastudio.editor.submitting', '', {
-        jobName: param.name
-      }),
-      description: param.statement.substring(0, 40) + '...',
-      duration: null,
-      key: taskKey,
-      icon: <SmileOutlined />
+    updateJobRunningMsg({
+      taskId: currentData.id,
+      jobName: currentData.name,
+      jobState: res.datas.status,
+      runningLog: res.msg
     });
-    executeSql(l('pages.datastudio.editor.submitting', '', { jobName: param.name }), param).then(
-      (res) => {
-        notificationApi.destroy(taskKey);
-        if (!res) {
-          return;
-        }
-        updateJobRunningMsg({
-          taskId: current.id,
-          jobName: current.name,
-          jobState: res.datas.status,
-          runningLog: res.msg
-        });
-        if (res.datas.success) {
-          messageApi.success(l('pages.datastudio.editor.exec.success'));
-          (getCurrentTab(panes, activeKey)?.params as DataStudioParams).taskData.jobInstanceId =
-            res.datas.jobInstanceId;
-          saveTabs({ ...props.tabs });
-        }
-      }
-    );
+    messageApi.success(l('pages.datastudio.editor.exec.success'));
+    currentData.status = JOB_STATUS.RUNNING;
+    saveTabs({ ...props.tabs });
+  };
+
+  const handleChangeJobLife = async () => {
+    if (!currentData) return;
+    if (isOnline(currentData)) {
+      await offLinelTask(currentData.id);
+      currentData.step = JOB_LIFE_CYCLE.DEVELOP;
+    } else {
+      await onLineTask(currentData.id);
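+      // Assumption from this patch's flow: onLineTask / offLinelTask reject when the
+      // server refuses the transition, so the local step can be advanced without a refetch.
+      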
currentData.step = JOB_LIFE_CYCLE.ONLINE; + } + saveTabs({ ...props.tabs }); + }; + + const showDagGraph = async () => { + const result = await getJobPlan(l('pages.datastudio.editor.explan.tip'), currentData); + if (result) { + modal.confirm({ + title: l('pages.datastudio.editor.explan.tip'), + width: '100%', + icon: null, + content: , + cancelButtonProps: { style: { display: 'none' } } + }); + } + }; + + const showExplain = async () => { + modal.confirm({ + title: l('pages.datastudio.explain.validate.msg'), + width: '100%', + icon: null, + content: , + cancelButtonProps: { style: { display: 'none' } } + }); }; const routes: ButtonRoute[] = [ // 保存按钮 icon { - icon: , - title: l('button.save'), - click: () => { - const current = getCurrentData(panes, activeKey); - handlePutDataJson('/api/task', current).then(() => saveTabs({ ...props.tabs })); - }, hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's', hotKeyDesc: 'Ctrl+S', - isShow: projectCommonShow + isShow: projectCommonShow(currentTab?.type), + icon: , + title: l('button.save'), + click: () => handleSave(), + props: { + disabled: isOnline(currentData) + } + }, + { + // 执行图按钮 + icon: , + title: l('button.graph'), + isShow: projectCommonShow(currentTab?.type), + click: async () => showDagGraph() }, { // 检查 sql按钮 - icon: , + icon: , title: l('pages.datastudio.editor.check'), - click: () => { - modal.confirm({ - title: l('pages.datastudio.explain.validate.msg'), - width: '100%', - icon: null, - content: , - cancelButtonProps: { style: { display: 'none' } } - }); - }, - isShow: projectCommonShow - // hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's' + click: () => showExplain(), + isShow: projectCommonShow(currentTab?.type) }, { - // 执行图按钮 - icon: , - title: l('button.graph'), - click: () => { - const currentData = getCurrentData(panes, activeKey); - const res = getJobPlan(l('pages.datastudio.editor.explan.tip'), currentData); - res.then((result) => { - if (result) { - modal.confirm({ - title: l('pages.datastudio.editor.explan.tip'), - width: '100%', - icon: null, - content: , - cancelButtonProps: { style: { display: 'none' } } - }); - } - }); - }, - // hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's' - isShow: projectCommonShow + // 推送海豚, 此处需要将系统设置中的 ds 的配置拿出来做判断 启用才展示 + icon: , + title: l('button.push'), + hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's', + isShow: enableDs + }, + { + // 发布按钮 + icon: isOnline(currentData) ? : , + title: isOnline(currentData) ? 
l('button.offline') : l('button.publish'), + isShow: currentTab?.type == TabsPageType.project, + click: () => handleChangeJobLife() + }, + { + // flink jobdetail跳转 + icon: , + title: l('pages.datastudio.to.jobDetail'), + isShow: + currentTab?.type == TabsPageType.project && + currentData?.jobInstanceId && + currentTab.subType == 'flinksql', + props: { + href: `/#/devops/job-detail?id=${currentData?.jobInstanceId}`, + target: '_blank' + } }, { // 执行按钮 - icon: , + icon: , title: l('pages.datastudio.editor.exec'), - click: handlerExec, + click: handlerSubmit, hotKey: (e: KeyboardEvent) => e.shiftKey && e.key === 'F10', hotKeyDesc: 'Shift+F10', - isShow: (type?: TabsPageType, subType?: string, data?: any) => - type === TabsPageType.project && !data?.jobInstanceId + isShow: currentTab?.type == TabsPageType.project && !isRunning(currentData), + props: { + style: {background: "#52c41a"}, + type: 'primary', + } }, { // 停止按钮 - icon: , + icon: , title: l('pages.datastudio.editor.stop'), click: handlerStop, - isShow: (type?: TabsPageType, subType?: string, data?: any) => - type === TabsPageType.project && data?.jobInstanceId - // hotKey: (e: KeyboardEvent) => e.shiftKey && e.key === 'F10', - // hotKeyDesc: "Shift+F10" - }, - { - // flink jobdetail跳转 - icon: , - title: l('pages.datastudio.to.jobDetail'), - click: () => history.push(`/devops/job-detail?id=${getCurrentData(panes, activeKey)?.id}`), - isShow: (type?: TabsPageType, subType?: string, data?: any) => - type === TabsPageType.project && data?.jobInstanceId && subType === 'flinksql' - }, - // { - // // 异步提交按钮 - // icon: , - // title: l('button.async'), - // click: () => { - // console.log("ctrl+s") - // }, - // // hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's' - // }, - { - // 推送海豚, 此处需要将系统设置中的 ds 的配置拿出来做判断 启用才展示 - icon: , - title: l('button.push'), - click: () => {}, - hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's', - isShow: () => enableDs + isShow: currentTab?.type == TabsPageType.project && isRunning(currentData), + hotKey: (e: KeyboardEvent) => e.shiftKey && e.key === 'F10', + hotKeyDesc: 'Shift+F10', + props: { + type: 'primary', + danger: true + } }, - // { - // // 发布按钮 - // icon: , - // title: l('button.publish'), - // click: () => { - // console.log("ctrl+s") - // }, - // // hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's' - // }, { - // // api 按钮 - // icon: , - // title: l('button.api'), - // click: () => { - // console.log("ctrl+s") - // }, - // // hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's' - // }, { - // icon: , - title: 'More', + title: '', click: () => {}, - isShow: () => true + isShow: true // hotKey: (e: KeyboardEvent) => e.ctrlKey && e.key === 's' } ]; @@ -328,59 +313,44 @@ const HeaderContainer = (props: any) => { ); }; - const renderHotkey = () => { - document.onkeydown = (e) => { - if (getCurrentTab(panes, activeKey)) { - routes - .filter((r) => r.hotKey?.(e)) - .forEach((r) => { - r.click(); - e.preventDefault(); - }); - } - }; + + document.onkeydown = (e) => { + routes + .filter((r) => r.hotKey?.(e)) + .forEach((r) => { + r.click?.(); + e.preventDefault(); + }); }; - renderHotkey(); /** * @description: 渲染右侧按钮 */ const renderRightButtons = () => { return ( - <> - +
+ {routes - .filter((x) => { - if (x.isShow) { - const currentTab = getCurrentTab(panes, activeKey); - if (currentTab) { - return x.isShow( - currentTab?.type, - currentTab?.subType, - (currentTab as DataStudioTabsItemType)?.params.taskData - ); - } - } - return false; - }) + .filter((x) => x.isShow) .map((route) => { - const { icon, title, click, hotKeyDesc } = route; return ( -
); }; diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx index 9ed07b179f3..3ea7833ed40 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx @@ -1,19 +1,29 @@ -import { TaskDataType } from '@/pages/DataStudio/model'; import { postAll } from '@/services/api'; import { handleGetOption, handleOption } from '@/services/BusinessCrud'; -import { DIALECT, RUN_MODE } from '@/services/constants'; +import { DIALECT } from '@/services/constants'; export async function explainSql(params: any) { - return postAll('/api/studio/explainSql', params); + return postAll('/api/task/explainSql', params); } + export async function getJobPlan(title: string, params: any) { - return handleOption('/api/studio/getJobPlan', title, params); + return handleOption('/api/task/getJobPlan', title, params); +} + +export async function executeSql(title: string, id: number) { + return handleGetOption('/api/task/submitTask', title, { id }); } -export async function executeSql(title: string, params: TaskDataType) { - return handleOption('/api/studio/executeSql', title, params); + +export function cancelTask(title: string, id: number) { + return handleGetOption('api/task/cancel', title, { id }); +} + +export function onLineTask(id: number) { + return handleGetOption('api/task/onLineTask', '', { taskId: id }); } -export function offLineTask(title: string, id: number, type: string) { - return handleGetOption('api/task/offLineTask', title, { id, type }); + +export function offLinelTask(id: number) { + return handleGetOption('api/task/offLineTask', '', { taskId: id }); } export const isSql = (dialect: string) => { @@ -37,14 +47,3 @@ export const isSql = (dialect: string) => { return false; } }; -export const isOnline = (type: string) => { - switch (type) { - case RUN_MODE.LOCAL: - case RUN_MODE.STANDALONE: - case RUN_MODE.YARN_SESSION: - case RUN_MODE.KUBERNETES_SESSION: - return true; - default: - return false; - } -}; diff --git a/dinky-web/src/pages/DevOps/constants.tsx b/dinky-web/src/pages/DevOps/constants.tsx index fede6d8b6b4..cfe8495a8c0 100644 --- a/dinky-web/src/pages/DevOps/constants.tsx +++ b/dinky-web/src/pages/DevOps/constants.tsx @@ -17,12 +17,8 @@ export enum JOB_LIFE_CYCLE { UNKNOWN = 0, - CREATE = 1, - DEVELOP = 2, - DEBUG = 3, - RELEASE = 4, - ONLINE = 5, - CANCEL = 6 + DEVELOP = 1, + ONLINE = 2 } export enum JOB_STATUS { From 96576a959591de7407210b84638e1f08ba0bff3c Mon Sep 17 00:00:00 2001 From: Licho Date: Wed, 11 Oct 2023 09:14:40 +0800 Subject: [PATCH 2/2] explicit set locale default value (#2376) * refactor: explicit set default locale * fix: task information changed function * Spotless Apply --------- Co-authored-by: leechor --- dinky-common/src/main/java/org/dinky/utils/I18n.java | 7 +++++-- .../AuthCenter/Token/component/TokenModalForm/index.tsx | 3 +-- dinky-web/src/pages/DataStudio/index.tsx | 1 + .../Alert/AlertGroup/components/AlertGroupForm/index.tsx | 2 +- .../RegCenter/GitProject/components/ProjectModal/index.tsx | 2 +- 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/dinky-common/src/main/java/org/dinky/utils/I18n.java b/dinky-common/src/main/java/org/dinky/utils/I18n.java index 67d7fefab21..fdc268a6300 100644 --- a/dinky-common/src/main/java/org/dinky/utils/I18n.java +++ b/dinky-common/src/main/java/org/dinky/utils/I18n.java @@ -34,7 +34,10 @@ public final class I18n { private static final Logger logger = 
LoggerFactory.getLogger(I18n.class); private static final String MESSAGES_BASE = "i18n.messages"; - private static ResourceBundle bundle; + + static { + Locale.setDefault(Locale.US); + } private I18n() {} @@ -52,7 +55,7 @@ public static void setLocale(Locale l) { } public static String getMessage(String key) { - bundle = ResourceBundle.getBundle(MESSAGES_BASE); + ResourceBundle bundle = ResourceBundle.getBundle(MESSAGES_BASE); String message = bundle.getString(key); if (!JDK_ABOVE_1_8) { message = new String(message.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8); diff --git a/dinky-web/src/pages/AuthCenter/Token/component/TokenModalForm/index.tsx b/dinky-web/src/pages/AuthCenter/Token/component/TokenModalForm/index.tsx index 3201b3872c3..1a1e25cd2b2 100644 --- a/dinky-web/src/pages/AuthCenter/Token/component/TokenModalForm/index.tsx +++ b/dinky-web/src/pages/AuthCenter/Token/component/TokenModalForm/index.tsx @@ -150,7 +150,6 @@ const TokenModalForm: React.FC = (props) => { form.setFieldValue('tokenValue', tokenValue); }; - return ( <> @@ -165,7 +164,7 @@ const TokenModalForm: React.FC = (props) => { initialValues={{ ...value }} modalProps={{ destroyOnClose: true, - onCancel: () => handleCancel(), + onCancel: () => handleCancel() }} > diff --git a/dinky-web/src/pages/DataStudio/index.tsx b/dinky-web/src/pages/DataStudio/index.tsx index 4570e2759b4..557267fd09a 100644 --- a/dinky-web/src/pages/DataStudio/index.tsx +++ b/dinky-web/src/pages/DataStudio/index.tsx @@ -183,6 +183,7 @@ const DataStudio = (props: any) => { console.log('changed', key, info[key], params.taskData[key]); return true; } + return false; }); if (changed) { diff --git a/dinky-web/src/pages/RegCenter/Alert/AlertGroup/components/AlertGroupForm/index.tsx b/dinky-web/src/pages/RegCenter/Alert/AlertGroup/components/AlertGroupForm/index.tsx index c9d50eed99b..edc3f9eb401 100644 --- a/dinky-web/src/pages/RegCenter/Alert/AlertGroup/components/AlertGroupForm/index.tsx +++ b/dinky-web/src/pages/RegCenter/Alert/AlertGroup/components/AlertGroupForm/index.tsx @@ -149,7 +149,7 @@ const AlertGroupForm: React.FC = (props) => { submitter={{ render: () => [...renderFooter()] }} modalProps={{ destroyOnClose: true, - onCancel: () => handleModalVisible(false), + onCancel: () => handleModalVisible(false) }} > {renderAlertGroupForm()} diff --git a/dinky-web/src/pages/RegCenter/GitProject/components/ProjectModal/index.tsx b/dinky-web/src/pages/RegCenter/GitProject/components/ProjectModal/index.tsx index fccdaea40f4..9fc0482e818 100644 --- a/dinky-web/src/pages/RegCenter/GitProject/components/ProjectModal/index.tsx +++ b/dinky-web/src/pages/RegCenter/GitProject/components/ProjectModal/index.tsx @@ -111,7 +111,7 @@ const ProjectModal: React.FC = (props) => { initialValues={values} modalProps={{ destroyOnClose: true, - onCancel: () => handleCancel(), + onCancel: () => handleCancel() }} >
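
A note for reviewers on the LoadingBtn component introduced in PATCH 1/2: it wraps the antd Button and holds it in the loading state until the awaited onClick settles, which is what lets HeaderContainer drop its hand-rolled notification and jobInstanceId bookkeeping around submit. As committed, the wrapper does not clear its loading flag when the handler rejects, so handlers should catch their own errors. A minimal consumer sketch follows; the component and handler names are illustrative only, not part of this patch:

    import React from 'react';
    import { message } from 'antd';
    import { LoadingBtn } from '@/components/CallBackButton/LoadingBtn';

    // Illustrative async action: any onClick returning a Promise works, because
    // LoadingBtn awaits props.onClick before clearing its internal loading flag.
    const submitCurrentTask = async (): Promise<void> => {
      try {
        await new Promise((resolve) => setTimeout(resolve, 1000)); // stand-in for an API call
        message.success('submitted');
      } catch (e) {
        // keep error handling inside the handler: LoadingBtn does not catch rejections
        message.error(`submit failed: ${e}`);
      }
    };

    export const SubmitBar: React.FC = () => (
      <LoadingBtn type={'primary'} onClick={submitCurrentTask}>
        Submit
      </LoadingBtn>
    );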