From fec5c574ab7de618100a6c247e620e91fcb3f337 Mon Sep 17 00:00:00 2001
From: zhu-mingye <934230207@qq.com>
Date: Mon, 16 Oct 2023 18:07:06 +0800
Subject: [PATCH] refactor_model_to_typehandler

---
 .../main/java/org/dinky/assertion/Assert.java |   8 +-
 .../controller/ClusterInstanceController.java |  83 ++--
 .../java/org/dinky/data/dto/JobDataDto.java   |  38 +-
 .../java/org/dinky/data/dto/StudioDDLDTO.java |   2 +-
 .../main/java/org/dinky/data/dto/TaskDTO.java |  11 +-
 .../data/dto/TaskVersionConfigureDTO.java     |   2 +-
 .../{Cluster.java => ClusterInstance.java}    |  28 +-
 .../java/org/dinky/data/model/History.java    |   2 +-
 .../org/dinky/data/model/HomeResource.java    |   2 +-
 .../java/org/dinky/data/model/JobHistory.java |  24 +-
 .../org/dinky/data/model/JobInfoDetail.java   |   4 +-
 .../org/dinky/data/model/JobInstance.java     |   4 +-
 .../main/java/org/dinky/data/model/Task.java  |   2 +-
 .../typehandler/AbstractJsonTypeHandler.java  |  59 +++
 .../data/typehandler/JSONObjectHandler.java   |  72 +++
 .../java/org/dinky/job/Job2MysqlHandler.java  |  20 +-
 .../dinky/job/handler/JobAlertHandler.java    |  24 +-
 .../dinky/job/handler/JobMetricsHandler.java  |   3 +-
 .../dinky/job/handler/JobRefreshHandler.java  |  16 +-
 .../dinky/mapper/ClusterInstanceMapper.java   |   6 +-
 .../dinky/service/ClusterInstanceService.java |  32 +-
 .../impl/ClusterInstanceServiceImpl.java      |  85 ++--
 .../service/impl/JobInstanceServiceImpl.java  |   6 +-
 .../dinky/service/impl/StudioServiceImpl.java |   8 +-
 .../dinky/service/impl/TaskServiceImpl.java   |  26 +-
 dinky-admin/src/main/resources/db/db-h2.sql   |  14 +-
 .../mapper/ClusterInstanceMapper.xml          |   6 +-
 .../dinky/alert/Rules/CheckpointsRule.java    |  92 ----
 .../dinky/alert/rules/CheckpointsRule.java    | 137 ++++++
 .../alert/{Rules => rules}/ExceptionRule.java |   2 +-
 .../FlinkJobNodeBackPressure.java             |  50 +-
 .../flink/checkpoint/CheckPointOverView.java  | 433 ++++++++++++++++++
 .../checkpoint/CheckpointStatistics.java      | 149 ++++++
 .../checkpoint/TaskCheckpointStatistics.java  | 119 +++++
 .../flink/config/CheckpointConfigInfo.java    | 167 +++++++
 .../data/flink/config/ExecutionConfig.java    |  29 +-
 .../data/flink/config/FlinkJobConfigInfo.java |  21 +-
 .../exceptions/FlinkJobExceptionsDetail.java  |  41 +-
 .../data/flink/job/FlinkJobDetailInfo.java    |  57 ++-
 .../dinky/data/flink/job/FlinkJobPlan.java    |  25 +-
 .../data/flink/job/FlinkJobPlanNode.java      |  45 +-
 .../data/flink/job/FlinkJobPlanNodeInput.java |  10 +-
 .../dinky/data/flink/job/FlinkJobVertex.java  |  49 +-
 .../watermark/FlinkJobNodeWaterMark.java      |  15 +-
 dinky-web/config/proxy.ts                     |  13 +-
 dinky-web/src/global.less                     |  28 +-
 dinky-web/src/locales/en-US/pages.ts          |   5 +-
 dinky-web/src/locales/zh-CN/pages.ts          |   5 +-
 .../Role/components/RoleProTable/index.tsx    |   3 +-
 .../components/PermissionsProTable/index.tsx  |   3 +-
 .../components/TenantProTable/index.tsx       |   3 +-
 .../components/TenantUserList/index.tsx       |   2 +-
 .../Token/component/TokenList/index.tsx       |   2 +-
 .../User/components/UserProTable/index.tsx    |   2 +-
 .../RightContainer/JobConfig/function.tsx     |   6 +-
 .../CheckPointsTab/components/CkDesc.tsx      |   2 +-
 .../Configuration/components/contants.tsx     |  15 +
 .../components/InstanceList/index.tsx         | 304 ++++++------
 .../DataSourceProForm/index.tsx               |   2 +-
 .../components/DocumentProTable/index.tsx     |   3 +-
 .../components/ProjectProTable/index.tsx      |   3 +-
 .../components/GlobalVarProTable/index.tsx    |   3 +-
 .../UDF/components/TemplateTable/index.tsx    |   5 +-
 dinky-web/src/services/endpoints.tsx          |   1 +
 dinky-web/src/types/RegCenter/init.d.ts       |   3 +-
 dinky-web/src/types/RegCenter/state.d.ts      |   1 +
 script/sql/dinky-mysql.sql                    |   8 +-
 script/sql/dinky-pg.sql                       |  18 +-
 .../1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql |   3 +-
 .../1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql |  52 ++-
 70 files changed, 1937 insertions(+), 586 deletions(-)
 rename dinky-admin/src/main/java/org/dinky/data/model/{Cluster.java => ClusterInstance.java} (85%)
 create mode 100644 dinky-admin/src/main/java/org/dinky/data/typehandler/AbstractJsonTypeHandler.java
 create mode 100644 dinky-admin/src/main/java/org/dinky/data/typehandler/JSONObjectHandler.java
 delete mode 100644 dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/CheckpointsRule.java
 create mode 100644 dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/CheckpointsRule.java
 rename dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/{Rules => rules}/ExceptionRule.java (98%)
 create mode 100644 dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckPointOverView.java
 create mode 100644 dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckpointStatistics.java
 create mode 100644 dinky-common/src/main/java/org/dinky/data/flink/checkpoint/TaskCheckpointStatistics.java
 create mode 100644 dinky-common/src/main/java/org/dinky/data/flink/config/CheckpointConfigInfo.java

diff --git a/dinky-admin/src/main/java/org/dinky/assertion/Assert.java b/dinky-admin/src/main/java/org/dinky/assertion/Assert.java
index 6cb5005076..05dd795f3f 100644
--- a/dinky-admin/src/main/java/org/dinky/assertion/Assert.java
+++ b/dinky-admin/src/main/java/org/dinky/assertion/Assert.java
@@ -20,7 +20,7 @@
 package org.dinky.assertion;
 
 import org.dinky.data.exception.BusException;
-import org.dinky.data.model.Cluster;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.model.Jar;
 import org.dinky.data.model.Task;
 
@@ -31,9 +31,9 @@
  */
 public interface Assert {
 
-    static void check(Cluster cluster) {
-        if (cluster.getId() == null) {
-            throw new BusException("Flink 集群【" + cluster.getId() + "】不存在");
+    static void check(ClusterInstance clusterInstance) {
+        if (clusterInstance.getId() == null) {
+            throw new BusException("Flink 集群【" + clusterInstance.getId() + "】不存在");
         }
     }
 
diff --git a/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java b/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java
index 3050908517..12d716a722 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java
@@ -23,8 +23,7 @@
 import org.dinky.data.constant.PermissionConstants;
 import org.dinky.data.enums.BusinessType;
 import org.dinky.data.enums.Status;
-import org.dinky.data.model.Cluster;
-import org.dinky.data.result.ProTableResult;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.result.Result;
 import org.dinky.service.ClusterInstanceService;
 
@@ -40,6 +39,7 @@
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 import com.fasterxml.jackson.databind.JsonNode;
 
 import cn.dev33.satoken.annotation.SaCheckPermission;
@@ -53,7 +53,7 @@
 /** ClusterInstanceController */
 @Slf4j
 @RestController
-@Api(tags = "Cluster Instance Controller")
+@Api(tags = "ClusterInstance Controller")
 @RequestMapping("/api/cluster")
 @RequiredArgsConstructor
 public class ClusterInstanceController {
@@ -63,7 +63,7 @@ public class ClusterInstanceController {
     /**
     * added or updated cluster instance
     *
-     * @param cluster {@link Cluster} cluster instance
+     * @param clusterInstance {@link ClusterInstance} cluster instance
     * @return {@link Result}<{@link Void}>
     * @throws Exception exception
     */
@@ -71,21 +71,21 @@ public class ClusterInstanceController {
     @PutMapping
     @Log(title = "Insert Or Update Cluster Instance", businessType = BusinessType.INSERT_OR_UPDATE)
     @ApiOperation("Insert Or Update Cluster Instance")
     @ApiImplicitParam(
-            name = "cluster",
-            value = "Cluster Instance",
-            dataType = "Cluster",
+            name = "clusterInstance",
+            value = "ClusterInstance",
+            dataType = "ClusterInstance",
             paramType = "body",
             required = true,
-            dataTypeClass = Cluster.class)
+            dataTypeClass = ClusterInstance.class)
     @SaCheckPermission(
             value = {
                 PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_EDIT,
                 PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_ADD
             },
             mode = SaMode.OR)
-    public Result<Void> saveOrUpdateClusterInstance(@RequestBody Cluster cluster) throws Exception {
-        cluster.setAutoRegisters(false);
-        clusterInstanceService.registersCluster(cluster);
+    public Result<Void> saveOrUpdateClusterInstance(@RequestBody ClusterInstance clusterInstance) throws Exception {
+        clusterInstance.setAutoRegisters(false);
+        clusterInstanceService.registersCluster(clusterInstance);
         return Result.succeed(Status.SAVE_SUCCESS);
     }
 
@@ -100,7 +100,7 @@ public Result<Void> saveOrUpdateClusterInstance(@RequestBody Cluster cluster) th
     @ApiOperation("Update Cluster Instance Status")
     @ApiImplicitParam(
             name = "id",
-            value = "Cluster Instance Id",
+            value = "ClusterInstance Id",
             dataType = "Integer",
             paramType = "query",
             required = true,
@@ -144,47 +144,48 @@ public Result<Void> deleteClusterInstanceById(@RequestParam Integer id) {
         }
     }
 
-    /**
-     * list cluster instances
-     *
-     * @param para {@link JsonNode} query parameters
-     * @return {@link ProTableResult}<{@link Cluster}>
-     */
-    @PostMapping
+    @GetMapping("/list")
     @ApiOperation("Cluster Instance List")
     @ApiImplicitParam(
-            name = "para",
-            value = "Query Parameters",
-            dataType = "JsonNode",
-            paramType = "body",
+            name = "keyword",
+            value = "Query keyword",
+            dataType = "String",
+            paramType = "query",
             required = true,
-            dataTypeClass = JsonNode.class)
-    public ProTableResult<Cluster> listClusters(@RequestBody JsonNode para) {
-        return clusterInstanceService.selectForProTable(para);
+            dataTypeClass = String.class)
+    public Result<List<ClusterInstance>> listClusterInstance(@RequestParam("keyword") String searchKeyWord) {
+        return Result.succeed(clusterInstanceService.list(new LambdaQueryWrapper<ClusterInstance>()
+                .like(ClusterInstance::getName, searchKeyWord)
+                .or()
+                .like(ClusterInstance::getAlias, searchKeyWord)
+                .or()
+                .like(ClusterInstance::getNote, searchKeyWord)));
     }
 
     /**
     * get all enable cluster instances
     *
-     * @return {@link Result}<{@link List}<{@link Cluster}>>
+     * @return {@link Result}<{@link List}<{@link ClusterInstance}>>
     */
     @GetMapping("/listEnabledAll")
     @ApiOperation("Get all enable cluster instances")
-    public Result<List<Cluster>> listEnabledAllClusterInstance() {
-        List<Cluster> clusters = clusterInstanceService.listEnabledAllClusterInstance();
-        return Result.succeed(clusters);
+    public Result<List<ClusterInstance>> listEnabledAllClusterInstance() {
+        List<ClusterInstance> clusterInstances = clusterInstanceService.listEnabledAllClusterInstance();
+        return Result.succeed(clusterInstances);
     }
 
     /**
     * get session enable cluster instances , this method is {@link Deprecated}
     *
-     * @return {@link Result}<{@link List}<{@link Cluster}>>
+     * @return {@link Result}<{@link List}<{@link ClusterInstance}>>
     */
     @GetMapping("/listSessionEnable")
-    @ApiOperation(value = "Get Enable Session Cluster", notes = "Get All Enable Cluster Instances Of Session Type")
-    public Result<List<Cluster>> listSessionEnable() {
-        List<Cluster> clusters = clusterInstanceService.listSessionEnable();
-        return Result.succeed(clusters);
+    @ApiOperation(
+            value = "Get Enable Session ClusterInstance",
+            notes = "Get All Enable Cluster Instances Of Session Type")
+    public Result<List<ClusterInstance>> listSessionEnable() {
+        List<ClusterInstance> clusterInstances = clusterInstanceService.listSessionEnable();
+        return Result.succeed(clusterInstances);
     }
 
     /**
@@ -197,9 +198,9 @@ public Result<List<Cluster>> listSessionEnable() {
     @ApiOperation("Cluster Instance Heartbeat")
     @SaCheckPermission(value = {PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_HEARTBEATS})
     public Result<Void> heartbeat() {
-        List<Cluster> clusters = clusterInstanceService.list();
-        for (Cluster cluster : clusters) {
-            clusterInstanceService.registersCluster(cluster);
+        List<ClusterInstance> clusterInstances = clusterInstanceService.list();
+        for (ClusterInstance clusterInstance : clusterInstances) {
+            clusterInstanceService.registersCluster(clusterInstance);
         }
         return Result.succeed(Status.CLUSTER_INSTANCE_HEARTBEAT_SUCCESS);
     }
@@ -229,7 +230,7 @@ public Result<Integer> recycleCluster() {
     @ApiOperation("Cluster Instance Kill")
     @ApiImplicitParam(
             name = "id",
-            value = "Cluster Instance Id",
+            value = "ClusterInstance Id",
             dataType = "Integer",
             paramType = "query",
             required = true,
@@ -238,7 +239,7 @@ public Result<Integer> recycleCluster() {
     @SaCheckPermission(value = {PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_KILL})
     public Result<Void> killClusterInstance(@RequestParam("id") Integer id) {
         clusterInstanceService.killCluster(id);
-        return Result.succeed("Kill Cluster Succeed.");
+        return Result.succeed("Kill ClusterInstance Succeed.");
     }
 
     @PutMapping("/deploySessionClusterInstance")
@@ -246,14 +247,14 @@ public Result<Void> killClusterInstance(@RequestParam("id") Integer id) {
     @ApiOperation("Deploy Session Cluster Instance")
     @ApiImplicitParam(
             name = "id",
-            value = "Cluster Instance Id",
+            value = "ClusterInstance Id",
             dataType = "Integer",
             paramType = "query",
             required = true,
             dataTypeClass = Integer.class,
             example = "1")
     @SaCheckPermission(value = {PermissionConstants.REGISTRATION_CLUSTER_CONFIG_DEPLOY})
-    public Result<Cluster> deploySessionClusterInstance(@RequestParam("id") Integer id) {
+    public Result<ClusterInstance> deploySessionClusterInstance(@RequestParam("id") Integer id) {
         return Result.succeed(clusterInstanceService.deploySessionCluster(id), Status.CLUSTER_INSTANCE_DEPLOY);
     }
 }
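Note: the keyword filter introduced in listClusterInstance() above simply ORs three LIKE predicates together. A minimal sketch of the resulting query shape, assuming the entity getters from ClusterInstance in this patch (the keyword value and the surrounding variable names are illustrative only):

    // Roughly: SELECT * FROM dinky_cluster
    //          WHERE name LIKE '%kw%' OR alias LIKE '%kw%' OR note LIKE '%kw%'
    LambdaQueryWrapper<ClusterInstance> wrapper = new LambdaQueryWrapper<ClusterInstance>()
            .like(ClusterInstance::getName, keyword)
            .or()
            .like(ClusterInstance::getAlias, keyword)
            .or()
            .like(ClusterInstance::getNote, keyword);
    List<ClusterInstance> matches = clusterInstanceService.list(wrapper);
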
checkpoints") - private JsonNode checkpoints; + private CheckPointOverView checkpoints; - @TableField(exist = false) @ApiModelProperty(value = "Checkpoints Config Object", notes = "Object representing checkpoints configuration") - private JsonNode checkpointsConfig; + private CheckpointConfigInfo checkpointsConfig; - @ApiModelProperty(value = "FlinkJobConfigInfo", notes = "FlinkJobConfigInfo representing job configuration") + @ApiModelProperty(value = "JobConfigInfo", notes = "JobConfigInfo representing job configuration") private FlinkJobConfigInfo config; - @TableField(exist = false) @ApiModelProperty(value = "Jar Object", notes = "Object representing the JAR used in the job") private JsonNode jar; - @TableField(exist = false) - @ApiModelProperty(value = "Cluster Object", notes = "Object representing the cluster") + @ApiModelProperty(value = "ClusterInstance Object", notes = "Object representing the cluster") private JsonNode cluster; - @TableField(exist = false) @ApiModelProperty(value = "Cluster Configuration Object", notes = "Object representing cluster configuration") private JsonNode clusterConfiguration; - @TableField(exist = false) @ApiModelProperty( value = "Error Flag", dataType = "boolean", @@ -94,7 +89,6 @@ public class JobDataDto { notes = "Flag indicating if there was an error") private boolean error; - @TableField(exist = false) @ApiModelProperty( value = "Error Message", dataType = "boolean", @@ -106,11 +100,11 @@ public JobHistory toJobHistory() { return JobHistory.builder() .id(this.id) .tenantId(this.tenantId) - .jobJson(JSONUtil.toJsonStr(getJob())) - .exceptionsJson(JSONUtil.toJsonStr(getExceptions())) - .checkpointsJson(JSONUtil.toJsonStr(getCheckpoints())) - .checkpointsConfigJson(JSONUtil.toJsonStr(getCheckpointsConfig())) - .configJson(JSONUtil.toJsonStr(getConfig())) + .jobJson(this.job) + .exceptionsJson(this.exceptions) + .checkpointsJson(this.checkpoints) + .checkpointsConfigJson(this.checkpointsConfig) + .configJson(this.config) .jarJson(JSONUtil.toJsonStr(getJar())) .clusterJson(JSONUtil.toJsonStr(getCluster())) .clusterConfigurationJson(JSONUtil.toJsonStr(getClusterConfiguration())) @@ -122,11 +116,11 @@ public static JobDataDto fromJobHistory(JobHistory jobHistory) { return JobDataDto.builder() .id(jobHistory.getId()) .tenantId(jobHistory.getTenantId()) - .job(JsonUtils.toJavaBean(jobHistory.getJobJson(), FlinkJobDetailInfo.class)) - .exceptions(JsonUtils.toJavaBean(jobHistory.getExceptionsJson(), FlinkJobExceptionsDetail.class)) - .checkpoints(JsonUtils.parseToJsonNode(jobHistory.getCheckpointsJson())) - .checkpointsConfig(JsonUtils.parseToJsonNode(jobHistory.getCheckpointsConfigJson())) - .config(JsonUtils.toJavaBean(jobHistory.getConfigJson(), FlinkJobConfigInfo.class)) + .job(jobHistory.getJobJson()) + .exceptions(jobHistory.getExceptionsJson()) + .checkpoints(jobHistory.getCheckpointsJson()) + .checkpointsConfig(jobHistory.getCheckpointsConfigJson()) + .config(jobHistory.getConfigJson()) .jar(JsonUtils.parseToJsonNode(jobHistory.getJarJson())) .cluster(JsonUtils.parseToJsonNode(jobHistory.getClusterJson())) .clusterConfiguration(JsonUtils.parseToJsonNode(jobHistory.getClusterConfigurationJson())) diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java index c034107a34..f27b0f73c2 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java @@ -65,7 +65,7 @@ public class StudioDDLDTO { 
diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java
index c034107a34..f27b0f73c2 100644
--- a/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java
+++ b/dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java
@@ -65,7 +65,7 @@ public class StudioDDLDTO {
     private boolean useRemote;
 
     @ApiModelProperty(
-            value = "Cluster ID",
+            value = "ClusterInstance ID",
             dataType = "Integer",
             example = "1",
             notes = "The identifier of the cluster")
diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java
index 914736f650..42446529c4 100644
--- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java
+++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java
@@ -104,7 +104,7 @@ public class TaskDTO extends AbstractStatementDTO {
     private boolean batchModel;
 
     @ApiModelProperty(
-            value = "Cluster ID",
+            value = "ClusterInstance ID",
             dataType = "Integer",
             example = "1",
             notes = "The identifier of the cluster")
@@ -167,8 +167,7 @@ public class TaskDTO extends AbstractStatementDTO {
     @ApiModelProperty(value = "Statement", dataType = "String", notes = "SQL statement for the task")
     private String statement;
 
-    @ApiModelProperty(value = "Cluster Name", dataType = "String", notes = "Name of the associated cluster")
-    @TableField(exist = false)
+    @ApiModelProperty(value = "ClusterInstance Name", dataType = "String", notes = "Name of the associated cluster")
     private String clusterName;
 
     @ApiModelProperty(
@@ -179,30 +178,24 @@ public class TaskDTO extends AbstractStatementDTO {
     private TaskExtConfig configJson;
 
     @ApiModelProperty(value = "Path", dataType = "String", notes = "Path associated with the task")
-    @TableField(exist = false)
     private String path;
 
     @ApiModelProperty(value = "JAR Name", dataType = "String", notes = "Name of the associated JAR")
-    @TableField(exist = false)
     private String jarName;
 
     @ApiModelProperty(
             value = "Cluster Configuration Name",
             dataType = "String",
             notes = "Name of the associated cluster configuration")
-    @TableField(exist = false)
     private String clusterConfigurationName;
 
     @ApiModelProperty(value = "Database Name", dataType = "String", notes = "Name of the associated database")
-    @TableField(exist = false)
     private String databaseName;
 
     @ApiModelProperty(value = "Environment Name", dataType = "String", notes = "Name of the associated environment")
-    @TableField(exist = false)
     private String envName;
 
     @ApiModelProperty(value = "Alert Group Name", dataType = "String", notes = "Name of the associated alert group")
-    @TableField(exist = false)
     private String alertGroupName;
 
     @ApiModelProperty(
diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskVersionConfigureDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskVersionConfigureDTO.java
index 7d142dbd99..6f67a0fd41 100644
--- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskVersionConfigureDTO.java
+++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskVersionConfigureDTO.java
@@ -76,7 +76,7 @@ public class TaskVersionConfigureDTO implements Serializable {
     private Boolean batchModel;
 
     @ApiModelProperty(
-            value = "Flink Cluster ID",
+            value = "Flink ClusterInstance ID",
             dataType = "Integer",
             example = "3",
             notes = "The ID of the Flink cluster")
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/Cluster.java b/dinky-admin/src/main/java/org/dinky/data/model/ClusterInstance.java
similarity index 85%
rename from dinky-admin/src/main/java/org/dinky/data/model/Cluster.java
rename to dinky-admin/src/main/java/org/dinky/data/model/ClusterInstance.java
index 74c0d0592a..717f8ff750 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/Cluster.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/ClusterInstance.java
@@ -31,15 +31,15 @@
 import lombok.EqualsAndHashCode;
 
 /**
- * Cluster
+ * ClusterInstance
 *
 * @since 2021/5/28 13:53
 */
 @Data
 @EqualsAndHashCode(callSuper = false)
 @TableName("dinky_cluster")
-@ApiModel(value = "Cluster", description = "Cluster")
-public class Cluster extends SuperEntity {
+@ApiModel(value = "ClusterInstance", description = "ClusterInstance")
+public class ClusterInstance extends SuperEntity {
 
     private static final long serialVersionUID = 3104721227014487321L;
 
@@ -112,17 +112,17 @@ public class Cluster extends SuperEntity {
     @ApiModelProperty(value = "taskId", required = true, dataType = "Integer", example = "test", notes = "task id")
     private Integer taskId;
 
-    public static Cluster autoRegistersCluster(
+    public static ClusterInstance autoRegistersCluster(
             String hosts, String name, String alias, String type, Integer clusterConfigurationId, Integer taskId) {
-        Cluster cluster = new Cluster();
-        cluster.setName(name);
-        cluster.setAlias(alias);
-        cluster.setHosts(hosts);
-        cluster.setType(type);
-        cluster.setClusterConfigurationId(clusterConfigurationId);
-        cluster.setTaskId(taskId);
-        cluster.setAutoRegisters(true);
-        cluster.setEnabled(true);
-        return cluster;
+        ClusterInstance clusterInstance = new ClusterInstance();
+        clusterInstance.setName(name);
+        clusterInstance.setAlias(alias);
+        clusterInstance.setHosts(hosts);
+        clusterInstance.setType(type);
+        clusterInstance.setClusterConfigurationId(clusterConfigurationId);
+        clusterInstance.setTaskId(taskId);
+        clusterInstance.setAutoRegisters(true);
+        clusterInstance.setEnabled(true);
+        return clusterInstance;
     }
 }
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/History.java b/dinky-admin/src/main/java/org/dinky/data/model/History.java
index 88c8d0527b..8c5809106a 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/History.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/History.java
@@ -50,7 +50,7 @@ public class History implements Serializable {
     @ApiModelProperty(value = "Tenant ID", dataType = "Integer", example = "1", required = true)
     private Integer tenantId;
 
-    @ApiModelProperty(value = "Cluster ID", dataType = "Integer")
+    @ApiModelProperty(value = "ClusterInstance ID", dataType = "Integer")
     private Integer clusterId;
 
     @ApiModelProperty(value = "Cluster Configuration ID", dataType = "Integer")
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/HomeResource.java b/dinky-admin/src/main/java/org/dinky/data/model/HomeResource.java
index ea3d6fb3df..bb28c67a24 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/HomeResource.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/HomeResource.java
@@ -27,7 +27,7 @@
 @ApiModel(value = "HomeResource", description = "Home Resource Information")
 public class HomeResource {
 
-    @ApiModelProperty(value = "Flink Cluster Count", dataType = "Integer")
+    @ApiModelProperty(value = "Flink ClusterInstance Count", dataType = "Integer")
     private Integer flinkClusterCount;
 
     @ApiModelProperty(value = "Flink Config Count", dataType = "Integer")
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/JobHistory.java b/dinky-admin/src/main/java/org/dinky/data/model/JobHistory.java
index b80620c792..9ae1f307c3 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/JobHistory.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/JobHistory.java
@@ -19,6 +19,13 @@
 
 package org.dinky.data.model;
 
+import org.dinky.data.flink.checkpoint.CheckPointOverView;
+import org.dinky.data.flink.config.CheckpointConfigInfo;
+import org.dinky.data.flink.config.FlinkJobConfigInfo;
+import org.dinky.data.flink.exceptions.FlinkJobExceptionsDetail;
+import org.dinky.data.flink.job.FlinkJobDetailInfo;
+import org.dinky.data.typehandler.JSONObjectHandler;
+
 import java.io.Serializable;
 import java.time.LocalDateTime;
 
@@ -65,35 +72,40 @@ public class JobHistory implements Serializable {
             dataType = "String",
             example = "{\"jobName\": \"Example Job\"}",
             notes = "JSON representation of the job")
-    private String jobJson;
+    @TableField(typeHandler = JSONObjectHandler.class)
+    private FlinkJobDetailInfo jobJson;
 
     @ApiModelProperty(
             value = "Exceptions JSON",
             dataType = "String",
             example = "{\"exceptionType\": \"RuntimeException\"}",
             notes = "JSON representation of exceptions")
-    private String exceptionsJson;
+    @TableField(typeHandler = JSONObjectHandler.class)
+    private FlinkJobExceptionsDetail exceptionsJson;
 
     @ApiModelProperty(
             value = "Checkpoints JSON",
             dataType = "String",
             example = "{\"checkpointId\": 123}",
             notes = "JSON representation of checkpoints")
-    private String checkpointsJson;
+    @TableField(typeHandler = JSONObjectHandler.class)
+    private CheckPointOverView checkpointsJson;
 
     @ApiModelProperty(
             value = "Checkpoints Config JSON",
             dataType = "String",
             example = "{\"configParam\": \"value\"}",
             notes = "JSON representation of checkpoints config")
-    private String checkpointsConfigJson;
+    @TableField(typeHandler = JSONObjectHandler.class)
+    private CheckpointConfigInfo checkpointsConfigJson;
 
     @ApiModelProperty(
             value = "Config JSON",
             dataType = "String",
             example = "{\"configParam\": \"value\"}",
             notes = "JSON representation of config")
-    private String configJson;
+    @TableField(typeHandler = JSONObjectHandler.class)
+    private FlinkJobConfigInfo configJson;
 
     @ApiModelProperty(
             value = "Jar JSON",
@@ -103,7 +115,7 @@ public class JobHistory implements Serializable {
     private String jarJson;
 
     @ApiModelProperty(
-            value = "Cluster JSON",
+            value = "ClusterInstance JSON",
             dataType = "String",
             example = "{\"clusterName\": \"exampleCluster\"}",
             notes = "JSON representation of the cluster")
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/JobInfoDetail.java b/dinky-admin/src/main/java/org/dinky/data/model/JobInfoDetail.java
index 9e84548fd1..64f2426b3f 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/JobInfoDetail.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/JobInfoDetail.java
@@ -44,8 +44,8 @@ public class JobInfoDetail {
     @ApiModelProperty(value = "Job Instance", notes = "Details about the job instance")
     private JobInstance instance;
 
-    @ApiModelProperty(value = "Cluster", notes = "Details about the cluster")
-    private Cluster cluster;
+    @ApiModelProperty(value = "ClusterInstance", notes = "Details about the cluster instance")
+    private ClusterInstance clusterInstance;
 
     @ApiModelProperty(value = "Cluster Configuration", notes = "Details about the cluster configuration")
     private ClusterConfigurationDTO clusterConfiguration;
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/JobInstance.java b/dinky-admin/src/main/java/org/dinky/data/model/JobInstance.java
index 6478080c70..31b9dd637d 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/JobInstance.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/JobInstance.java
@@ -76,10 +76,10 @@ public class JobInstance implements Serializable {
     private Integer step;
 
     @ApiModelProperty(
-            value = "Cluster ID",
+            value = "ClusterInstance ID",
             dataType = "Integer",
             example = "1",
-            notes = "Cluster ID associated with the job instance")
+            notes = "ClusterInstance ID associated with the job instance")
     private Integer clusterId;
 
     @ApiModelProperty(value = "JID", dataType = "String", notes = "JID of the job instance")
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/Task.java b/dinky-admin/src/main/java/org/dinky/data/model/Task.java
index f51314cc4b..e6edab1c77 100644
--- a/dinky-admin/src/main/java/org/dinky/data/model/Task.java
+++ b/dinky-admin/src/main/java/org/dinky/data/model/Task.java
@@ -94,7 +94,7 @@ public class Task extends SuperEntity {
     private Boolean batchModel;
 
     @ApiModelProperty(
-            value = "Cluster ID",
+            value = "ClusterInstance ID",
             dataType = "Integer",
             example = "2001",
             notes = "ID of the cluster associated with the task")
diff --git a/dinky-admin/src/main/java/org/dinky/data/typehandler/AbstractJsonTypeHandler.java b/dinky-admin/src/main/java/org/dinky/data/typehandler/AbstractJsonTypeHandler.java
new file mode 100644
index 0000000000..e20ad3c053
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/data/typehandler/AbstractJsonTypeHandler.java
@@ -0,0 +1,59 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.data.typehandler;
+
+import org.apache.ibatis.type.BaseTypeHandler;
+import org.apache.ibatis.type.JdbcType;
+
+import java.sql.CallableStatement;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public abstract class AbstractJsonTypeHandler<T> extends BaseTypeHandler<T> {
+
+    @Override
+    public void setNonNullParameter(PreparedStatement ps, int i, T parameter, JdbcType jdbcType) throws SQLException {
+        ps.setString(i, toJson(parameter));
+    }
+
+    @Override
+    public T getNullableResult(ResultSet rs, String columnName) throws SQLException {
+        String json = rs.getString(columnName);
+        return parse(json);
+    }
+
+    @Override
+    public T getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
+        final String json = rs.getString(columnIndex);
+        return parse(json);
+    }
+
+    @Override
+    public T getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
+        final String json = cs.getString(columnIndex);
+        return parse(json);
+    }
+
+    protected abstract T parse(String json);
+
+    protected abstract String toJson(T obj);
+}
diff --git a/dinky-admin/src/main/java/org/dinky/data/typehandler/JSONObjectHandler.java b/dinky-admin/src/main/java/org/dinky/data/typehandler/JSONObjectHandler.java
new file mode 100644
index 0000000000..6a81b5bf28
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/data/typehandler/JSONObjectHandler.java
@@ -0,0 +1,72 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.data.typehandler;
+
+import org.dinky.data.flink.checkpoint.CheckPointOverView;
+import org.dinky.data.flink.config.CheckpointConfigInfo;
+import org.dinky.data.flink.config.FlinkJobConfigInfo;
+import org.dinky.data.flink.exceptions.FlinkJobExceptionsDetail;
+import org.dinky.data.flink.job.FlinkJobDetailInfo;
+
+import org.apache.ibatis.type.JdbcType;
+import org.apache.ibatis.type.MappedJdbcTypes;
+import org.apache.ibatis.type.MappedTypes;
+
+import com.alibaba.fastjson.JSONValidator;
+
+import cn.hutool.json.JSONUtil;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * @author ZackYoung
+ * @version 1.0
+ * @date 2022/1/12
+ */
+@Slf4j
+@MappedJdbcTypes(value = JdbcType.VARCHAR, includeNullJdbcType = true)
+@MappedTypes({
+    FlinkJobDetailInfo.class,
+    FlinkJobExceptionsDetail.class,
+    CheckPointOverView.class,
+    CheckpointConfigInfo.class,
+    FlinkJobConfigInfo.class,
+})
+public class JSONObjectHandler<T> extends AbstractJsonTypeHandler<T> {
+
+    private final Class<T> type;
+
+    public JSONObjectHandler(Class<T> type) {
+        this.type = type;
+    }
+
+    @Override
+    protected T parse(String content) {
+        if (content == null || !JSONValidator.from(content).validate()) {
+            log.debug("unknown json:{}", content);
+            return null;
+        }
+        return JSONUtil.toBean(content, type);
+    }
+
+    @Override
+    protected String toJson(T object) {
+        return JSONUtil.toJsonStr(object);
+    }
+}
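Note: JSONObjectHandler above is what lets entity fields hold the @MappedTypes beans while the underlying column stays VARCHAR JSON. A minimal usage sketch mirroring the JobHistory mapping earlier in this patch (jobHistoryMapper is a hypothetical MyBatis-Plus BaseMapper<JobHistory>; the id value is illustrative):

    // Declared on the entity: the handler serializes on write and parses on read.
    @TableField(typeHandler = JSONObjectHandler.class)
    private CheckPointOverView checkpointsJson;

    // On read, MyBatis hands the VARCHAR to parse(), which validates the JSON
    // (fastjson JSONValidator) and maps it onto the bean (hutool JSONUtil.toBean).
    JobHistory history = jobHistoryMapper.selectById(1);
    CheckPointOverView overview = history.getCheckpointsJson(); // already deserialized
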
diff --git a/dinky-admin/src/main/java/org/dinky/job/Job2MysqlHandler.java b/dinky-admin/src/main/java/org/dinky/job/Job2MysqlHandler.java
index 03e83a5ae4..530bb04d79 100644
--- a/dinky-admin/src/main/java/org/dinky/job/Job2MysqlHandler.java
+++ b/dinky-admin/src/main/java/org/dinky/job/Job2MysqlHandler.java
@@ -24,7 +24,7 @@
 import org.dinky.daemon.task.DaemonFactory;
 import org.dinky.daemon.task.DaemonTaskConfig;
 import org.dinky.data.enums.JobStatus;
-import org.dinky.data.model.Cluster;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.model.History;
 import org.dinky.data.model.JobHistory;
 import org.dinky.data.model.JobInstance;
@@ -128,33 +128,33 @@ public boolean success() {
         history.setJobManagerAddress(job.isUseGateway() ? job.getJobManagerAddress() : null);
 
         Integer clusterId = job.getJobConfig().getClusterId();
-        Cluster cluster;
+        ClusterInstance clusterInstance;
         final Integer clusterConfigurationId = job.getJobConfig().getClusterConfigurationId();
         if (job.isUseGateway()) {
-            cluster = clusterInstanceService.registersCluster(Cluster.autoRegistersCluster(
+            clusterInstance = clusterInstanceService.registersCluster(ClusterInstance.autoRegistersCluster(
                     job.getJobManagerAddress(),
                     job.getJobId(),
                     job.getJobConfig().getJobName() + "_" + LocalDateTime.now(),
                     job.getType().getLongValue(),
                     clusterConfigurationId,
                     taskId));
-            if (Asserts.isNotNull(cluster)) {
-                clusterId = cluster.getId();
+            if (Asserts.isNotNull(clusterInstance)) {
+                clusterId = clusterInstance.getId();
             }
         } else if (GatewayType.LOCAL.equalsValue(job.getJobConfig().getType())
                 && Asserts.isNotNullString(job.getJobManagerAddress())) {
-            cluster = clusterInstanceService.registersCluster(Cluster.autoRegistersCluster(
+            clusterInstance = clusterInstanceService.registersCluster(ClusterInstance.autoRegistersCluster(
                     job.getJobManagerAddress(),
                     job.getJobId(),
                     job.getJobConfig().getJobName() + "_" + LocalDateTime.now(),
                     job.getType().getLongValue(),
                     null,
                     taskId));
-            if (Asserts.isNotNull(cluster)) {
-                clusterId = cluster.getId();
+            if (Asserts.isNotNull(clusterInstance)) {
+                clusterId = clusterInstance.getId();
             }
         } else {
-            cluster = clusterInstanceService.getById(clusterId);
+            clusterInstance = clusterInstanceService.getById(clusterId);
         }
 
         history.setClusterId(clusterId);
@@ -186,7 +186,7 @@ public boolean success() {
         JobHistory.JobHistoryBuilder jobHistoryBuilder = JobHistory.builder();
         JobHistory jobHistory = jobHistoryBuilder
                 .id(jobInstance.getId())
-                .clusterJson(JsonUtils.toJsonString(cluster))
+                .clusterJson(JsonUtils.toJsonString(clusterInstance))
                 .jarJson(
                         Asserts.isNotNull(job.getJobConfig().getJarId())
                                 ? JsonUtils.toJsonString(
diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java
index dc645a7ed7..f29c1d5229 100644
--- a/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java
+++ b/dinky-admin/src/main/java/org/dinky/job/handler/JobAlertHandler.java
@@ -22,8 +22,8 @@
 import org.dinky.alert.Alert;
 import org.dinky.alert.AlertConfig;
 import org.dinky.alert.AlertResult;
-import org.dinky.alert.Rules.CheckpointsRule;
-import org.dinky.alert.Rules.ExceptionRule;
+import org.dinky.alert.rules.CheckpointsRule;
+import org.dinky.alert.rules.ExceptionRule;
 import org.dinky.assertion.Asserts;
 import org.dinky.context.FreeMarkerHolder;
 import org.dinky.context.SpringContextUtils;
@@ -126,9 +126,12 @@ public JobAlertHandler() {
     public void check(JobInfoDetail jobInfoDetail) {
         ruleFacts.put(AlertRuleOptions.JOB_ALERT_RULE_TIME, TimeUtil.nowStr());
         ruleFacts.put(AlertRuleOptions.JOB_ALERT_RULE_JOB_DETAIL, jobInfoDetail);
-        ruleFacts.put(
-                AlertRuleOptions.JOB_ALERT_RULE_JOB_NAME,
-                jobInfoDetail.getJobDataDto().getJob());
+        if (Asserts.isNotNull(jobInfoDetail.getJobDataDto().getJob())) {
+            ruleFacts.put(
+                    AlertRuleOptions.JOB_ALERT_RULE_JOB_NAME,
+                    jobInfoDetail.getJobDataDto().getJob());
+        }
+
         ruleFacts.put(
                 AlertRuleOptions.JOB_ALERT_RULE_KEY,
                 jobInfoDetail.getInstance().getId());
         ruleFacts.put(AlertRuleOptions.JOB_ALERT_RULE_JOB_INSTANCE, jobInfoDetail.getInstance());
@@ -138,10 +141,13 @@ public void check(JobInfoDetail jobInfoDetail) {
         ruleFacts.put(
                 AlertRuleOptions.JOB_ALERT_RULE_END_TIME,
                 TimeUtil.convertTimeToString(jobInfoDetail.getInstance().getFinishTime()));
-        ruleFacts.put(
-                AlertRuleOptions.JOB_ALERT_RULE_CHECK_POINTS,
-                jobInfoDetail.getJobDataDto().getCheckpoints());
-        ruleFacts.put(AlertRuleOptions.JOB_ALERT_RULE_CLUSTER, jobInfoDetail.getCluster());
+        if (Asserts.isNotNull(jobInfoDetail.getJobDataDto().getCheckpoints())) {
+            ruleFacts.put(
+                    AlertRuleOptions.JOB_ALERT_RULE_CHECK_POINTS,
+                    jobInfoDetail.getJobDataDto().getCheckpoints());
+        }
+
+        ruleFacts.put(AlertRuleOptions.JOB_ALERT_RULE_CLUSTER, jobInfoDetail.getClusterInstance());
         ruleFacts.put(
                 AlertRuleOptions.JOB_ALERT_RULE_EXCEPTIONS,
                 jobInfoDetail.getJobDataDto().getExceptions());
diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobMetricsHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobMetricsHandler.java
index fd8afdf0cf..6de3361297 100644
--- a/dinky-admin/src/main/java/org/dinky/job/handler/JobMetricsHandler.java
+++ b/dinky-admin/src/main/java/org/dinky/job/handler/JobMetricsHandler.java
@@ -53,7 +53,8 @@ public class JobMetricsHandler {
     */
     public static void writeFlinkMetrics(JobInfoDetail jobInfoDetail) {
         Map<String, Map<String, String>> customMetricsList = jobInfoDetail.getCustomMetricsMap();
-        String[] jobManagerUrls = jobInfoDetail.getCluster().getJobManagerHost().split(",");
+        String[] jobManagerUrls =
+                jobInfoDetail.getClusterInstance().getJobManagerHost().split(",");
         String jobId = jobInfoDetail.getInstance().getJid();
 
         // Create a CompletableFuture array for concurrent acquisition of indicator data
diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java
index ea29e096c8..f2fa56d637 100644
--- a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java
+++ b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java
@@ -27,6 +27,8 @@
 import org.dinky.data.dto.JobDataDto;
 import org.dinky.data.enums.JobStatus;
 import org.dinky.data.flink.backpressure.FlinkJobNodeBackPressure;
+import org.dinky.data.flink.checkpoint.CheckPointOverView;
+import org.dinky.data.flink.config.CheckpointConfigInfo;
 import org.dinky.data.flink.config.FlinkJobConfigInfo;
 import org.dinky.data.flink.exceptions.FlinkJobExceptionsDetail;
 import org.dinky.data.flink.job.FlinkJobDetailInfo;
@@ -93,7 +95,7 @@ public static boolean refreshJob(JobInfoDetail jobInfoDetail, boolean needSave)
 
         JobDataDto jobDataDto = getJobHistory(
                 jobInstance.getId(),
-                jobInfoDetail.getCluster().getJobManagerHost(),
+                jobInfoDetail.getClusterInstance().getJobManagerHost(),
                 jobInfoDetail.getInstance().getJid());
 
         if (Asserts.isNull(jobDataDto.getJob()) || jobDataDto.isError()) {
@@ -190,10 +192,10 @@ public static JobDataDto getJobHistory(Integer id, String jobManagerHost, String
                 });
 
         return builder.id(id)
-                .checkpoints(api.getCheckPoints(jobId))
-                .checkpointsConfig(api.getCheckPointsConfig(jobId))
-                .exceptions(
-                        JsonUtils.toJavaBean(api.getException(jobId).toString(), FlinkJobExceptionsDetail.class))
+                .checkpoints(JSONUtil.toBean(api.getCheckPoints(jobId).toString(), CheckPointOverView.class))
+                .checkpointsConfig(
+                        JSONUtil.toBean(api.getCheckPointsConfig(jobId).toString(), CheckpointConfigInfo.class))
+                .exceptions(JSONUtil.toBean(api.getException(jobId).toString(), FlinkJobExceptionsDetail.class))
                 .job(flinkJobDetailInfo)
                 .config(jobConfigInfo)
                 .build();
@@ -215,7 +217,7 @@ private static JobStatus getJobStatus(JobInfoDetail jobInfoDetail) {
 
         if (!Asserts.isNull(clusterCfg)) {
             try {
-                String appId = jobInfoDetail.getCluster().getName();
+                String appId = jobInfoDetail.getClusterInstance().getName();
                 GatewayConfig gatewayConfig = GatewayConfig.build(clusterCfg.getConfig());
                 gatewayConfig.getClusterConfig().setAppId(appId);
 
@@ -242,7 +244,7 @@ private static JobStatus getJobStatus(JobInfoDetail jobInfoDetail) {
     private static void handleJobDone(JobInfoDetail jobInfoDetail) {
         JobInstance jobInstance = jobInfoDetail.getInstance();
         JobDataDto jobDataDto = jobInfoDetail.getJobDataDto();
-        String clusterType = jobInfoDetail.getCluster().getType();
+        String clusterType = jobInfoDetail.getClusterInstance().getType();
 
         if (GatewayType.isDeployCluster(clusterType)) {
             JobConfig jobConfig = new JobConfig();
diff --git a/dinky-admin/src/main/java/org/dinky/mapper/ClusterInstanceMapper.java b/dinky-admin/src/main/java/org/dinky/mapper/ClusterInstanceMapper.java
index 9e3d590b9c..e6457a0f5c 100644
--- a/dinky-admin/src/main/java/org/dinky/mapper/ClusterInstanceMapper.java
+++ b/dinky-admin/src/main/java/org/dinky/mapper/ClusterInstanceMapper.java
@@ -19,7 +19,7 @@
 
 package org.dinky.mapper;
 
-import org.dinky.data.model.Cluster;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.mybatis.mapper.SuperMapper;
 
 import org.apache.ibatis.annotations.Mapper;
@@ -28,6 +28,6 @@
 
 /** ClusterInstanceMapper */
 @Mapper
-public interface ClusterInstanceMapper extends SuperMapper<Cluster> {
-    List<Cluster> listSessionEnable();
+public interface ClusterInstanceMapper extends SuperMapper<ClusterInstance> {
+    List<ClusterInstance> listSessionEnable();
 }
diff --git a/dinky-admin/src/main/java/org/dinky/service/ClusterInstanceService.java b/dinky-admin/src/main/java/org/dinky/service/ClusterInstanceService.java
index 2b59b6ddfe..1d9ce0416c 100644
--- a/dinky-admin/src/main/java/org/dinky/service/ClusterInstanceService.java
+++ b/dinky-admin/src/main/java/org/dinky/service/ClusterInstanceService.java
@@ -20,13 +20,13 @@
 package org.dinky.service;
 
 import org.dinky.cluster.FlinkClusterInfo;
-import org.dinky.data.model.Cluster;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.mybatis.service.ISuperService;
 
 import java.util.List;
 
 /** ClusterInstanceService */
-public interface ClusterInstanceService extends ISuperService<Cluster> {
+public interface ClusterInstanceService extends ISuperService<ClusterInstance> {
 
     /**
     * check cluster heartbeat status
@@ -40,10 +40,10 @@ public interface ClusterInstanceService extends ISuperService<ClusterInstance> {
     /**
     * get job manager address
     *
-     * @param cluster {@link Cluster} cluster instance
+     * @param clusterInstance {@link ClusterInstance} cluster instance
     * @return {@link String} eg: host1:8081
     */
-    String getJobManagerAddress(Cluster cluster);
+    String getJobManagerAddress(ClusterInstance clusterInstance);
 
     /**
     * build environment address
@@ -72,31 +72,31 @@ public interface ClusterInstanceService extends ISuperService<ClusterInstance> {
     /**
     * list enabled cluster instances
     *
-     * @return {@link List<Cluster>}
+     * @return {@link List<ClusterInstance>}
     */
-    List<Cluster> listEnabledAllClusterInstance();
+    List<ClusterInstance> listEnabledAllClusterInstance();
 
     /**
     * list session enable cluster instances
     *
-     * @return {@link List<Cluster>}
+     * @return {@link List<ClusterInstance>}
     */
-    List<Cluster> listSessionEnable();
+    List<ClusterInstance> listSessionEnable();
 
     /**
     * list auto enable cluster instances
     *
-     * @return {@link List<Cluster>}
+     * @return {@link List<ClusterInstance>}
     */
-    List<Cluster> listAutoEnable();
+    List<ClusterInstance> listAutoEnable();
 
     /**
     * register cluster instance
     *
-     * @param cluster {@link Cluster} cluster instance
-     * @return {@link Cluster}
+     * @param clusterInstance {@link ClusterInstance} cluster instance
+     * @return {@link ClusterInstance}
     */
-    Cluster registersCluster(Cluster cluster);
+    ClusterInstance registersCluster(ClusterInstance clusterInstance);
 
     /**
     * delete cluster instance by id
@@ -132,7 +132,7 @@ public interface ClusterInstanceService extends ISuperService<ClusterInstance> {
     * deploy session cluster
     *
     * @param id {@link Integer} cluster id
-     * @return {@link Cluster}
+     * @return {@link ClusterInstance}
     */
-    Cluster deploySessionCluster(Integer id);
+    ClusterInstance deploySessionCluster(Integer id);
 }
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java
index 1ae29250de..2a990e1e0b 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java
@@ -24,8 +24,8 @@
 import org.dinky.cluster.FlinkCluster;
 import org.dinky.cluster.FlinkClusterInfo;
 import org.dinky.constant.FlinkConstant;
-import org.dinky.data.model.Cluster;
 import org.dinky.data.model.ClusterConfiguration;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.gateway.config.GatewayConfig;
 import org.dinky.gateway.exception.GatewayException;
 import org.dinky.gateway.model.FlinkClusterConfig;
@@ -42,9 +42,11 @@
 import java.util.List;
 
 import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
 
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 
+import cn.hutool.core.util.StrUtil;
 import lombok.RequiredArgsConstructor;
 
 /**
@@ -54,7 +56,7 @@
 */
 @Service
 @RequiredArgsConstructor
-public class ClusterInstanceServiceImpl extends SuperServiceImpl<ClusterInstanceMapper, Cluster>
+public class ClusterInstanceServiceImpl extends SuperServiceImpl<ClusterInstanceMapper, ClusterInstance>
         implements ClusterInstanceService {
 
     private final ClusterConfigurationService clusterConfigurationService;
@@ -65,17 +67,18 @@ public FlinkClusterInfo checkHeartBeat(String hosts, String host) {
     }
 
     @Override
-    public String getJobManagerAddress(Cluster cluster) {
-        Assert.check(cluster);
-        FlinkClusterInfo info = FlinkCluster.testFlinkJobManagerIP(cluster.getHosts(), cluster.getJobManagerHost());
+    public String getJobManagerAddress(ClusterInstance clusterInstance) {
+        Assert.check(clusterInstance);
+        FlinkClusterInfo info =
+                FlinkCluster.testFlinkJobManagerIP(clusterInstance.getHosts(), clusterInstance.getJobManagerHost());
         String host = null;
         if (info.isEffective()) {
             host = info.getJobManagerAddress();
         }
         Assert.checkHost(host);
-        if (!host.equals(cluster.getJobManagerHost())) {
-            cluster.setJobManagerHost(host);
-            updateById(cluster);
+        if (!host.equals(clusterInstance.getJobManagerHost())) {
+            clusterInstance.setJobManagerHost(host);
+            updateById(clusterInstance);
         }
         return host;
     }
@@ -108,25 +111,29 @@ public String buildLocalEnvironmentAddress() {
     }
 
     @Override
-    public List<Cluster> listEnabledAllClusterInstance() {
-        return this.list(new QueryWrapper<Cluster>().eq("enabled", 1));
+    public List<ClusterInstance> listEnabledAllClusterInstance() {
+        return this.list(new QueryWrapper<ClusterInstance>().eq("enabled", 1));
     }
 
     @Override
-    public List<Cluster> listSessionEnable() {
+    public List<ClusterInstance> listSessionEnable() {
         return baseMapper.listSessionEnable();
     }
 
     @Override
-    public List<Cluster> listAutoEnable() {
-        return list(new QueryWrapper<Cluster>().eq("enabled", 1).eq("auto_registers", 1));
+    public List<ClusterInstance> listAutoEnable() {
+        return list(new QueryWrapper<ClusterInstance>().eq("enabled", 1).eq("auto_registers", 1));
     }
 
     @Override
-    public Cluster registersCluster(Cluster cluster) {
-        checkHealth(cluster);
-        saveOrUpdate(cluster);
-        return cluster;
+    @Transactional(rollbackFor = Exception.class)
+    public ClusterInstance registersCluster(ClusterInstance clusterInstance) {
+        checkHealth(clusterInstance);
+        if (StrUtil.isEmpty(clusterInstance.getAlias())) {
+            clusterInstance.setAlias(clusterInstance.getName());
+        }
+        saveOrUpdate(clusterInstance);
+        return clusterInstance;
     }
 
     /**
@@ -140,17 +147,17 @@ public Boolean deleteClusterInstanceById(Integer id) {
 
     @Override
     public Boolean modifyClusterInstanceStatus(Integer id) {
-        Cluster clusterInfo = getById(id);
-        clusterInfo.setEnabled(!clusterInfo.getEnabled());
-        checkHealth(clusterInfo);
-        return updateById(clusterInfo);
+        ClusterInstance clusterInstanceInfo = getById(id);
+        clusterInstanceInfo.setEnabled(!clusterInstanceInfo.getEnabled());
+        checkHealth(clusterInstanceInfo);
+        return updateById(clusterInstanceInfo);
     }
 
     @Override
     public Integer recycleCluster() {
-        List<Cluster> clusters = listAutoEnable();
+        List<ClusterInstance> clusterInstances = listAutoEnable();
         int count = 0;
-        for (Cluster item : clusters) {
+        for (ClusterInstance item : clusterInstances) {
             if ((!checkHealth(item)) && removeById(item)) {
                 count++;
             }
@@ -160,20 +167,20 @@ public Integer recycleCluster() {
 
     @Override
     public void killCluster(Integer id) {
-        Cluster cluster = getById(id);
-        if (Asserts.isNull(cluster)) {
-            throw new GatewayException("The cluster does not exist.");
-        } else if (!checkHealth(cluster)) {
-            throw new GatewayException("The cluster has been killed.");
+        ClusterInstance clusterInstance = getById(id);
+        if (Asserts.isNull(clusterInstance)) {
+            throw new GatewayException("The cluster instance does not exist.");
+        } else if (!checkHealth(clusterInstance)) {
+            throw new GatewayException("The cluster instance has been killed.");
         }
-        Integer clusterConfigurationId = cluster.getClusterConfigurationId();
+        Integer clusterConfigurationId = clusterInstance.getClusterConfigurationId();
         FlinkClusterConfig flinkClusterConfig = clusterConfigurationService.getFlinkClusterCfg(clusterConfigurationId);
         GatewayConfig gatewayConfig = GatewayConfig.build(flinkClusterConfig);
-        JobManager.killCluster(gatewayConfig, cluster.getName());
+        JobManager.killCluster(gatewayConfig, clusterInstance.getName());
     }
 
     @Override
-    public Cluster deploySessionCluster(Integer id) {
+    public ClusterInstance deploySessionCluster(Integer id) {
         ClusterConfiguration clusterCfg = clusterConfigurationService.getClusterConfigById(id);
         if (Asserts.isNull(clusterCfg)) {
             throw new GatewayException("The cluster configuration does not exist.");
@@ -181,7 +188,7 @@ public ClusterInstance deploySessionCluster(Integer id) {
         GatewayConfig gatewayConfig =
                 GatewayConfig.build(FlinkClusterConfig.create(clusterCfg.getType(), clusterCfg.getConfigJson()));
         GatewayResult gatewayResult = JobManager.deploySessionCluster(gatewayConfig);
-        return registersCluster(Cluster.autoRegistersCluster(
+        return registersCluster(ClusterInstance.autoRegistersCluster(
                 gatewayResult.getWebURL().replace("http://", ""),
                 gatewayResult.getId(),
                 clusterCfg.getName() + "_" + LocalDateTime.now(),
@@ -190,16 +197,16 @@ public ClusterInstance deploySessionCluster(Integer id) {
                 null));
     }
 
-    private boolean checkHealth(Cluster cluster) {
-        FlinkClusterInfo info = checkHeartBeat(cluster.getHosts(), cluster.getJobManagerHost());
+    private boolean checkHealth(ClusterInstance clusterInstance) {
+        FlinkClusterInfo info = checkHeartBeat(clusterInstance.getHosts(), clusterInstance.getJobManagerHost());
         if (!info.isEffective()) {
-            cluster.setJobManagerHost("");
-            cluster.setStatus(0);
+            clusterInstance.setJobManagerHost("");
+            clusterInstance.setStatus(0);
             return false;
         } else {
-            cluster.setJobManagerHost(info.getJobManagerAddress());
-            cluster.setStatus(1);
-            cluster.setVersion(info.getVersion());
+            clusterInstance.setJobManagerHost(info.getJobManagerAddress());
+            clusterInstance.setStatus(1);
+            clusterInstance.setVersion(info.getVersion());
             return true;
         }
     }
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java
index 641744097b..42367bb1e9 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java
@@ -27,8 +27,8 @@
 import org.dinky.data.dto.JobDataDto;
 import org.dinky.data.enums.JobStatus;
 import org.dinky.data.enums.Status;
-import org.dinky.data.model.Cluster;
 import org.dinky.data.model.ClusterConfiguration;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.model.History;
 import org.dinky.data.model.JobInfoDetail;
 import org.dinky.data.model.JobInstance;
@@ -156,8 +156,8 @@ public JobInfoDetail getJobInfoDetailInfo(JobInstance jobInstance) {
         Asserts.checkNull(jobInstance, Status.JOB_INSTANCE_NOT_EXIST.getMessage());
         jobInfoDetail.setInstance(jobInstance);
 
-        Cluster cluster = clusterInstanceService.getById(jobInstance.getClusterId());
-        jobInfoDetail.setCluster(cluster);
+        ClusterInstance clusterInstance = clusterInstanceService.getById(jobInstance.getClusterId());
+        jobInfoDetail.setClusterInstance(clusterInstance);
 
         History history = historyService.getById(jobInstance.getHistoryId());
         history.setConfig(JsonUtils.parseObject(history.getConfigJson()));
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java
index 332edf04d1..46db683cc8 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java
@@ -26,7 +26,7 @@
 import org.dinky.data.dto.StudioDDLDTO;
 import org.dinky.data.dto.StudioMetaStoreDTO;
 import org.dinky.data.model.Catalog;
-import org.dinky.data.model.Cluster;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.model.DataBase;
 import org.dinky.data.model.FlinkColumn;
 import org.dinky.data.model.Schema;
@@ -131,10 +131,10 @@ public LineageResult getLineage(StudioCADTO studioCADTO) {
 
     @Override
     public List<JsonNode> listFlinkJobs(Integer clusterId) {
-        Cluster cluster = clusterInstanceService.getById(clusterId);
-        Asserts.checkNotNull(cluster, "该集群不存在");
+        ClusterInstance clusterInstance = clusterInstanceService.getById(clusterId);
+        Asserts.checkNotNull(clusterInstance, "该集群不存在");
         try {
-            return FlinkAPI.build(cluster.getJobManagerHost()).listJobs();
+            return FlinkAPI.build(clusterInstance.getJobManagerHost()).listJobs();
         } catch (Exception e) {
             log.info("查询作业时集群不存在");
         }
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java
index 4b7bb7ef5d..b75e70f261 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java
@@ -36,8 +36,8 @@
 import org.dinky.data.exception.TaskNotDoneException;
 import org.dinky.data.model.AlertGroup;
 import org.dinky.data.model.Catalogue;
-import org.dinky.data.model.Cluster;
 import org.dinky.data.model.ClusterConfiguration;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.model.DataBase;
 import org.dinky.data.model.Jar;
 import org.dinky.data.model.JobInfoDetail;
@@ -308,8 +308,8 @@ public JobResult restartTask(Integer id, String savePointPath) throws ExcuteExce
     public boolean cancelTaskJob(TaskDTO task) {
         JobInstance jobInstance = jobInstanceService.getById(task.getJobInstanceId());
         Assert.notNull(jobInstance, Status.JOB_INSTANCE_NOT_EXIST.getMessage());
-        Cluster cluster = clusterInstanceService.getById(jobInstance.getClusterId());
-        Assert.notNull(cluster, Status.CLUSTER_NOT_EXIST.getMessage());
+        ClusterInstance clusterInstance = clusterInstanceService.getById(jobInstance.getClusterId());
+        Assert.notNull(clusterInstance, Status.CLUSTER_NOT_EXIST.getMessage());
 
         JobManager jobManager = JobManager.build(buildJobConfig(task));
         boolean cancelled = jobManager.cancel(jobInstance.getJid());
@@ -407,9 +407,9 @@ public TaskDTO getTaskInfoById(Integer id) {
             BeanUtil.copyProperties(mTask, taskDTO);
 
             if (taskDTO.getClusterId() != null) {
-                Cluster cluster = clusterInstanceService.getById(taskDTO.getClusterId());
-                if (cluster != null) {
-                    taskDTO.setClusterName(cluster.getAlias());
+                ClusterInstance clusterInstance = clusterInstanceService.getById(taskDTO.getClusterId());
+                if (clusterInstance != null) {
+                    taskDTO.setClusterName(clusterInstance.getAlias());
                 }
             }
             if (taskDTO.getJobInstanceId() != null) {
@@ -588,9 +588,9 @@ public Integer queryAllSizeByName(String name) {
     public String exportJsonByTaskId(Integer taskId) {
         TaskDTO task = getTaskInfoById(taskId);
         if (Asserts.isNotNull(task.getClusterId())) {
-            Cluster cluster = clusterInstanceService.getById(task.getClusterId());
-            if (Asserts.isNotNull(cluster)) {
-                task.setClusterName(cluster.getName());
+            ClusterInstance clusterInstance = clusterInstanceService.getById(task.getClusterId());
+            if (Asserts.isNotNull(clusterInstance)) {
+                task.setClusterName(clusterInstance.getName());
             }
         }
 
@@ -675,10 +675,10 @@ public Result<Void> buildTaskByJsonNode(JsonNode jsonNode, ObjectMapper mapper)
         for (JsonNode json : jsonNodes) {
             TaskDTO task = mapper.treeToValue(json, TaskDTO.class);
             if (Asserts.isNotNull(task.getClusterName())) {
-                Cluster cluster =
-                        clusterInstanceService.getOne(new QueryWrapper<Cluster>().eq("name", task.getClusterName()));
-                if (Asserts.isNotNull(cluster)) {
-                    task.setClusterId(cluster.getId());
+                ClusterInstance clusterInstance = clusterInstanceService.getOne(
+                        new QueryWrapper<ClusterInstance>().eq("name", task.getClusterName()));
+                if (Asserts.isNotNull(clusterInstance)) {
+                    task.setClusterId(clusterInstance.getId());
                 }
             }
 
diff --git a/dinky-admin/src/main/resources/db/db-h2.sql b/dinky-admin/src/main/resources/db/db-h2.sql
index 04ad8e3512..3be2e2143b 100644
--- a/dinky-admin/src/main/resources/db/db-h2.sql
+++ b/dinky-admin/src/main/resources/db/db-h2.sql
@@ -1948,12 +1948,12 @@ create table if not exists dinky_alert_rules
 -- ----------------------------
 -- Records of dinky_alert_rule
 -- ----------------------------
--- INSERT INTO dinky_alert_rules VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02');
--- INSERT INTO dinky_alert_rules VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43');
--- INSERT INTO dinky_alert_rules VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12');
-INSERT INTO dinky_alert_rules VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkPoints.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03');
-INSERT INTO dinky_alert_rules VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12');
-INSERT INTO dinky_alert_rules VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkPoints.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35');
+INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02');
+INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43');
+INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12');
+INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03');
+INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12');
+INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35');
 
 INSERT INTO dinky_alert_template VALUES (1, 'Default', '
 - **Job Name :** ${task.name}
@@ -1961,6 +1961,6 @@ INSERT INTO dinky_alert_template VALUES (1, 'Default', '
 - **Alert Time :** ${time}
 - **Start Time :** ${startTime}
 - **End Time :** ${endTime}
-- **${exceptions.get("root-exception").toString()?substring(0,20)}**
+- **${exceptions.rootException.substring(0,20)}**
 [Go toTask Web](http://${taskUrl})
 ', 1, null, null);
VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); INSERT INTO dinky_alert_template VALUES (1, 'Default', ' - **Job Name :** ${task.name} @@ -1961,6 +1961,6 @@ INSERT INTO dinky_alert_template VALUES (1, 'Default', ' - **Alert Time :** ${time} - **Start Time :** ${startTime} - **End Time :** ${endTime} -- **${exceptions.get("root-exception").toString()?substring(0,20)}** +- **${exceptions.rootException.substring(0,20)}** [Go to Task Web](http://${taskUrl}) ', 1, null, null); diff --git a/dinky-admin/src/main/resources/mapper/ClusterInstanceMapper.xml b/dinky-admin/src/main/resources/mapper/ClusterInstanceMapper.xml index bc17ffe366..6a409b20b3 100644 --- a/dinky-admin/src/main/resources/mapper/ClusterInstanceMapper.xml +++ b/dinky-admin/src/main/resources/mapper/ClusterInstanceMapper.xml @@ -3,7 +3,7 @@ - + @@ -27,7 +27,7 @@ - select a.* from @@ -58,7 +58,7 @@ - select a.* from diff --git a/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/CheckpointsRule.java b/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/CheckpointsRule.java deleted file mode 100644 index 61229efb05..0000000000 --- a/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/CheckpointsRule.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- * - */ - -package org.dinky.alert.Rules; - -import java.util.concurrent.TimeUnit; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; - -public class CheckpointsRule { - - private final LoadingCache checkpointsCache; - - /** - * Constructor for initializing the CheckpointsRule object. - */ - public CheckpointsRule() { - checkpointsCache = CacheBuilder.newBuilder() - .expireAfterAccess(60, TimeUnit.SECONDS) - .build(CacheLoader.from(key -> null)); - } - - /** - * Checks if a checkpoint has expired. - * @param latest The latest checkpoint node. - * @param key The key used to identify the checkpoint. - * @param ckKey The checkpoint key to check for expiration. - * @return True if the checkpoint has expired, false otherwise. - */ - private boolean isExpire(JsonNode latest, String key, String ckKey) { - JsonNode his = checkpointsCache.getIfPresent(key); - if (latest.get(ckKey) == null || !latest.get(ckKey).has("trigger_timestamp")) { - return true; - } - long latestTime = latest.get(ckKey).get("trigger_timestamp").asLong(-1); - checkpointsCache.put(key, latest); - if (his != null) { - long hisTime = his.get(ckKey).get("trigger_timestamp").asLong(-1); - return hisTime == latestTime || System.currentTimeMillis() - latestTime > 60000; - } - - return false; - } - - /** - * Retrieves the checkpoint time for a specific key. - * @param key The key used to identify the checkpoint. - * @param checkpoints The checkpoints object containing relevant data. - * @return The checkpoint time, or null if the checkpoint has expired. - */ - public Long checkpointTime(String key, ObjectNode checkpoints) { - JsonNode latest = checkpoints.get("latest"); - if (isExpire(latest, key, "completed")) { - return null; - } - return latest.get("completed").get("end_to_end_duration").asLong(-1); - } - - /** - * Checks if a specific checkpoint has failed. - * @param key The key used to identify the checkpoint. - * @param checkpoints The checkpoints object containing relevant data. - * @return True if the checkpoint has failed, null if it has expired. - */ - public Boolean checkFailed(String key, ObjectNode checkpoints) { - JsonNode latest = checkpoints.get("latest"); - if (isExpire(latest, key, "failed")) { - return null; - } - return true; - } -} diff --git a/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/CheckpointsRule.java b/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/CheckpointsRule.java new file mode 100644 index 0000000000..d7afadf387 --- /dev/null +++ b/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/CheckpointsRule.java @@ -0,0 +1,137 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.alert.rules; + +import org.dinky.data.flink.checkpoint.CheckPointOverView; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CheckpointsRule { + + private static final Logger logger = LoggerFactory.getLogger(CheckpointsRule.class); + + private final LoadingCache checkpointsCache; + + /** + * Constructor for initializing the CheckpointsRule object. + */ + public CheckpointsRule() { + checkpointsCache = CacheBuilder.newBuilder() + .expireAfterAccess(60, TimeUnit.SECONDS) + .build(CacheLoader.from(key -> null)); + } + + /** + * Checks if a checkpoint has expired. + * @param latest The latest checkpoint node. + * @param key The key used to identify the checkpoint. + * @param ckKey The checkpoint key to check for expiration. + * @return True if the checkpoint has expired, false otherwise. + */ + private boolean isExpire(CheckPointOverView latest, String key, String ckKey) { + logger.debug("checkpointTime key: {} ,checkpoints: {}, key: {}", key, latest, ckKey); + + CheckPointOverView his = (CheckPointOverView) checkpointsCache.getIfPresent(key); + + switch (ckKey) { + case "completed": + if (his != null) { + CheckPointOverView.CompletedCheckpointStatistics completedCheckpointStatistics = + his.getLatestCheckpoints().getCompletedCheckpointStatistics(); + if (completedCheckpointStatistics != null) { + return Objects.equals(completedCheckpointStatistics.getStatus(), "completed"); + } + } + return false; + case "failed": + CheckPointOverView.FailedCheckpointStatistics failedCheckpointStatistics = null; + if (his != null) { + failedCheckpointStatistics = his.getLatestCheckpoints().getFailedCheckpointStatistics(); + } + long failureTimestamp = 0; + CheckPointOverView.LatestCheckpoints latestLatestCheckpoints = latest.getLatestCheckpoints(); + if (latestLatestCheckpoints != null + && latestLatestCheckpoints.getFailedCheckpointStatistics() != null) { + failureTimestamp = latestLatestCheckpoints + .getFailedCheckpointStatistics() + .getTriggerTimestamp(); + } + if (null == latestLatestCheckpoints || 0 == failureTimestamp) { + return true; + } + long latestTime = + latestLatestCheckpoints.getFailedCheckpointStatistics().getTriggerTimestamp(); + checkpointsCache.put(key, latest); + if (his != null) { + long hisTime = 0; + if (failedCheckpointStatistics != null) { + hisTime = failedCheckpointStatistics.getTriggerTimestamp(); + } + return hisTime == latestTime || System.currentTimeMillis() - latestTime > 60000; + } + return false; + + default: + return false; + } + } + + /** + * Retrieves the checkpoint time for a specific key. + * @param key The key used to identify the checkpoint. + * @param checkpoints The checkpoints object containing relevant data. + * @return The checkpoint time, or null if the checkpoint has expired. 
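+ * (Illustrative, with the rule text quoted from the dinky_alert_rules seed rows and the
+ * evaluation wiring assumed rather than shown here: the CUSTOM rule
+ * {@code checkpointRule.checkpointTime(#key,#checkPoints)} with operator {@code GE} and
+ * value {@code 1000} would fire for a returned duration of, say, 1500 ms, while a
+ * {@code null} return from an expired cache entry raises nothing.)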
*/ + public Long checkpointTime(String key, CheckPointOverView checkpoints) { + if (isExpire(checkpoints, key, "completed")) { + return null; + } + CheckPointOverView.LatestCheckpoints checkpointsLatestCheckpoints = checkpoints.getLatestCheckpoints(); + if (null == checkpointsLatestCheckpoints + || null == checkpointsLatestCheckpoints.getCompletedCheckpointStatistics()) { + return null; + } + return checkpoints + .getLatestCheckpoints() + .getCompletedCheckpointStatistics() + .getDuration(); + } + + /** + * Checks if a specific checkpoint has failed. + * @param key The key used to identify the checkpoint. + * @param checkpoints The checkpoints object containing relevant data. + * @return True if the checkpoint has failed, false if it has expired. + */ + public Boolean checkFailed(String key, CheckPointOverView checkpoints) { + return !isExpire(checkpoints, key, "failed"); + } +} diff --git a/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/ExceptionRule.java b/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/ExceptionRule.java similarity index 98% rename from dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/ExceptionRule.java rename to dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/ExceptionRule.java index 01ccc0606f..8716bf9759 100644 --- a/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/Rules/ExceptionRule.java +++ b/dinky-alert/dinky-alert-base/src/main/java/org/dinky/alert/rules/ExceptionRule.java @@ -17,7 +17,7 @@ * */ -package org.dinky.alert.Rules; +package org.dinky.alert.rules; import org.dinky.data.flink.exceptions.FlinkJobExceptionsDetail; diff --git a/dinky-common/src/main/java/org/dinky/data/flink/backpressure/FlinkJobNodeBackPressure.java b/dinky-common/src/main/java/org/dinky/data/flink/backpressure/FlinkJobNodeBackPressure.java index 14b8db13c0..40c5a14dcf 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/backpressure/FlinkJobNodeBackPressure.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/backpressure/FlinkJobNodeBackPressure.java @@ -22,11 +22,11 @@ import java.io.Serializable; import java.util.List; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -53,13 +53,12 @@ @ApiModel(value = "FlinkJobNodeBackPressure", description = "Flink Job Node BackPressure Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobNodeBackPressure implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "Status", required = true, notes = "Status", dataType = "String", example = "ok") - @JSONField(name = "status") + @JsonProperty("status") private String status; @ApiModelProperty( @@ -68,7 +67,7 @@ public class FlinkJobNodeBackPressure implements Serializable { notes = "BackpressureLevel", dataType = "String", example = "ok") - @JSONField(name = "backpressureLevel") + @JsonProperty("backpressureLevel") private String backpressureLevel; @ApiModelProperty( @@ -77,13 +76,25 @@ public class FlinkJobNodeBackPressure implements Serializable { notes = "EndTimestamp", dataType = "Long", example = "1696647436365") - @JSONField(name = "end-timestamp") + @JsonProperty("end-timestamp") private Long endTimestamp;
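+ // Hedged deserialization sketch (the ObjectMapper wiring is assumed, not shown in this patch):
+ //   FlinkJobNodeBackPressure bp = new com.fasterxml.jackson.databind.ObjectMapper()
+ //           .readValue(restResponseJson, FlinkJobNodeBackPressure.class);
+ // With @AllArgsConstructor dropped, the @JsonCreator constructor below is what lets Jackson
+ // bind kebab-case REST keys such as "end-timestamp" to these camelCase fields.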
@ApiModelProperty(value = "Subtasks", required = true, notes = "Subtasks", dataType = "List", example = "Subtasks") - @JSONField(name = "subtasks") + @JsonProperty("subtasks") private List subtasks; + @JsonCreator + public FlinkJobNodeBackPressure( + @JsonProperty("status") String status, + @JsonProperty("backpressureLevel") String backpressureLevel, + @JsonProperty("end-timestamp") Long endTimestamp, + @JsonProperty("subtasks") List subtasks) { + this.status = status; + this.backpressureLevel = backpressureLevel; + this.endTimestamp = endTimestamp; + this.subtasks = subtasks; + } + // double // @ApiModelProperty(value = "BackpressureLevel", required = true, notes = "BackpressureLevel", dataType = // "String", example = "ok") @@ -92,13 +103,12 @@ public class FlinkJobNodeBackPressure implements Serializable { @Data @Builder - @AllArgsConstructor @NoArgsConstructor @ApiModel(value = "FlinkJobNodeBackPressure-Subtasks", description = "Flink Job Node BackPressure Subtasks Info") public static class Subtasks { @ApiModelProperty(value = "Subtask", required = true, notes = "Subtask", dataType = "Integer", example = "0") - @JSONField(name = "subtask") + @JsonProperty("subtask") private Integer subtask; @ApiModelProperty( @@ -107,19 +117,19 @@ public static class Subtasks { notes = "BackpressureLevel", dataType = "String", example = "ok") - @JSONField(name = "backpressureLevel") + @JsonProperty("backpressureLevel") private String backpressureLevel; @ApiModelProperty(value = "Ratio", required = true, notes = "Ratio", dataType = "Double", example = "0") - @JSONField(name = "ratio") + @JsonProperty("ratio") private Double ratio; @ApiModelProperty(value = "IdleRatio", required = true, notes = "IdleRatio", dataType = "Double", example = "1") - @JSONField(name = "idleRatio") + @JsonProperty("idleRatio") private Double idleRatio; @ApiModelProperty(value = "BusyRatio", required = true, notes = "BusyRatio", dataType = "Double", example = "0") - @JSONField(name = "busyRatio") + @JsonProperty("busyRatio") private Double busyRatio; // // double @@ -127,5 +137,19 @@ public static class Subtasks { // = "String", example = "ok") // @JsonProperty("backpressure-level") // private String backpressureLevel; + + @JsonCreator + public Subtasks( + @JsonProperty("subtask") Integer subtask, + @JsonProperty("backpressureLevel") String backpressureLevel, + @JsonProperty("ratio") Double ratio, + @JsonProperty("idleRatio") Double idleRatio, + @JsonProperty("busyRatio") Double busyRatio) { + this.subtask = subtask; + this.backpressureLevel = backpressureLevel; + this.ratio = ratio; + this.idleRatio = idleRatio; + this.busyRatio = busyRatio; + } } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckPointOverView.java b/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckPointOverView.java new file mode 100644 index 0000000000..8ffad9727e --- /dev/null +++ b/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckPointOverView.java @@ -0,0 +1,433 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.flink.checkpoint; + +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +public class CheckPointOverView { + public static final String FIELD_NAME_COUNTS = "counts"; + + public static final String FIELD_NAME_SUMMARY = "summary"; + + public static final String FIELD_NAME_LATEST_CHECKPOINTS = "latest"; + + public static final String FIELD_NAME_HISTORY = "history"; + + @JsonProperty(FIELD_NAME_COUNTS) + private Counts counts; + + @JsonProperty(FIELD_NAME_SUMMARY) + private Summary summary; + + @JsonProperty(FIELD_NAME_LATEST_CHECKPOINTS) + private LatestCheckpoints latestCheckpoints; + + @JsonProperty(FIELD_NAME_HISTORY) + private List history; + + @JsonCreator + public CheckPointOverView( + @JsonProperty(FIELD_NAME_COUNTS) Counts counts, + @JsonProperty(FIELD_NAME_SUMMARY) Summary summary, + @JsonProperty(FIELD_NAME_LATEST_CHECKPOINTS) LatestCheckpoints latestCheckpoints, + @JsonProperty(FIELD_NAME_HISTORY) List history) { + this.counts = Preconditions.checkNotNull(counts); + this.summary = Preconditions.checkNotNull(summary); + this.latestCheckpoints = Preconditions.checkNotNull(latestCheckpoints); + this.history = Preconditions.checkNotNull(history); + } + + @Data + @NoArgsConstructor + public static final class Counts { + + public static final String FIELD_NAME_RESTORED_CHECKPOINTS = "restored"; + + public static final String FIELD_NAME_TOTAL_CHECKPOINTS = "total"; + + public static final String FIELD_NAME_IN_PROGRESS_CHECKPOINTS = "in_progress"; + + public static final String FIELD_NAME_COMPLETED_CHECKPOINTS = "completed"; + + public static final String FIELD_NAME_FAILED_CHECKPOINTS = "failed"; + + @JsonProperty(FIELD_NAME_RESTORED_CHECKPOINTS) + private long numberRestoredCheckpoints; + + @JsonProperty(FIELD_NAME_TOTAL_CHECKPOINTS) + private long totalNumberCheckpoints; + + @JsonProperty(FIELD_NAME_IN_PROGRESS_CHECKPOINTS) + private int numberInProgressCheckpoints; + + @JsonProperty(FIELD_NAME_COMPLETED_CHECKPOINTS) + private long numberCompletedCheckpoints; + + @JsonProperty(FIELD_NAME_FAILED_CHECKPOINTS) + private long numberFailedCheckpoints; + + @JsonCreator + public Counts( + @JsonProperty(FIELD_NAME_RESTORED_CHECKPOINTS) long numberRestoredCheckpoints, + @JsonProperty(FIELD_NAME_TOTAL_CHECKPOINTS) long totalNumberCheckpoints, + @JsonProperty(FIELD_NAME_IN_PROGRESS_CHECKPOINTS) int numberInProgressCheckpoints, + @JsonProperty(FIELD_NAME_COMPLETED_CHECKPOINTS) long numberCompletedCheckpoints, + @JsonProperty(FIELD_NAME_FAILED_CHECKPOINTS) long numberFailedCheckpoints) { + this.numberRestoredCheckpoints = numberRestoredCheckpoints; + this.totalNumberCheckpoints = totalNumberCheckpoints; + this.numberInProgressCheckpoints = numberInProgressCheckpoints; + this.numberCompletedCheckpoints = numberCompletedCheckpoints; + this.numberFailedCheckpoints = 
numberFailedCheckpoints; + } + } + + @Data + @NoArgsConstructor + public static final class StatsSummaryDto { + + public static final String FIELD_NAME_MINIMUM = "min"; + + public static final String FIELD_NAME_MAXIMUM = "max"; + + public static final String FIELD_NAME_AVERAGE = "avg"; + + public static final String FIELD_NAME_P50 = "p50"; + + public static final String FIELD_NAME_P90 = "p90"; + + public static final String FIELD_NAME_P95 = "p95"; + + public static final String FIELD_NAME_P99 = "p99"; + + public static final String FIELD_NAME_P999 = "p999"; + + @JsonProperty(FIELD_NAME_MINIMUM) + private long minimum; + + @JsonProperty(FIELD_NAME_MAXIMUM) + private long maximum; + + @JsonProperty(FIELD_NAME_AVERAGE) + private long average; + + @JsonProperty(FIELD_NAME_P50) + private String p50; + + @JsonProperty(FIELD_NAME_P90) + private String p90; + + @JsonProperty(FIELD_NAME_P95) + private String p95; + + @JsonProperty(FIELD_NAME_P99) + private String p99; + + @JsonProperty(FIELD_NAME_P999) + private String p999; + + @JsonCreator + public StatsSummaryDto( + @JsonProperty(FIELD_NAME_MINIMUM) long minimum, + @JsonProperty(FIELD_NAME_MAXIMUM) long maximum, + @JsonProperty(FIELD_NAME_AVERAGE) long average, + @JsonProperty(FIELD_NAME_P50) String p50, + @JsonProperty(FIELD_NAME_P90) String p90, + @JsonProperty(FIELD_NAME_P95) String p95, + @JsonProperty(FIELD_NAME_P99) String p99, + @JsonProperty(FIELD_NAME_P999) String p999) { + this.minimum = minimum; + this.maximum = maximum; + this.average = average; + this.p50 = p50; + this.p90 = p90; + this.p95 = p95; + this.p99 = p99; + this.p999 = p999; + } + } + + /** + * Checkpoint summary. + */ + @Data + @NoArgsConstructor + public static final class Summary { + + public static final String FIELD_NAME_CHECKPOINTED_SIZE = "checkpointed_size"; + + /** + * The accurate name of this field should be 'checkpointed_data_size', keep it as before to + * not break backwards compatibility for old web UI.
+ * + * @see FLINK-13390 + */ + public static final String FIELD_NAME_STATE_SIZE = "state_size"; + + public static final String FIELD_NAME_DURATION = "end_to_end_duration"; + + public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered"; + + public static final String FIELD_NAME_PROCESSED_DATA = "processed_data"; + + public static final String FIELD_NAME_PERSISTED_DATA = "persisted_data"; + + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) + private StatsSummaryDto checkpointedSize; + + @JsonProperty(FIELD_NAME_STATE_SIZE) + private StatsSummaryDto stateSize; + + @JsonProperty(FIELD_NAME_DURATION) + private StatsSummaryDto duration; + + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) + private StatsSummaryDto alignmentBuffered; + + @JsonProperty(FIELD_NAME_PROCESSED_DATA) + private StatsSummaryDto processedData; + + @JsonProperty(FIELD_NAME_PERSISTED_DATA) + private StatsSummaryDto persistedData; + + @JsonCreator + public Summary( + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) StatsSummaryDto checkpointedSize, + @JsonProperty(FIELD_NAME_STATE_SIZE) StatsSummaryDto stateSize, + @JsonProperty(FIELD_NAME_DURATION) StatsSummaryDto duration, + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) StatsSummaryDto alignmentBuffered, + @JsonProperty(FIELD_NAME_PROCESSED_DATA) StatsSummaryDto processedData, + @JsonProperty(FIELD_NAME_PERSISTED_DATA) StatsSummaryDto persistedData) { + this.checkpointedSize = checkpointedSize; + this.stateSize = stateSize; + this.duration = duration; + this.alignmentBuffered = alignmentBuffered; + this.processedData = processedData; + this.persistedData = persistedData; + } + } + + /** + * Statistics for a completed checkpoint. + */ + @EqualsAndHashCode(callSuper = true) + @Data + @NoArgsConstructor + public static final class CompletedCheckpointStatistics extends CheckpointStatistics { + + public static final String FIELD_NAME_EXTERNAL_PATH = "external_path"; + + public static final String FIELD_NAME_DISCARDED = "discarded"; + + @JsonProperty(FIELD_NAME_EXTERNAL_PATH) + private String externalPath; + + @JsonProperty(FIELD_NAME_DISCARDED) + private boolean discarded; + + @JsonCreator + public CompletedCheckpointStatistics( + @JsonProperty(FIELD_NAME_ID) long id, + @JsonProperty(FIELD_NAME_STATUS) String status, + @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint, + @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp, + @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp, + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) long checkpointedSize, + @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize, + @JsonProperty(FIELD_NAME_DURATION) long duration, + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered, + @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData, + @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData, + @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks, + @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks, + @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) String checkpointType, + @JsonProperty(FIELD_NAME_TASKS) Map checkpointingStatisticsPerTask, + @JsonProperty(FIELD_NAME_EXTERNAL_PATH) String externalPath, + @JsonProperty(FIELD_NAME_DISCARDED) boolean discarded) { + super( + id, + status, + savepoint, + triggerTimestamp, + latestAckTimestamp, + checkpointedSize, + stateSize, + duration, + alignmentBuffered, + processedData, + persistedData, + numSubtasks, + numAckSubtasks, + checkpointType, + checkpointingStatisticsPerTask); + this.externalPath = externalPath; + this.discarded 
= discarded; + } + } + + @EqualsAndHashCode(callSuper = true) + @Data + @NoArgsConstructor + public static final class FailedCheckpointStatistics extends CheckpointStatistics { + + public static final String FIELD_NAME_FAILURE_TIMESTAMP = "failure_timestamp"; + + public static final String FIELD_NAME_FAILURE_MESSAGE = "failure_message"; + + @JsonProperty(FIELD_NAME_FAILURE_TIMESTAMP) + private long failureTimestamp; + + @JsonProperty(FIELD_NAME_FAILURE_MESSAGE) + private String failureMessage; + + @JsonCreator + public FailedCheckpointStatistics( + @JsonProperty(FIELD_NAME_ID) long id, + @JsonProperty(FIELD_NAME_STATUS) String status, + @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint, + @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp, + @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp, + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) long checkpointedSize, + @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize, + @JsonProperty(FIELD_NAME_DURATION) long duration, + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered, + @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData, + @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData, + @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks, + @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks, + @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) String checkpointType, + @JsonProperty(FIELD_NAME_TASKS) Map checkpointingStatisticsPerTask, + @JsonProperty(FIELD_NAME_FAILURE_TIMESTAMP) long failureTimestamp, + @JsonProperty(FIELD_NAME_FAILURE_MESSAGE) String failureMessage) { + super( + id, + status, + savepoint, + triggerTimestamp, + latestAckTimestamp, + checkpointedSize, + stateSize, + duration, + alignmentBuffered, + processedData, + persistedData, + numSubtasks, + numAckSubtasks, + checkpointType, + checkpointingStatisticsPerTask); + + this.failureTimestamp = failureTimestamp; + this.failureMessage = failureMessage; + } + } + + /** + * Statistics about the latest checkpoints. 
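+ * <p>Shape sketch of the mapped {@code latest} node (keys taken from the constants below;
+ * nested values are assumptions, not captured output):
+ * {@code {"completed": {...}, "savepoint": null, "failed": {"failure_timestamp": ...}, "restored": null}}.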
+ */ + @Data + @NoArgsConstructor + public static final class LatestCheckpoints { + + public static final String FIELD_NAME_COMPLETED = "completed"; + + public static final String FIELD_NAME_SAVEPOINT = "savepoint"; + + public static final String FIELD_NAME_FAILED = "failed"; + + public static final String FIELD_NAME_RESTORED = "restored"; + + @JsonProperty(FIELD_NAME_COMPLETED) + private CheckPointOverView.CompletedCheckpointStatistics completedCheckpointStatistics; + + @JsonProperty(FIELD_NAME_SAVEPOINT) + private CheckPointOverView.CompletedCheckpointStatistics savepointStatistics; + + @JsonProperty(FIELD_NAME_FAILED) + private CheckPointOverView.FailedCheckpointStatistics failedCheckpointStatistics; + + @JsonProperty(FIELD_NAME_RESTORED) + private RestoredCheckpointStatistics restoredCheckpointStatistics; + + @JsonCreator + public LatestCheckpoints( + @JsonProperty(FIELD_NAME_COMPLETED) + CheckPointOverView.CompletedCheckpointStatistics completedCheckpointStatistics, + @JsonProperty(FIELD_NAME_SAVEPOINT) + CheckPointOverView.CompletedCheckpointStatistics savepointStatistics, + @JsonProperty(FIELD_NAME_FAILED) + CheckPointOverView.FailedCheckpointStatistics failedCheckpointStatistics, + @JsonProperty(FIELD_NAME_RESTORED) RestoredCheckpointStatistics restoredCheckpointStatistics) { + this.completedCheckpointStatistics = completedCheckpointStatistics; + this.savepointStatistics = savepointStatistics; + this.failedCheckpointStatistics = failedCheckpointStatistics; + this.restoredCheckpointStatistics = restoredCheckpointStatistics; + } + } + + /** + * Statistics for a restored checkpoint. + */ + @Data + @NoArgsConstructor + public static final class RestoredCheckpointStatistics { + + public static final String FIELD_NAME_ID = "id"; + + public static final String FIELD_NAME_RESTORE_TIMESTAMP = "restore_timestamp"; + + public static final String FIELD_NAME_IS_SAVEPOINT = "is_savepoint"; + + public static final String FIELD_NAME_EXTERNAL_PATH = "external_path"; + + @JsonProperty(FIELD_NAME_ID) + private long id; + + @JsonProperty(FIELD_NAME_RESTORE_TIMESTAMP) + private long restoreTimestamp; + + @JsonProperty(FIELD_NAME_IS_SAVEPOINT) + private boolean savepoint; + + @JsonProperty(FIELD_NAME_EXTERNAL_PATH) + private String externalPath; + + @JsonCreator + public RestoredCheckpointStatistics( + @JsonProperty(FIELD_NAME_ID) long id, + @JsonProperty(FIELD_NAME_RESTORE_TIMESTAMP) long restoreTimestamp, + @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint, + @JsonProperty(FIELD_NAME_EXTERNAL_PATH) String externalPath) { + this.id = id; + this.restoreTimestamp = restoreTimestamp; + this.savepoint = savepoint; + this.externalPath = externalPath; + } + } +} diff --git a/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckpointStatistics.java b/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckpointStatistics.java new file mode 100644 index 0000000000..a23b4b41ec --- /dev/null +++ b/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/CheckpointStatistics.java @@ -0,0 +1,149 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.flink.checkpoint; + +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +public class CheckpointStatistics { + + public static final String FIELD_NAME_ID = "id"; + + public static final String FIELD_NAME_STATUS = "status"; + + public static final String FIELD_NAME_IS_SAVEPOINT = "is_savepoint"; + + public static final String FIELD_NAME_TRIGGER_TIMESTAMP = "trigger_timestamp"; + + public static final String FIELD_NAME_LATEST_ACK_TIMESTAMP = "latest_ack_timestamp"; + + public static final String FIELD_NAME_CHECKPOINTED_SIZE = "checkpointed_size"; + + /** + * The accurate name of this field should be 'checkpointed_data_size', keep it as before to not + * break backwards compatibility for old web UI. + * + * @see FLINK-13390 + */ + public static final String FIELD_NAME_STATE_SIZE = "state_size"; + + public static final String FIELD_NAME_DURATION = "end_to_end_duration"; + + public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered"; + + public static final String FIELD_NAME_PROCESSED_DATA = "processed_data"; + + public static final String FIELD_NAME_PERSISTED_DATA = "persisted_data"; + + public static final String FIELD_NAME_NUM_SUBTASKS = "num_subtasks"; + + public static final String FIELD_NAME_NUM_ACK_SUBTASKS = "num_acknowledged_subtasks"; + + public static final String FIELD_NAME_TASKS = "tasks"; + + public static final String FIELD_NAME_CHECKPOINT_TYPE = "checkpoint_type"; + + @JsonProperty(FIELD_NAME_ID) + private long id; + + @JsonProperty(FIELD_NAME_STATUS) + private String status; + + @JsonProperty(FIELD_NAME_IS_SAVEPOINT) + private boolean savepoint; + + @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) + private long triggerTimestamp; + + @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) + private long latestAckTimestamp; + + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) + private long checkpointedSize; + + @JsonProperty(FIELD_NAME_STATE_SIZE) + private long stateSize; + + @JsonProperty(FIELD_NAME_DURATION) + private long duration; + + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) + private long alignmentBuffered; + + @JsonProperty(FIELD_NAME_PROCESSED_DATA) + private long processedData; + + @JsonProperty(FIELD_NAME_PERSISTED_DATA) + private long persistedData; + + @JsonProperty(FIELD_NAME_NUM_SUBTASKS) + private int numSubtasks; + + @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) + private int numAckSubtasks; + + @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) + private String checkpointType; + + @JsonProperty(FIELD_NAME_TASKS) + private Map checkpointStatisticsPerTask; + + @JsonCreator + CheckpointStatistics( + @JsonProperty(FIELD_NAME_ID) long id, + @JsonProperty(FIELD_NAME_STATUS) String status, + @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint, + @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp, + @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp, + 
@JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) long checkpointedSize, + @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize, + @JsonProperty(FIELD_NAME_DURATION) long duration, + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered, + @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData, + @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData, + @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks, + @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks, + @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) String checkpointType, + @JsonProperty(FIELD_NAME_TASKS) Map checkpointStatisticsPerTask) { + this.id = id; + this.status = Preconditions.checkNotNull(status); + this.savepoint = savepoint; + this.triggerTimestamp = triggerTimestamp; + this.latestAckTimestamp = latestAckTimestamp; + this.checkpointedSize = checkpointedSize; + this.stateSize = stateSize; + this.duration = duration; + this.alignmentBuffered = alignmentBuffered; + this.processedData = processedData; + this.persistedData = persistedData; + this.numSubtasks = numSubtasks; + this.numAckSubtasks = numAckSubtasks; + this.checkpointType = Preconditions.checkNotNull(checkpointType); + this.checkpointStatisticsPerTask = Preconditions.checkNotNull(checkpointStatisticsPerTask); + } +} diff --git a/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/TaskCheckpointStatistics.java b/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/TaskCheckpointStatistics.java new file mode 100644 index 0000000000..de38342222 --- /dev/null +++ b/dinky-common/src/main/java/org/dinky/data/flink/checkpoint/TaskCheckpointStatistics.java @@ -0,0 +1,119 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.flink.checkpoint; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +public class TaskCheckpointStatistics { + public static final String FIELD_NAME_ID = "id"; + + public static final String FIELD_NAME_CHECKPOINT_STATUS = "status"; + + public static final String FIELD_NAME_LATEST_ACK_TIMESTAMP = "latest_ack_timestamp"; + + public static final String FIELD_NAME_CHECKPOINTED_SIZE = "checkpointed_size"; + + /** + * The accurate name of this field should be 'checkpointed_data_size', keep it as before to not + * break backwards compatibility for old web UI. 
+ * + * @see FLINK-13390 + */ + public static final String FIELD_NAME_STATE_SIZE = "state_size"; + + public static final String FIELD_NAME_DURATION = "end_to_end_duration"; + + public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered"; + + public static final String FIELD_NAME_PROCESSED_DATA = "processed_data"; + + public static final String FIELD_NAME_PERSISTED_DATA = "persisted_data"; + + public static final String FIELD_NAME_NUM_SUBTASKS = "num_subtasks"; + + public static final String FIELD_NAME_NUM_ACK_SUBTASKS = "num_acknowledged_subtasks"; + + @JsonProperty(FIELD_NAME_ID) + private long checkpointId; + + @JsonProperty(FIELD_NAME_CHECKPOINT_STATUS) + private String checkpointStatus; + + @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) + private long latestAckTimestamp; + + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) + private long checkpointedSize; + + @JsonProperty(FIELD_NAME_STATE_SIZE) + private long stateSize; + + @JsonProperty(FIELD_NAME_DURATION) + private long duration; + + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) + private long alignmentBuffered; + + @JsonProperty(FIELD_NAME_PROCESSED_DATA) + private long processedData; + + @JsonProperty(FIELD_NAME_PERSISTED_DATA) + private long persistedData; + + @JsonProperty(FIELD_NAME_NUM_SUBTASKS) + private int numSubtasks; + + @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) + private int numAckSubtasks; + + @JsonCreator + public TaskCheckpointStatistics( + @JsonProperty(FIELD_NAME_ID) long checkpointId, + @JsonProperty(FIELD_NAME_CHECKPOINT_STATUS) String checkpointStatus, + @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp, + @JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) long checkpointedSize, + @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize, + @JsonProperty(FIELD_NAME_DURATION) long duration, + @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered, + @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData, + @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData, + @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks, + @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks) { + + this.checkpointId = checkpointId; + this.checkpointStatus = Preconditions.checkNotNull(checkpointStatus); + this.latestAckTimestamp = latestAckTimestamp; + this.checkpointedSize = checkpointedSize; + this.stateSize = stateSize; + this.duration = duration; + this.processedData = processedData; + this.alignmentBuffered = alignmentBuffered; + this.persistedData = persistedData; + this.numSubtasks = numSubtasks; + this.numAckSubtasks = numAckSubtasks; + } +} diff --git a/dinky-common/src/main/java/org/dinky/data/flink/config/CheckpointConfigInfo.java b/dinky-common/src/main/java/org/dinky/data/flink/config/CheckpointConfigInfo.java new file mode 100644 index 0000000000..e1c0946af7 --- /dev/null +++ b/dinky-common/src/main/java/org/dinky/data/flink/config/CheckpointConfigInfo.java @@ -0,0 +1,167 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.flink.config; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +public class CheckpointConfigInfo { + + public static final String FIELD_NAME_PROCESSING_MODE = "mode"; + + public static final String FIELD_NAME_CHECKPOINT_INTERVAL = "interval"; + + public static final String FIELD_NAME_CHECKPOINT_TIMEOUT = "timeout"; + + public static final String FIELD_NAME_CHECKPOINT_MIN_PAUSE = "min_pause"; + + public static final String FIELD_NAME_CHECKPOINT_MAX_CONCURRENT = "max_concurrent"; + + public static final String FIELD_NAME_EXTERNALIZED_CHECKPOINT_CONFIG = "externalization"; + + public static final String FIELD_NAME_STATE_BACKEND = "state_backend"; + + public static final String FIELD_NAME_CHECKPOINT_STORAGE = "checkpoint_storage"; + + public static final String FIELD_NAME_UNALIGNED_CHECKPOINTS = "unaligned_checkpoints"; + + public static final String FIELD_NAME_TOLERABLE_FAILED_CHECKPOINTS = "tolerable_failed_checkpoints"; + + public static final String FIELD_NAME_ALIGNED_CHECKPOINT_TIMEOUT = "aligned_checkpoint_timeout"; + + public static final String FIELD_NAME_CHECKPOINTS_AFTER_TASKS_FINISH = "checkpoints_after_tasks_finish"; + + public static final String FIELD_NAME_STATE_CHANGELOG = "state_changelog_enabled"; + + public static final String FIELD_NAME_PERIODIC_MATERIALIZATION_INTERVAL = + "changelog_periodic_materialization_interval"; + + public static final String FIELD_NAME_CHANGELOG_STORAGE = "changelog_storage"; + + @JsonProperty(FIELD_NAME_PROCESSING_MODE) + private String processingMode; + + @JsonProperty(FIELD_NAME_CHECKPOINT_INTERVAL) + private long checkpointInterval; + + @JsonProperty(FIELD_NAME_CHECKPOINT_TIMEOUT) + private long checkpointTimeout; + + @JsonProperty(FIELD_NAME_CHECKPOINT_MIN_PAUSE) + private long minPauseBetweenCheckpoints; + + @JsonProperty(FIELD_NAME_CHECKPOINT_MAX_CONCURRENT) + private long maxConcurrentCheckpoints; + + @JsonProperty(FIELD_NAME_EXTERNALIZED_CHECKPOINT_CONFIG) + private ExternalizedCheckpointInfo externalizedCheckpointInfo; + + @JsonProperty(FIELD_NAME_STATE_BACKEND) + private String stateBackend; + + @JsonProperty(FIELD_NAME_CHECKPOINT_STORAGE) + private String checkpointStorage; + + @JsonProperty(FIELD_NAME_UNALIGNED_CHECKPOINTS) + private boolean unalignedCheckpoints; + + @JsonProperty(FIELD_NAME_TOLERABLE_FAILED_CHECKPOINTS) + private int tolerableFailedCheckpoints; + + @JsonProperty(FIELD_NAME_ALIGNED_CHECKPOINT_TIMEOUT) + private long alignedCheckpointTimeout; + + @JsonProperty(FIELD_NAME_CHECKPOINTS_AFTER_TASKS_FINISH) + private boolean checkpointsWithFinishedTasks; + + @JsonProperty(FIELD_NAME_STATE_CHANGELOG) + private boolean stateChangelog; + + @JsonProperty(FIELD_NAME_PERIODIC_MATERIALIZATION_INTERVAL) + private long periodicMaterializationInterval; + + @JsonProperty(FIELD_NAME_CHANGELOG_STORAGE) + private String changelogStorage; + + @JsonCreator + public CheckpointConfigInfo( + 
@JsonProperty(FIELD_NAME_PROCESSING_MODE) String processingMode, + @JsonProperty(FIELD_NAME_CHECKPOINT_INTERVAL) long checkpointInterval, + @JsonProperty(FIELD_NAME_CHECKPOINT_TIMEOUT) long checkpointTimeout, + @JsonProperty(FIELD_NAME_CHECKPOINT_MIN_PAUSE) long minPauseBetweenCheckpoints, + @JsonProperty(FIELD_NAME_CHECKPOINT_MAX_CONCURRENT) int maxConcurrentCheckpoints, + @JsonProperty(FIELD_NAME_EXTERNALIZED_CHECKPOINT_CONFIG) + ExternalizedCheckpointInfo externalizedCheckpointInfo, + @JsonProperty(FIELD_NAME_STATE_BACKEND) String stateBackend, + @JsonProperty(FIELD_NAME_CHECKPOINT_STORAGE) String checkpointStorage, + @JsonProperty(FIELD_NAME_UNALIGNED_CHECKPOINTS) boolean unalignedCheckpoints, + @JsonProperty(FIELD_NAME_TOLERABLE_FAILED_CHECKPOINTS) int tolerableFailedCheckpoints, + @JsonProperty(FIELD_NAME_ALIGNED_CHECKPOINT_TIMEOUT) long alignedCheckpointTimeout, + @JsonProperty(FIELD_NAME_CHECKPOINTS_AFTER_TASKS_FINISH) boolean checkpointsWithFinishedTasks, + @JsonProperty(FIELD_NAME_STATE_CHANGELOG) boolean stateChangelog, + @JsonProperty(FIELD_NAME_PERIODIC_MATERIALIZATION_INTERVAL) long periodicMaterializationInterval, + @JsonProperty(FIELD_NAME_CHANGELOG_STORAGE) String changelogStorage) { + this.processingMode = Preconditions.checkNotNull(processingMode); + this.checkpointInterval = checkpointInterval; + this.checkpointTimeout = checkpointTimeout; + this.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints; + this.maxConcurrentCheckpoints = maxConcurrentCheckpoints; + this.externalizedCheckpointInfo = Preconditions.checkNotNull(externalizedCheckpointInfo); + this.stateBackend = Preconditions.checkNotNull(stateBackend); + this.checkpointStorage = Preconditions.checkNotNull(checkpointStorage); + this.unalignedCheckpoints = unalignedCheckpoints; + this.tolerableFailedCheckpoints = tolerableFailedCheckpoints; + this.alignedCheckpointTimeout = alignedCheckpointTimeout; + this.checkpointsWithFinishedTasks = checkpointsWithFinishedTasks; + this.stateChangelog = stateChangelog; + this.periodicMaterializationInterval = periodicMaterializationInterval; + this.changelogStorage = changelogStorage; + } + + /** Contains information about the externalized checkpoint configuration. 
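+ * <p>Maps the {@code externalization} node of the checkpoint config payload, e.g. (sketch,
+ * values assumed): {@code {"enabled": true, "delete_on_cancellation": false}}.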
*/ + @Data + @NoArgsConstructor + public static final class ExternalizedCheckpointInfo { + + public static final String FIELD_NAME_ENABLED = "enabled"; + + public static final String FIELD_NAME_DELETE_ON_CANCELLATION = "delete_on_cancellation"; + + @JsonProperty(FIELD_NAME_ENABLED) + private boolean enabled; + + @JsonProperty(FIELD_NAME_DELETE_ON_CANCELLATION) + private boolean deleteOnCancellation; + + @JsonCreator + public ExternalizedCheckpointInfo( + @JsonProperty(FIELD_NAME_ENABLED) boolean enabled, + @JsonProperty(FIELD_NAME_DELETE_ON_CANCELLATION) boolean deleteOnCancellation) { + this.enabled = enabled; + this.deleteOnCancellation = deleteOnCancellation; + } + } +} diff --git a/dinky-common/src/main/java/org/dinky/data/flink/config/ExecutionConfig.java b/dinky-common/src/main/java/org/dinky/data/flink/config/ExecutionConfig.java index 9a10677855..71140399db 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/config/ExecutionConfig.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/config/ExecutionConfig.java @@ -21,11 +21,11 @@ import java.io.Serializable; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -33,7 +33,6 @@ @ApiModel(value = "ExecutionConfig", description = "Execution Config") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class ExecutionConfig implements Serializable { private static final long serialVersionUID = 1L; @@ -44,7 +43,7 @@ public class ExecutionConfig implements Serializable { notes = "Execution Mode", dataType = "String", example = "PIPELINED") - @JSONField(name = "execution-mode") + @JsonProperty("execution-mode") private String executionMode; @ApiModelProperty( @@ -53,7 +52,7 @@ public class ExecutionConfig implements Serializable { notes = "Restart Strategy", dataType = "String", example = "Cluster level default restart strategy") - @JSONField(name = "restart-strategy") + @JsonProperty("restart-strategy") private String restartStrategy; @ApiModelProperty( @@ -62,7 +61,7 @@ public class ExecutionConfig implements Serializable { notes = "Job Parallelism", dataType = "Integer", example = "1") - @JSONField(name = "job-parallelism") + @JsonProperty("job-parallelism") private Integer jobParallelism; @ApiModelProperty( @@ -71,10 +70,24 @@ public class ExecutionConfig implements Serializable { notes = "Object Reuse Mode", dataType = "Boolean", example = "false") - @JSONField(name = "object-reuse-mode") + @JsonProperty("object-reuse-mode") private Boolean objectReuseMode; @ApiModelProperty(value = "User Config", required = true, notes = "User Config", dataType = "ObjectNode") - @JSONField(name = "user-config") + @JsonProperty("user-config") private Object userConfig; + + @JsonCreator + public ExecutionConfig( + @JsonProperty("execution-mode") String executionMode, + @JsonProperty("restart-strategy") String restartStrategy, + @JsonProperty("job-parallelism") Integer jobParallelism, + @JsonProperty("object-reuse-mode") Boolean objectReuseMode, + @JsonProperty("user-config") Object userConfig) { + this.executionMode = executionMode; + this.restartStrategy = restartStrategy; + this.jobParallelism = jobParallelism; + this.objectReuseMode = objectReuseMode; + this.userConfig = userConfig; + } } diff --git 
a/dinky-common/src/main/java/org/dinky/data/flink/config/FlinkJobConfigInfo.java b/dinky-common/src/main/java/org/dinky/data/flink/config/FlinkJobConfigInfo.java index 2f06f9841e..ba109703fe 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/config/FlinkJobConfigInfo.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/config/FlinkJobConfigInfo.java @@ -21,11 +21,11 @@ import java.io.Serializable; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @@ -44,7 +44,6 @@ */ @ApiModel(value = "FlinkJobConfigInfo", description = "Flink Job Config Info") @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobConfigInfo implements Serializable { private static final long serialVersionUID = 1L; @@ -55,14 +54,24 @@ public class FlinkJobConfigInfo implements Serializable { notes = "Job ID", dataType = "String", example = "62254c597e60e3b978e1663f29b333cd") - @JSONField(name = "jid") + @JsonProperty("jid") private String jid; @ApiModelProperty(value = "Job Name", required = true, notes = "Job Name", dataType = "String", example = "test") - @JSONField(name = "name") + @JsonProperty("name") private String name; @ApiModelProperty(value = "Execution Config", required = true, notes = "Execution Config", dataType = "ObjectNode") - @JSONField(name = "execution-config") + @JsonProperty("execution-config") private ExecutionConfig executionConfig; + + @JsonCreator + public FlinkJobConfigInfo( + @JsonProperty("jid") String jid, + @JsonProperty("name") String name, + @JsonProperty("execution-config") ExecutionConfig executionConfig) { + this.jid = jid; + this.name = name; + this.executionConfig = executionConfig; + } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/exceptions/FlinkJobExceptionsDetail.java b/dinky-common/src/main/java/org/dinky/data/flink/exceptions/FlinkJobExceptionsDetail.java index 8cacb145f1..cce24e695a 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/exceptions/FlinkJobExceptionsDetail.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/exceptions/FlinkJobExceptionsDetail.java @@ -22,11 +22,11 @@ import java.io.Serializable; import java.util.List; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -46,7 +46,6 @@ @ApiModel(value = "FlinkJobExceptionsDetail", description = "Flink Job Exceptions Detail Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobExceptionsDetail implements Serializable { private static final long serialVersionUID = 1L; @@ -57,7 +56,7 @@ public class FlinkJobExceptionsDetail implements Serializable { notes = "All Exceptions", dataType = "List", example = "All Exceptions") - @JSONField(name = "all-exceptions") + @JsonProperty("all-exceptions") private List allExceptions; @ApiModelProperty( @@ -66,7 +65,7 @@ public class FlinkJobExceptionsDetail implements Serializable { notes = "Root Exception", dataType = "String", example = "Root Exception") - @JSONField(name = "root-exception") + 
@JsonProperty("root-exception") private String rootException = ""; @ApiModelProperty( @@ -75,7 +74,7 @@ public class FlinkJobExceptionsDetail implements Serializable { notes = "Timestamp", dataType = "Object", example = "Timestamp") - @JSONField(name = "timestamp") + @JsonProperty("timestamp") private Long timestamp; @ApiModelProperty( @@ -84,7 +83,7 @@ public class FlinkJobExceptionsDetail implements Serializable { notes = "Truncated", dataType = "Boolean", example = "Truncated") - @JSONField(name = "truncated") + @JsonProperty("truncated") private Boolean truncated; @ApiModelProperty( @@ -93,18 +92,31 @@ public class FlinkJobExceptionsDetail implements Serializable { notes = "Exception History", dataType = "Object", example = "Exception History") - @JSONField(name = "exceptionHistory") + @JsonProperty("exceptionHistory") private ExceptionHistory exceptionHistory; + @JsonCreator + public FlinkJobExceptionsDetail( + @JsonProperty("all-exceptions") List allExceptions, + @JsonProperty("root-exception") String rootException, + @JsonProperty("timestamp") Long timestamp, + @JsonProperty("truncated") Boolean truncated, + @JsonProperty("exceptionHistory") ExceptionHistory exceptionHistory) { + this.allExceptions = allExceptions; + this.rootException = rootException; + this.timestamp = timestamp; + this.truncated = truncated; + this.exceptionHistory = exceptionHistory; + } + @ApiModel(value = "ExceptionHistory", description = "Exception History Info") @Builder @Data - @AllArgsConstructor @NoArgsConstructor public static class ExceptionHistory { @ApiModelProperty(value = "Entries", required = true, notes = "Entries", dataType = "List", example = "Entries") - @JSONField(name = "entries") + @JsonProperty("entries") private List entries; @ApiModelProperty( @@ -113,7 +125,14 @@ public static class ExceptionHistory { notes = "Truncated", dataType = "Boolean", example = "Truncated") - @JSONField(name = "truncated") + @JsonProperty("truncated") private Boolean truncated; + + @JsonCreator + public ExceptionHistory( + @JsonProperty("entries") List entries, @JsonProperty("truncated") Boolean truncated) { + this.entries = entries; + this.truncated = truncated; + } } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobDetailInfo.java b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobDetailInfo.java index a4e80e9f89..e1f0b51584 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobDetailInfo.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobDetailInfo.java @@ -23,11 +23,11 @@ import java.util.List; import java.util.Map; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -177,56 +177,83 @@ @ApiModel(value = "FlinkJobDetailInfo", description = "Flink Job Detail Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobDetailInfo implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "jid", notes = "jid", dataType = "String", example = "1") - @JSONField(name = "jid") + @JsonProperty(value = "jid") private String jid; @ApiModelProperty(value = "name", notes = "name", dataType = "String", example = "1") - @JSONField(name = "name") + @JsonProperty(value = "name") 
private String name; @ApiModelProperty(value = "state", notes = "state", dataType = "String", example = "1") - @JSONField(name = "state") + @JsonProperty(value = "state") private String state; @ApiModelProperty(value = "start-time", notes = "start-time", dataType = "long", example = "1") - @JSONField(name = "start-time") + @JsonProperty(value = "start-time") private Long startTime; @ApiModelProperty(value = "end-time", notes = "end-time", dataType = "long", example = "1") - @JSONField(name = "end-time") + @JsonProperty(value = "end-time") private Long endTime; @ApiModelProperty(value = "duration", notes = "duration", dataType = "long", example = "1") - @JSONField(name = "duration") + @JsonProperty(value = "duration") private Long duration; @ApiModelProperty(value = "maxParallelism", notes = "maxParallelism", dataType = "long", example = "1") - @JSONField(name = "maxParallelism") + @JsonProperty(value = "maxParallelism") private Long maxParallelism; @ApiModelProperty(value = "now", notes = "now", dataType = "long", example = "1") - @JSONField(name = "now") + @JsonProperty(value = "now") private Long now; @ApiModelProperty(value = "timestamps", notes = "timestamps", dataType = "long", example = "1") - @JSONField(name = "timestamps") + @JsonProperty(value = "timestamps") private Map timestamps; @ApiModelProperty(value = "vertices", notes = "vertices", dataType = "List", example = "1") - @JSONField(name = "vertices") + @JsonProperty(value = "vertices") private List vertices; @ApiModelProperty(value = "status-counts", notes = "status-counts", dataType = "Map", example = "1") - @JSONField(name = "status-counts") + @JsonProperty(value = "status-counts") private Map statusCounts; @ApiModelProperty(value = "plan", notes = "plan", dataType = "FlinkJobPlan", example = "1") - @JSONField(name = "plan") + @JsonProperty(value = "plan") private FlinkJobPlan plan; + + @JsonCreator + public FlinkJobDetailInfo( + @JsonProperty(value = "jid") String jid, + @JsonProperty(value = "name") String name, + @JsonProperty(value = "state") String state, + @JsonProperty(value = "start-time") Long startTime, + @JsonProperty(value = "end-time") Long endTime, + @JsonProperty(value = "duration") Long duration, + @JsonProperty(value = "maxParallelism") Long maxParallelism, + @JsonProperty(value = "now") Long now, + @JsonProperty(value = "timestamps") Map timestamps, + @JsonProperty(value = "vertices") List vertices, + @JsonProperty(value = "status-counts") Map statusCounts, + @JsonProperty(value = "plan") FlinkJobPlan plan) { + this.jid = jid; + this.name = name; + this.state = state; + this.startTime = startTime; + this.endTime = endTime; + this.duration = duration; + this.maxParallelism = maxParallelism; + this.now = now; + this.timestamps = timestamps; + this.vertices = vertices; + this.statusCounts = statusCounts; + this.plan = plan; + } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlan.java b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlan.java index 3d4bcd0828..c439be6fd5 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlan.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlan.java @@ -22,11 +22,11 @@ import java.io.Serializable; import java.util.List; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import 
lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -34,24 +34,35 @@ @ApiModel(value = "FlinkJobPlan", description = "Flink Job Plan Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobPlan implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "jid", notes = "jid", dataType = "String", example = "1") - @JSONField(name = "jid") + @JsonProperty(value = "jid") private String jid; @ApiModelProperty(value = "name", notes = "name", dataType = "String", example = "1") - @JSONField(name = "name") + @JsonProperty(value = "name") private String name; @ApiModelProperty(value = "type", notes = "type", dataType = "String", example = "1") - @JSONField(name = "type") + @JsonProperty(value = "type") private String type; @ApiModelProperty(value = "nodes", notes = "nodes", dataType = "List") - @JSONField(name = "nodes") + @JsonProperty(value = "nodes") private List nodes; + + @JsonCreator + public FlinkJobPlan( + @JsonProperty(value = "jid") String jid, + @JsonProperty(value = "name") String name, + @JsonProperty(value = "type") String type, + @JsonProperty(value = "nodes") List nodes) { + this.jid = jid; + this.name = name; + this.type = type; + this.nodes = nodes; + } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNode.java b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNode.java index 994e06c7ea..85cdb85392 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNode.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNode.java @@ -25,11 +25,11 @@ import java.io.Serializable; import java.util.List; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -56,47 +56,68 @@ @ApiModel(value = "FlinkJobPlanNode", description = "Flink Job Plan Node Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobPlanNode implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "id", notes = "id", dataType = "String", example = "1") - @JSONField(name = "id") + @JsonProperty(value = "id") private String id; @ApiModelProperty(value = "parallelism", notes = "parallelism", dataType = "Integer", example = "1") - @JSONField(name = "parallelism") + @JsonProperty(value = "parallelism") private Integer parallelism; @ApiModelProperty(value = "operator", notes = "operator", dataType = "String", example = "1") - @JSONField(name = "operator") + @JsonProperty(value = "operator") private String operator; @ApiModelProperty(value = "operator_strategy", notes = "operator_strategy", dataType = "String", example = "1") - @JSONField(name = "operator_strategy") + @JsonProperty(value = "operator_strategy") private String operatorStrategy; @ApiModelProperty(value = "description", notes = "description", dataType = "String", example = "1") - @JSONField(name = "description") + @JsonProperty(value = "description") private String description; @ApiModelProperty(value = "inputs", notes = "inputs", dataType = "List") - @JSONField(name = "inputs") + @JsonProperty(value = "inputs") private List inputs; @ApiModelProperty(value = "optimizer_properties", notes = 
"optimizer_properties", dataType = "Object") - @JSONField(name = "optimizer_properties") + @JsonProperty(value = "optimizer_properties") private Object optimizerProperties; /** * extend field */ @ApiModelProperty(value = "backpressure", notes = "backpressure", dataType = "String", example = "1") - @JSONField(name = "backpressure") + @JsonProperty(value = "backpressure") private FlinkJobNodeBackPressure backpressure; @ApiModelProperty(value = "watermark", notes = "watermark", dataType = "String", example = "1") - @JSONField(name = "watermark") + @JsonProperty(value = "watermark") private List watermark; + + @JsonCreator + public FlinkJobPlanNode( + @JsonProperty(value = "id") String id, + @JsonProperty(value = "parallelism") Integer parallelism, + @JsonProperty(value = "operator") String operator, + @JsonProperty(value = "operator_strategy") String operatorStrategy, + @JsonProperty(value = "description") String description, + @JsonProperty(value = "inputs") List inputs, + @JsonProperty(value = "optimizer_properties") Object optimizerProperties, + @JsonProperty(value = "backpressure") FlinkJobNodeBackPressure backpressure, + @JsonProperty(value = "watermark") List watermark) { + this.id = id; + this.parallelism = parallelism; + this.operator = operator; + this.operatorStrategy = operatorStrategy; + this.description = description; + this.inputs = inputs; + this.optimizerProperties = optimizerProperties; + this.backpressure = backpressure; + this.watermark = watermark; + } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNodeInput.java b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNodeInput.java index 71244a0202..949aaca29f 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNodeInput.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobPlanNodeInput.java @@ -21,7 +21,7 @@ import java.io.Serializable; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @@ -48,18 +48,18 @@ public class FlinkJobPlanNodeInput implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "num", notes = "num", dataType = "Integer", example = "1") - @JSONField(name = "num") + @JsonProperty(value = "num") private Integer num; @ApiModelProperty(value = "id", notes = "id", dataType = "String", example = "1") - @JSONField(name = "id") + @JsonProperty(value = "id") private String id; @ApiModelProperty(value = "ship_strategy", notes = "ship_strategy", dataType = "String", example = "1") - @JSONField(name = "ship_strategy") + @JsonProperty(value = "ship_strategy") private String shipStrategy; @ApiModelProperty(value = "exchange", notes = "exchange", dataType = "String", example = "1") - @JSONField(name = "exchange") + @JsonProperty(value = "exchange") private String exchange; } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobVertex.java b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobVertex.java index c9469674a2..d606597cc8 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobVertex.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/job/FlinkJobVertex.java @@ -22,11 +22,11 @@ import java.io.Serializable; import java.util.Map; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; 
import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -34,48 +34,71 @@ @ApiModel(value = "FlinkJobVertex", description = "Flink Job Vertex Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobVertex implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "Vertex ID", notes = "Vertex ID") - @JSONField(name = "id") + @JsonProperty(value = "id") private String id; @ApiModelProperty(value = "Vertex Name", notes = "Vertex Name") - @JSONField(name = "name") + @JsonProperty(value = "name") private String name; @ApiModelProperty(value = "Vertex Max Parallelism", notes = "Vertex Max Parallelism") - @JSONField(name = "maxParallelism") + @JsonProperty(value = "maxParallelism") private Integer maxParallelism; @ApiModelProperty(value = "Vertex Parallelism", notes = "Vertex Parallelism") - @JSONField(name = "parallelism") + @JsonProperty(value = "parallelism") private Integer parallelism; @ApiModelProperty(value = "Vertex Status", notes = "Vertex Status") - @JSONField(name = "status") + @JsonProperty(value = "status") private String status; @ApiModelProperty(value = "Vertex Start Time", notes = "Vertex Start Time") - @JSONField(name = "start-time") + @JsonProperty(value = "start-time") private Long startTime; @ApiModelProperty(value = "Vertex End Time", notes = "Vertex End Time") - @JSONField(name = "end-time") + @JsonProperty(value = "end-time") private Long endTime; @ApiModelProperty(value = "Vertex Duration", notes = "Vertex Duration") - @JSONField(name = "duration") + @JsonProperty(value = "duration") private Long duration; @ApiModelProperty(value = "Vertex Tasks", notes = "Vertex Tasks") - @JSONField(name = "tasks") + @JsonProperty(value = "tasks") private Map tasks; @ApiModelProperty(value = "Vertex Metrics", notes = "Vertex Metrics") - @JSONField(name = "metrics") + @JsonProperty(value = "metrics") private Map metrics; + + @JsonCreator + public FlinkJobVertex( + @JsonProperty(value = "id") String id, + @JsonProperty(value = "name") String name, + @JsonProperty(value = "maxParallelism") Integer maxParallelism, + @JsonProperty(value = "parallelism") Integer parallelism, + @JsonProperty(value = "status") String status, + @JsonProperty(value = "start-time") Long startTime, + @JsonProperty(value = "end-time") Long endTime, + @JsonProperty(value = "duration") Long duration, + @JsonProperty(value = "tasks") Map tasks, + @JsonProperty(value = "metrics") Map metrics) { + this.id = id; + this.name = name; + this.maxParallelism = maxParallelism; + this.parallelism = parallelism; + this.status = status; + this.startTime = startTime; + this.endTime = endTime; + this.duration = duration; + this.tasks = tasks; + this.metrics = metrics; + } } diff --git a/dinky-common/src/main/java/org/dinky/data/flink/watermark/FlinkJobNodeWaterMark.java b/dinky-common/src/main/java/org/dinky/data/flink/watermark/FlinkJobNodeWaterMark.java index be03e7c263..abf1d3642c 100644 --- a/dinky-common/src/main/java/org/dinky/data/flink/watermark/FlinkJobNodeWaterMark.java +++ b/dinky-common/src/main/java/org/dinky/data/flink/watermark/FlinkJobNodeWaterMark.java @@ -21,11 +21,11 @@ import java.io.Serializable; -import com.alibaba.fastjson2.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.annotations.ApiModel; import 
io.swagger.annotations.ApiModelProperty; -import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -42,16 +42,21 @@ @ApiModel(value = "FlinkJobNodeWaterMark", description = "Flink Job Node WaterMark Info") @Builder @Data -@AllArgsConstructor @NoArgsConstructor public class FlinkJobNodeWaterMark implements Serializable { private static final long serialVersionUID = 1L; @ApiModelProperty(value = "Id", required = true, notes = "Id", dataType = "String", example = "1") - @JSONField(name = "id") + @JsonProperty(value = "id") private String id; @ApiModelProperty(value = "Value", required = true, notes = "Value", dataType = "String", example = "1") - @JSONField(name = "value") + @JsonProperty(value = "value") private String value; + + @JsonCreator + public FlinkJobNodeWaterMark(@JsonProperty(value = "id") String id, @JsonProperty(value = "value") String value) { + this.id = id; + this.value = value; + } } diff --git a/dinky-web/config/proxy.ts b/dinky-web/config/proxy.ts index 96bf7cffd9..d6c1b07c3d 100644 --- a/dinky-web/config/proxy.ts +++ b/dinky-web/config/proxy.ts @@ -40,7 +40,18 @@ export default { changeOrigin: true, logLevel: 'debug', pathRewrite: { '^': '' }, - onProxyRes: (proxyRes, req, res) => { + onProxyRes: ( + proxyRes: any, + req: any, + res: { + header: (arg0: { + 'Content-Type': string; + 'Cache-Control': string; + Connection: string; + 'X-Accel-Buffering': string; + }) => void; + } + ) => { res.header({ 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache, no-transform', diff --git a/dinky-web/src/global.less b/dinky-web/src/global.less index 4ee8342362..e216d1849d 100644 --- a/dinky-web/src/global.less +++ b/dinky-web/src/global.less @@ -280,6 +280,7 @@ ol { display: flex; align-items: center; justify-content: space-between; + height: 100%; .float-button { position: absolute; @@ -398,10 +399,6 @@ h5 { scrollbar-width: thin; } -.monaco-float { - height: 100%; -} - .data-studio-tabs { .ant-tabs-nav { .ant-tabs-nav-wrap { @@ -581,3 +578,26 @@ h5 { background: #0366d6; } // ==================== lineage style end ==================== + +.card-list-item-wrapper { + display: flex; +} +.card-list-item { + width: 100%; + margin: 0; +} +.card-list-item { + transition: 0.3s; +} + +.card-button-list { + display: none; +} + +.card-list-item:hover .card-button-list { + display: block; +} + +.card-list-item:hover { + box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1); +} diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index bcf5033482..1704553d0d 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -646,6 +646,7 @@ export default { 'rc.ci.alias': 'Alias', 'rc.ci.aliasPlaceholder': 'Please enter an alias!', 'rc.ci.ar': 'Auto Registration', + 'rc.ci.mr': 'Manual Registration', 'rc.ci.create': 'Create Cluster Instance', 'rc.ci.deleteConfirm': 'Are you sure to delete this Flink Cluster instance? 
', 'rc.ci.heartbeat': 'Heartbeat Detection', @@ -667,6 +668,8 @@ export default { 'rc.ci.type': 'Type', 'rc.ci.typePlaceholder': 'Please select a cluster type!', 'rc.ci.version': 'Version', + 'rc.ci.desc': 'Note', + 'rc.ci.search': 'Search Name/Alias/Note', 'rc.doc.category': 'Document Type', 'rc.doc.categoryPlaceholder': 'Please select the type of this Document!', 'rc.doc.create': 'Create Document', @@ -831,7 +834,7 @@ export default { 'role.EnterRoleCode': 'Please enter Role Code!', 'role.EnterRoleName': 'Please enter Role Name!', 'role.assign': 'Assign Permissions', - 'role.assignMenu': 'Role', + 'role.assignMenu': 'Role: {roleName}, Assign Menu', 'role.belongTenant': 'Belong Tenant', 'role.create': 'Create Role', 'role.delete': 'Delete Role', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 9da8732a37..ec011318af 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -613,6 +613,7 @@ export default { 'rc.ci.alias': '实例别名', 'rc.ci.aliasPlaceholder': '请输入别名!', 'rc.ci.ar': '自动注册', + 'rc.ci.mr': '手动注册', 'rc.ci.create': '创建集群', 'rc.ci.deleteConfirm': '确定删除该 Flink 实例吗?', 'rc.ci.heartbeat': '心跳检测', @@ -633,6 +634,8 @@ export default { 'rc.ci.type': '类型', 'rc.ci.typePlaceholder': '请选择集群类型!', 'rc.ci.version': '版本', + 'rc.ci.desc': '描述', + 'rc.ci.search': '搜索 名称/别名/备注', 'rc.doc.category': '文档类型', 'rc.doc.categoryPlaceholder': '请选择该文档所属类型!', 'rc.doc.create': '创建文档', @@ -792,7 +795,7 @@ export default { 'role.EnterRoleCode': '请输入角色编码!', 'role.EnterRoleName': '请输入角色名称!', 'role.assign': '分配权限', - 'role.assignMenu': '角色', + 'role.assignMenu': '角色: {roleName} 分配菜单', 'role.belongTenant': '所属租户', 'role.create': '创建角色', 'role.delete': '删除角色', diff --git a/dinky-web/src/pages/AuthCenter/Role/components/RoleProTable/index.tsx b/dinky-web/src/pages/AuthCenter/Role/components/RoleProTable/index.tsx index 252d56177d..8a34879975 100644 --- a/dinky-web/src/pages/AuthCenter/Role/components/RoleProTable/index.tsx +++ b/dinky-web/src/pages/AuthCenter/Role/components/RoleProTable/index.tsx @@ -187,7 +187,8 @@ const RoleProTable: React.FC = () => { { title: l('global.table.operate'), valueType: 'option', - width: '10vh', + width: '10%', + fixed: 'right', render: (_: any, record: UserBaseInfo.Role) => [ handleEditVisible(record)} /> diff --git a/dinky-web/src/pages/AuthCenter/RowPermissions/components/PermissionsProTable/index.tsx b/dinky-web/src/pages/AuthCenter/RowPermissions/components/PermissionsProTable/index.tsx index 6323ba6bbc..7fc7ad01f6 100644 --- a/dinky-web/src/pages/AuthCenter/RowPermissions/components/PermissionsProTable/index.tsx +++ b/dinky-web/src/pages/AuthCenter/RowPermissions/components/PermissionsProTable/index.tsx @@ -141,7 +141,8 @@ const PermissionsProTable: React.FC = () => { title: l('global.table.operate'), dataIndex: 'option', valueType: 'option', - width: '10vh', + width: '10%', + fixed: 'right', render: (_: any, record: RowPermissions) => [ handleEditVisible(record)} /> diff --git a/dinky-web/src/pages/AuthCenter/Tenant/components/TenantProTable/index.tsx b/dinky-web/src/pages/AuthCenter/Tenant/components/TenantProTable/index.tsx index 5c24896de3..de45d65639 100644 --- a/dinky-web/src/pages/AuthCenter/Tenant/components/TenantProTable/index.tsx +++ b/dinky-web/src/pages/AuthCenter/Tenant/components/TenantProTable/index.tsx @@ -178,7 +178,8 @@ const TenantProTable: React.FC = () => { { title: l('global.table.operate'), valueType: 'option', - width: '10vh', + width: '10%', + fixed: 'right', render: (_: 
any, record: UserBaseInfo.Tenant) => [ handleEditVisible(record)} /> diff --git a/dinky-web/src/pages/AuthCenter/Tenant/components/TenantUserList/index.tsx b/dinky-web/src/pages/AuthCenter/Tenant/components/TenantUserList/index.tsx index 4fd9f55fb7..60c2a46b8c 100644 --- a/dinky-web/src/pages/AuthCenter/Tenant/components/TenantUserList/index.tsx +++ b/dinky-web/src/pages/AuthCenter/Tenant/components/TenantUserList/index.tsx @@ -75,7 +75,7 @@ const TenantUserList: React.FC = (props) => { { title: l('global.table.operate'), valueType: 'option', - width: '12vh', + width: '10%', fixed: 'right', render: (_: any, record: UserBaseInfo.User) => [ diff --git a/dinky-web/src/pages/AuthCenter/Token/component/TokenList/index.tsx b/dinky-web/src/pages/AuthCenter/Token/component/TokenList/index.tsx index cf8bc173c0..ba73539564 100644 --- a/dinky-web/src/pages/AuthCenter/Token/component/TokenList/index.tsx +++ b/dinky-web/src/pages/AuthCenter/Token/component/TokenList/index.tsx @@ -110,7 +110,7 @@ const TokenList = (props: any) => { { title: l('global.table.operate'), valueType: 'option', - width: '5vw', + width: '10%', fixed: 'right', render: (_, record: SysToken) => [ diff --git a/dinky-web/src/pages/AuthCenter/User/components/UserProTable/index.tsx b/dinky-web/src/pages/AuthCenter/User/components/UserProTable/index.tsx index 8090f43d5f..cfd8e7d127 100644 --- a/dinky-web/src/pages/AuthCenter/User/components/UserProTable/index.tsx +++ b/dinky-web/src/pages/AuthCenter/User/components/UserProTable/index.tsx @@ -245,7 +245,7 @@ const UserProTable = () => { { title: l('global.table.operate'), valueType: 'option', - width: '10vw', + width: '12%', fixed: 'right', render: (_: any, record: UserBaseInfo.User) => [ diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/function.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/function.tsx index 1e5f2830fa..fbd5d479e5 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/function.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/function.tsx @@ -121,8 +121,8 @@ export const buildEnvOptions = (env: any[]) => { const envList: DefaultOptionType[] = [ { label: l('button.disable'), - value: 0, - key: 0 + value: -1, + key: -1 } ]; @@ -151,7 +151,7 @@ export const buildAlertGroupOptions = (alertGroups: Alert.AlertGroup[]) => { const alertGroupOptions: DefaultOptionType[] = [ { label: l('button.disable'), - value: 0 + value: -1 } ]; for (const item of alertGroups) { diff --git a/dinky-web/src/pages/DevOps/JobDetail/CheckPointsTab/components/CkDesc.tsx b/dinky-web/src/pages/DevOps/JobDetail/CheckPointsTab/components/CkDesc.tsx index c6fe465195..7bf8ca91ab 100644 --- a/dinky-web/src/pages/DevOps/JobDetail/CheckPointsTab/components/CkDesc.tsx +++ b/dinky-web/src/pages/DevOps/JobDetail/CheckPointsTab/components/CkDesc.tsx @@ -67,7 +67,7 @@ const CkDesc = (props: JobProps) => { - id: {latest?.failed.id ?? 'None'} + id: {latest?.failed?.id ?? 'None'} {/**/} {/* {'Cause: ' + latest?.failed?.failure_message ?? 
'None'}*/} diff --git a/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/contants.tsx b/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/contants.tsx index e58509e483..a9e32a0b69 100644 --- a/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/contants.tsx +++ b/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/contants.tsx @@ -38,6 +38,21 @@ export const CLUSTER_CONFIG_TYPE: DefaultOptionType[] = [ value: ClusterType.KUBERNETES_OPERATOR, label: 'Kubernetes Operator', key: ClusterType.KUBERNETES_OPERATOR + }, + { + value: ClusterType.STANDALONE, + label: 'Standalone', + key: ClusterType.STANDALONE + }, + { + value: ClusterType.YARN_SESSION, + label: 'Yarn Session', + key: ClusterType.YARN_SESSION + }, + { + value: ClusterType.KUBERNETES_SESSION, + label: 'Kubernetes Session', + key: ClusterType.KUBERNETES_SESSION } ]; diff --git a/dinky-web/src/pages/RegCenter/Cluster/Instance/components/InstanceList/index.tsx b/dinky-web/src/pages/RegCenter/Cluster/Instance/components/InstanceList/index.tsx index 72c840d769..28747ac3fe 100644 --- a/dinky-web/src/pages/RegCenter/Cluster/Instance/components/InstanceList/index.tsx +++ b/dinky-web/src/pages/RegCenter/Cluster/Instance/components/InstanceList/index.tsx @@ -22,28 +22,47 @@ import { EditBtn } from '@/components/CallBackButton/EditBtn'; import { EnableSwitchBtn } from '@/components/CallBackButton/EnableSwitchBtn'; import { PopconfirmDeleteBtn } from '@/components/CallBackButton/PopconfirmDeleteBtn'; import { Authorized, HasAuthority } from '@/hooks/useAccess'; -import { CLUSTER_INSTANCE_STATUS_ENUM } from '@/pages/RegCenter/Cluster/Instance/components/contants'; +import { CLUSTER_CONFIG_TYPE } from '@/pages/RegCenter/Cluster/Configuration/components/contants'; import { renderWebUiRedirect } from '@/pages/RegCenter/Cluster/Instance/components/function'; import InstanceModal from '@/pages/RegCenter/Cluster/Instance/components/InstanceModal'; -import { queryList } from '@/services/api'; import { handleAddOrUpdate, handleOption, handleRemoveById, + queryDataByParams, updateDataByParam } from '@/services/BusinessCrud'; -import { PROTABLE_OPTIONS_PUBLIC, STATUS_ENUM, STATUS_MAPPING } from '@/services/constants'; +import { PROTABLE_OPTIONS_PUBLIC, PRO_LIST_CARD_OPTIONS } from '@/services/constants'; import { API_CONSTANTS } from '@/services/endpoints'; -import { YES_OR_NO_ENUM } from '@/types/Public/constants'; import { Cluster } from '@/types/RegCenter/data.d'; import { InitClusterInstanceState } from '@/types/RegCenter/init.d'; import { ClusterInstanceState } from '@/types/RegCenter/state.d'; import { l } from '@/utils/intl'; -import { ClearOutlined, HeartTwoTone } from '@ant-design/icons'; -import { ActionType, ProTable } from '@ant-design/pro-components'; -import { ProColumns } from '@ant-design/pro-table'; -import { Button, Popconfirm } from 'antd'; -import { useRef, useState } from 'react'; +import { + CheckCircleOutlined, + ClearOutlined, + ExclamationCircleOutlined, + HeartTwoTone +} from '@ant-design/icons'; +import { ActionType, ProList } from '@ant-design/pro-components'; +import { + Badge, + Button, + Card, + Col, + Descriptions, + Divider, + Input, + List, + Popconfirm, + Row, + Space, + Tag, + Tooltip, + Typography +} from 'antd'; +import { useEffect, useRef, useState } from 'react'; +const { Text, Paragraph, Link } = Typography; export default () => { /** @@ -53,6 +72,14 @@ export default () => { useState(InitClusterInstanceState); const actionRef = useRef(); + const 
queryClusterInstanceList = async (keyword = '') => { + queryDataByParams(API_CONSTANTS.CLUSTER_INSTANCE_LIST, { keyword }).then((res) => { + setClusterInstanceStatus((prevState) => ({ + ...prevState, + instanceList: res as Cluster.Instance[] + })); + }); + }; /** * execute and callback function * @param {() => void} callback @@ -61,10 +88,15 @@ export default () => { const executeAndCallback = async (callback: () => void) => { setClusterInstanceStatus((prevState) => ({ ...prevState, loading: true })); await callback(); + queryClusterInstanceList(); setClusterInstanceStatus((prevState) => ({ ...prevState, loading: false })); actionRef.current?.reload?.(); }; + useEffect(() => { + queryClusterInstanceList(); + }, []); + /** * cancel */ @@ -139,125 +171,114 @@ export default () => { }; /** - * columns + * tool bar render */ - const columns: ProColumns[] = [ - { - title: l('rc.ci.name'), - dataIndex: 'name', - ellipsis: true - }, - { - title: l('rc.ci.alias'), - dataIndex: 'alias', - ellipsis: true - }, - { - title: l('rc.ci.type'), - dataIndex: 'type', - hideInSearch: true, - width: '8%' - }, - { - title: l('rc.ci.jma'), - dataIndex: 'jobManagerHost', - copyable: true, - hideInSearch: true - }, - { - title: l('rc.ci.ar'), - dataIndex: 'autoRegisters', - hideInSearch: true, - width: '8%', - valueEnum: YES_OR_NO_ENUM - }, - { - title: l('rc.ci.version'), - dataIndex: 'version', - hideInSearch: true, - width: '5%' - }, - { - title: l('rc.ci.status'), - dataIndex: 'status', - hideInSearch: true, - width: '8%', - valueEnum: CLUSTER_INSTANCE_STATUS_ENUM - }, - { - title: l('global.table.note'), - dataIndex: 'note', - width: '5%', - ellipsis: true - }, - { - title: l('global.table.isEnable'), - dataIndex: 'enabled', - width: '6%', - hideInSearch: true, - filters: STATUS_MAPPING(), - filterMultiple: false, - valueEnum: STATUS_ENUM(), - render: (_: any, record: Cluster.Instance) => { - return ( - handleChangeEnable(record)} - /> - ); - } - }, - { - title: l('global.table.createTime'), - dataIndex: 'createTime', - hideInSearch: true, - hideInTable: true - }, - { - title: l('global.table.updateTime'), - dataIndex: 'updateTime', - hideInSearch: true, - hideInTable: true - }, - { - title: l('global.table.operate'), - hideInSearch: true, - valueType: 'option', - width: '8vw', - render: (_: any, record: Cluster.Instance) => [ - - handleEdit(record)} /> - , - - handleDelete(record.id)} - description={l('rc.ci.deleteConfirm')} - /> - , - renderWebUiRedirect(record) - ] - } - ]; + const renderActionButton = (record: Cluster.Instance) => ( + +
+ + handleEdit(record)} /> + + + handleDelete(record.id)} + description={l('rc.ci.deleteConfirm')} + /> + + {renderWebUiRedirect(record)} +
+ ); + + /** + * render content + * @param item + */ + const renderDataContent = (item: Cluster.Instance) => { + return ( + <> + + + +
+ {l('rc.ci.jma')}: {item.jobManagerHost} +
+
+ {l('rc.ci.version')}: {item.version} +
+ + {(item.alias || item.alias === '') && ( +
+ {l('rc.ci.alias')}: {item.alias} +
+ )} +
+
+ + + handleChangeEnable(item)} + disabled={!HasAuthority('/registration/cluster/instance/edit')} + /> + + {CLUSTER_CONFIG_TYPE.find((record) => item.type === record.value)?.label} + + : } + color={item.status === 1 ? 'success' : 'warning'} + > + {item.status === 1 + ? l('global.table.status.normal') + : l('global.table.status.abnormal')} + + + + + + {renderActionButton(item)} + +
+ + ); + }; + + /** + * render sub title + * @param item + */ + const renderTitle = (item: Cluster.Instance) => { + return ( + + + + {item.name} + + + + ); + }; /** * tool bar render */ const toolBarRender = () => [ - + queryClusterInstanceList(value)} + />, + setClusterInstanceStatus((prevState) => ({ ...prevState, addedOpen: true }))} /> , - + , - + { ]; + const renderListItem = (item: Cluster.Instance) => { + return ( + + + + + + + + ); + }; + /** * render */ return ( <> - + headerTitle={l('rc.ci.management')} + toolBarRender={toolBarRender} {...PROTABLE_OPTIONS_PUBLIC} - columns={columns} + {...(PRO_LIST_CARD_OPTIONS as any)} + grid={{ gutter: 24, column: 4 }} + pagination={{ size: 'small', defaultPageSize: 12, hideOnSinglePage: true }} actionRef={actionRef} + dataSource={clusterInstanceStatus.instanceList} loading={clusterInstanceStatus.loading} - toolBarRender={toolBarRender} - request={(params, sorter, filter: any) => - queryList(API_CONSTANTS.CLUSTER_INSTANCE, { - ...params, - sorter, - filter - }) - } + itemLayout={'vertical'} + renderItem={renderListItem} /> + {/*added*/} = (props) => { - +
{!excludeFormItem && ( { { title: l('global.table.operate'), valueType: 'option', - width: '10vh', + width: '10%', + fixed: 'right', hideInDescriptions: true, render: (_, record) => [ diff --git a/dinky-web/src/pages/RegCenter/GitProject/components/ProjectProTable/index.tsx b/dinky-web/src/pages/RegCenter/GitProject/components/ProjectProTable/index.tsx index cb1c9fedd1..710511a4d6 100644 --- a/dinky-web/src/pages/RegCenter/GitProject/components/ProjectProTable/index.tsx +++ b/dinky-web/src/pages/RegCenter/GitProject/components/ProjectProTable/index.tsx @@ -272,7 +272,8 @@ const ProjectProTable: React.FC = () => { { title: l('global.table.operate'), valueType: 'option', - width: '10vw', + width: '10%', + fixed: 'right', render: (text: any, record: GitProject) => [ { }, { title: l('global.table.operate'), - width: '10vh', + width: '8%', + fixed: 'right', valueType: 'option', hideInDescriptions: true, render: (_, record) => [ diff --git a/dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx index 53d98d2f31..b7f839bbf7 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx @@ -191,7 +191,8 @@ const TemplateTable: React.FC = () => { }, { title: l('global.table.operate'), - width: '10vh', + width: '8%', + fixed: 'right', hideInSearch: true, hideInDescriptions: true, render: (text: any, record: UDFTemplate) => [ @@ -230,7 +231,7 @@ const TemplateTable: React.FC = () => { columns={columns} /> {/* added */} - {templateState.editOpen && ( + {templateState.addedOpen && ( ; + instanceList: Cluster.Instance[]; } /** diff --git a/script/sql/dinky-mysql.sql b/script/sql/dinky-mysql.sql index 3e9f8716dc..36d39c3b44 100644 --- a/script/sql/dinky-mysql.sql +++ b/script/sql/dinky-mysql.sql @@ -1371,17 +1371,17 @@ BEGIN; INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkPoints.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', 
'[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkPoints.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); -INSERT INTO dinky_alert_template (id, name, template_content, enabled, create_time, update_time) VALUES (1, 'Default', ' +INSERT INTO dinky_alert_template VALUES (1, 'Default', ' - **Job Name :** ${task.name} - **Job Status :** ${jobInstance.status} - **Alert Time :** ${time} - **Start Time :** ${startTime} - **End Time :** ${endTime} -- **${exceptions.get("root-exception").toString()?substring(0,20)}** +- **${exceptions.rootException.substring(0,20)}** [Go toTask Web](http://${taskUrl}) ', 1, null, null); diff --git a/script/sql/dinky-pg.sql b/script/sql/dinky-pg.sql index 50967d91c8..add1807e74 100644 --- a/script/sql/dinky-pg.sql +++ b/script/sql/dinky-pg.sql @@ -2424,20 +2424,20 @@ comment on column public.dinky_alert_rules.update_time is 'update time'; -- ---------------------------- -- Records of dinky_alert_rule -- ---------------------------- -INSERT INTO public.dinky_alert_rules VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); -INSERT INTO public.dinky_alert_rules VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); -INSERT INTO public.dinky_alert_rules VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); -INSERT INTO public.dinky_alert_rules VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkPoints.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); -INSERT INTO public.dinky_alert_rules VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); -INSERT INTO public.dinky_alert_rules VALUES (8, 'alert.rule.checkpointTimeout', 
'[{"ruleKey":"checkPoints.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); - -INSERT INTO dinky_alert_template VALUES (1, 'Default', ' +INSERT INTO public.dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); +INSERT INTO public.dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); +INSERT INTO public.dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); +INSERT INTO public.dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); +INSERT INTO public.dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); +INSERT INTO public.dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); + +INSERT INTO public.dinky_alert_template VALUES (1, 'Default', ' - **Job Name :** ${task.name} - **Job Status :** ${jobInstance.status} - **Alert Time :** ${time} - **Start Time :** ${startTime} - **End Time :** ${endTime} -- **${exceptions.get("root-exception").toString()?substring(0,20)}** +- **${exceptions.rootException.substring(0,20)}** [Go toTask Web](http://${taskUrl}) ', 1, null, null); diff --git a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql index 582ff4d8e7..35c9317cfd 100644 --- a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql +++ b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql @@ -202,8 +202,7 @@ alter table dinky_user alter table dinky_user_tenant add tenant_admin_flag tinyint default 0 comment 'tenant admin flag(0:false,1:true)' after tenant_id; -alter table dinky_task - add statement text null; +alter table dinky_task add column `statement` longtext DEFAULT NULL COMMENT 'job statement'; drop table dinky_namespace; drop table dinky_role_namespace; diff --git 
a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql index 3e28e5bf47..5d438fc944 100644 --- a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql +++ b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql @@ -212,12 +212,12 @@ INSERT INTO `dinky_sys_menu` VALUES (146, 10, ' 拷贝', '/registration/datasour -- ---------------------------- -- Records of dinky_alert_rule -- ---------------------------- -INSERT INTO dinky_alert_rules VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); -INSERT INTO dinky_alert_rules VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); -INSERT INTO dinky_alert_rules VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); -INSERT INTO dinky_alert_rules VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkPoints.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); -INSERT INTO dinky_alert_rules VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); -INSERT INTO dinky_alert_rules VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkPoints.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (3, 'alert.rule.jobFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'FAILED\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'UNKNOWN\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"\'RESTARTING\'","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (7, 
'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); INSERT INTO dinky_alert_template VALUES (1, 'Default', ' - **Job Name :** ${task.name} @@ -225,7 +225,7 @@ INSERT INTO dinky_alert_template VALUES (1, 'Default', ' - **Alert Time :** ${time} - **Start Time :** ${startTime} - **End Time :** ${endTime} -- **<#if exceptions_msg?length gt 100>${exceptions_msg?substring(0,100)}<#else>${exceptions_msg}** +- **${exceptions.rootException.substring(0,20)}** [Go toTask Web](http://${taskUrl}) ', 1, null, null); @@ -237,3 +237,41 @@ COMMIT; update dinky_user set super_admin_flag =1 where id =1; alter table dinky_task alter column `step` set default 1; +-- todo: the default values for historical jobs need to be updated; the filter conditions are still to be determined + + +replace INTO dinky_task SELECT + t.id, + t.`name`, + t.tenant_id, + t.dialect, + t.type, + t.check_point, + t.save_point_strategy, + t.save_point_path, + t.parallelism, + t.fragment, + t.statement_set, + t.batch_model, + t.cluster_id, + t.cluster_configuration_id, + t.database_id, + t.jar_id, + t.env_id, + t.alert_group_id, + t.config_json, + t.note, + t.step, + t.job_instance_id, + t.enabled, + t.create_time, + t.update_time, + t.version_id, + s.statement FROM dinky_task AS t LEFT JOIN dinky_task_statement AS s ON t.id = s.id; + + drop table if exists dinky_task_statement; \ No newline at end of file
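Finally, the upgrade script above folds `dinky_task_statement.statement` into the widened `statement` column on `dinky_task` via `REPLACE INTO ... LEFT JOIN`, then drops the old table; the todo comment notes the backfill filter for historical jobs is still open. A small JDBC sanity check one could run after the migration; the connection settings are placeholders, and whether a NULL `statement` is acceptable depends on that still-open filter question:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class StatementMigrationCheck {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                        "jdbc:mysql://localhost:3306/dinky", "dinky", "dinky"); // placeholder credentials
                Statement stmt = conn.createStatement();
                // Counts tasks whose statement is still NULL after the merge,
                // i.e. tasks that had no row in the old dinky_task_statement.
                ResultSet rs = stmt.executeQuery(
                        "SELECT COUNT(*) FROM dinky_task WHERE statement IS NULL")) {
            rs.next();
            System.out.println("tasks without a migrated statement: " + rs.getLong(1));
        }
    }
}
```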