diff --git a/linkis-extensions/linkis-et-monitor/pom.xml b/linkis-extensions/linkis-et-monitor/pom.xml index 5e62fa60b5..6972c9e1a4 100644 --- a/linkis-extensions/linkis-et-monitor/pom.xml +++ b/linkis-extensions/linkis-et-monitor/pom.xml @@ -87,11 +87,11 @@ ${project.version} provided - - org.apache.linkis - linkis-rpc - ${project.version} - + + org.apache.linkis + linkis-rpc + ${project.version} + diff --git a/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml b/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml index 8cc9e7ea7b..647b84c833 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml +++ b/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. --> - + linkis-et-monitor dir diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/LinkisDirtyDataCleanApplication.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/LinkisDirtyDataCleanApplication.java deleted file mode 100644 index 3530ef5139..0000000000 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/LinkisDirtyDataCleanApplication.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.monitor.scan.app; - -import org.apache.linkis.monitor.scan.app.dirtydata.entrance.EntranceDirtyDataHandler; -import org.apache.linkis.monitor.scan.utils.log.LogUtils; -import org.apache.linkis.server.utils.LinkisMainHelper; - -import org.apache.commons.lang3.StringUtils; - -import org.springframework.context.annotation.AnnotationConfigApplicationContext; -import org.springframework.context.support.AbstractApplicationContext; - -import org.slf4j.Logger; - -public class LinkisDirtyDataCleanApplication { - private static final Logger logger = LogUtils.stdOutLogger(); - - /** @param args: args[0]: host args[1] port */ - public static void main(String[] args) throws ReflectiveOperationException { - if (args.length == 0 - || StringUtils.equalsIgnoreCase(args[0], "help") - || StringUtils.equalsIgnoreCase(args[0], "--help")) { - LogUtils.stdOutLogger() - .info( - "[help-message]this app cleans entrance dirty-data. args[0]: command-type (entrance/help/...) 
args[1]: entrance-hostname(not null), args[2]: entrance-port(can be null)"); - return; - } - String serviceName = System.getProperty(LinkisMainHelper.SERVER_NAME_KEY()); - LinkisMainHelper.formatPropertyFiles(serviceName); - - if (StringUtils.equalsIgnoreCase(args[0], "entrance")) { - AbstractApplicationContext context = - new AnnotationConfigApplicationContext(LinkisJobHistoryScanSpringConfiguration.class); - - String host = ""; - String port = ""; - if (args.length > 1) { - host = args[1]; - } - if (args.length > 2) { - port = args[2]; - } - if (args.length > 3) { - printIllegalInput("wrong number of arguments"); - return; - } - try { - removeDirtyEurekaInstance(host, port); - } catch (Exception e) { - LogUtils.stdOutLogger().error("Failed to remove dirty eureka-instance", e); - } - try { - removeDbDirtyData(host, port); - } catch (Exception e) { - LogUtils.stdOutLogger().error("Failed to remove dirty db-data", e); - } - - context.close(); - } else { - LogUtils.stdOutLogger().error("Upsupported command type: " + args[0]); - } - } - - private static void printIllegalInput(String msg) { - LogUtils.stdOutLogger().error("illegal input: " + msg); - LogUtils.stdOutLogger() - .info( - "[help-message] this app cleans entrance dirty-data. args[0]: entrance-hostname, args[1]: entrance-port"); - return; - } - - private static void removeDirtyEurekaInstance(String host, String port) { - if (StringUtils.isBlank(host)) { - printIllegalInput("host cannot be blank"); - return; - } - if (StringUtils.isBlank(port)) { - EntranceDirtyDataHandler.handleEurekaDirtyData(host); - } else { - EntranceDirtyDataHandler.handleEurekaDirtyData(host, port); - } - } - - private static void removeDbDirtyData(String host, String port) { - if (StringUtils.isBlank(host)) { - printIllegalInput("host cannot be blank"); - return; - } - if (StringUtils.isBlank(port)) { - EntranceDirtyDataHandler.handleDbDirtData(host); - } else { - EntranceDirtyDataHandler.handleDbDirtData(host, port); - } - } -} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanApplication.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanApplication.java deleted file mode 100644 index 8b4366805e..0000000000 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanApplication.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.monitor.scan.app; - -import org.apache.linkis.monitor.scan.app.factory.MapperFactory; -import org.apache.linkis.monitor.scan.app.jobhistory.JobHistoryDataFetcher; -import org.apache.linkis.monitor.scan.app.jobhistory.errorcode.JobHistoryErrCodeRule; -import org.apache.linkis.monitor.scan.app.jobhistory.errorcode.JobHistoryErrorCodeAlertSender; -import org.apache.linkis.monitor.scan.app.jobhistory.jobtime.JobTimeExceedAlertSender; -import org.apache.linkis.monitor.scan.app.jobhistory.jobtime.JobTimeExceedRule; -import org.apache.linkis.monitor.scan.constants.Constants; -import org.apache.linkis.monitor.scan.core.pac.DataFetcher; -import org.apache.linkis.monitor.scan.core.scanner.AnomalyScanner; -import org.apache.linkis.monitor.scan.core.scanner.DefaultScanner; -import org.apache.linkis.monitor.scan.utils.alert.AlertDesc; -import org.apache.linkis.monitor.scan.utils.alert.ims.ImsAlertDesc; -import org.apache.linkis.monitor.scan.utils.alert.ims.JobHistoryScanImsAlertPropFileParserUtils; -import org.apache.linkis.monitor.scan.utils.log.LogUtils; -import org.apache.linkis.server.utils.LinkisMainHelper; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.springframework.context.annotation.AnnotationConfigApplicationContext; -import org.springframework.context.support.AbstractApplicationContext; - -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; - -public class LinkisJobHistoryScanApplication { - private static final Logger logger = LogUtils.stdOutLogger(); - - /** @param args: args[0]: startTime, args[1] endTime */ - public static void main(String[] args) throws ReflectiveOperationException { - - String serviceName = System.getProperty(LinkisMainHelper.SERVER_NAME_KEY()); - LinkisMainHelper.formatPropertyFiles(serviceName); - - long intervalMs = Constants.SCAN_INTERVALS_SECONDS() * 1000; - long maxIntervalMs = Constants.MAX_INTERVALS_SECONDS() * 1000; - long endTime = System.currentTimeMillis(); - long startTime = endTime - intervalMs; - /** parse input into timestamp */ - if (args != null && args.length == 2) { - SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHH"); - Long tmpStart; - Long tmpEnd; - try { - tmpStart = format.parse(args[0]).getTime(); - tmpEnd = format.parse(args[1]).getTime(); - } catch (ParseException e) { - logger.info( - "Failed to parse input into startTime and endTime." + ExceptionUtils.getMessage(e)); - return; - } - if (endTime <= startTime) { - logger.info("[error] startTime larger than endTime"); - return; - } - if (tmpStart != null && tmpEnd != null) { - startTime = tmpStart; - endTime = tmpEnd; - } - } - long realIntervals = endTime - startTime < maxIntervalMs ? 
endTime - startTime : maxIntervalMs; - - runApp(startTime, endTime, realIntervals, maxIntervalMs); - } - - private static void runApp(long startTime, long endTime, long realIntervals, long maxIntervalMs) { - - AbstractApplicationContext context = - new AnnotationConfigApplicationContext(LinkisJobHistoryScanSpringConfiguration.class); - - AnomalyScanner scanner = new DefaultScanner(); - boolean shouldStart = false; - - List fetchers = generateFetchers(startTime, endTime, maxIntervalMs); - if (fetchers == null) { - logger.warn("generated 0 dataFetchers, plz check input"); - return; - } - - Map errorCodeAlerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE()); - if (errorCodeAlerts == null || errorCodeAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 errorcode alert from alert-rule properties file."); - } else { - logger.info( - "[INFO] Loaded {} errorcode alert from alert-rules properties file.", - errorCodeAlerts.size()); - shouldStart = true; - addIntervalToImsAlerts(errorCodeAlerts, realIntervals); - JobHistoryErrCodeRule jobHistoryErrCodeRule = - new JobHistoryErrCodeRule( - errorCodeAlerts.keySet(), new JobHistoryErrorCodeAlertSender(errorCodeAlerts)); - scanner.addScanRule(jobHistoryErrCodeRule); - } - - Map jobTimeAlerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts( - Constants.SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC()); - if (jobTimeAlerts == null || jobTimeAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 alerts jobtime alert-rule from alert properties file."); - } else { - logger.info( - "[INFO] Loaded {} alerts jobtime alert-rules from alert properties file.", - jobTimeAlerts.size()); - shouldStart = true; - addIntervalToImsAlerts(jobTimeAlerts, realIntervals); - JobTimeExceedRule jobTimeExceedRule = - new JobTimeExceedRule( - jobTimeAlerts.keySet(), new JobTimeExceedAlertSender(jobTimeAlerts)); - scanner.addScanRule(jobTimeExceedRule); - } - if (shouldStart) { - scanner.addDataFetchers(fetchers); - scanner.run(); - scanner.shutdown(); // wait all alert to be send - } - context.close(); - } - - private static List generateFetchers( - long startTime, long endTime, long maxIntervalMs) { - List ret = new ArrayList<>(); - long pe = endTime; - long ps; - while (pe > startTime) { - ps = pe - maxIntervalMs > startTime ? pe - maxIntervalMs : startTime; - String[] fetcherArgs = new String[] {String.valueOf(ps), String.valueOf(pe)}; - ret.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); - logger.info( - "Generated dataFetcher for startTime: " - + new Date(ps).toString() - + ". 
EndTime: " - + new Date(pe).toString()); - pe = pe - maxIntervalMs; - } - return ret; - } - - private static void addIntervalToImsAlerts(Map alerts, long realIntervals) { - for (AlertDesc alert : alerts.values()) { - if (!(alert instanceof ImsAlertDesc)) { - logger.info("[warn] ignore wrong alert" + alert); - } else { - ((ImsAlertDesc) alert).hitIntervalMs_$eq(realIntervals); - } - } - } -} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/dao/InstanceLabelDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/dao/InstanceLabelDao.java index 8f73f0349a..5fa3b7a5c0 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/dao/InstanceLabelDao.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/dao/InstanceLabelDao.java @@ -21,7 +21,6 @@ import java.util.List; - public interface InstanceLabelDao { /** * Remove label diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/entity/InsPersistenceLabel.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/entity/InsPersistenceLabel.java index e828328b43..36fb21c13b 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/entity/InsPersistenceLabel.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/instance/entity/InsPersistenceLabel.java @@ -79,4 +79,20 @@ public Date getCreateTime() { public void setCreateTime(Date createTime) { this.createTime = createTime; } + + public String getLabelKey() { + return labelKey; + } + + public void setLabelKey(String labelKey) { + this.labelKey = labelKey; + } + + public String getFeature() { + return feature; + } + + public void setFeature(String feature) { + this.feature = feature; + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/jobhistory/dao/JobHistoryMapper.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/jobhistory/dao/JobHistoryMapper.java index b3774860af..bfdc640652 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/jobhistory/dao/JobHistoryMapper.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/jobhistory/dao/JobHistoryMapper.java @@ -24,7 +24,6 @@ import java.util.Date; import java.util.List; - public interface JobHistoryMapper { List selectJobHistory(JobHistory jobReq); diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/config/MonitorConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/config/MonitorConfig.java index fab9a5cbe7..32b47c289c 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/config/MonitorConfig.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/config/MonitorConfig.java @@ -29,9 +29,9 @@ public class MonitorConfig { public static final CommonVars USER_MODE_TIMEOUT = CommonVars.apply("linkis.monitor.user.timeOut", 300); public static final CommonVars USER_MODE_AUTHTOKEN = - CommonVars.apply("linkis.monitor.user.authToken","VALIDATOR-AUTH"); + 
CommonVars.apply("linkis.monitor.user.authToken", "VALIDATOR-AUTH"); public static final CommonVars USER_MODE_ENGINE = - CommonVars.apply("linkis.monitor.user.enginelist","[]"); + CommonVars.apply("linkis.monitor.user.enginelist", "[]"); public static final CommonVars ECM_TASK_MAJOR = CommonVars.apply("linkis.monitor.ecmResourceTask.major", 0.03); @@ -43,7 +43,7 @@ public class MonitorConfig { CommonVars.apply("linkis.monitor.metrics.userAuthKey"); public static final CommonVars JOB_HISTORY_TIME_EXCEED = - CommonVars.apply("linkis.monitor.jobhistory.id.timeExceed",0L); + CommonVars.apply("linkis.monitor.jobhistory.id.timeExceed", 0L); public static final CommonVars ENTRANCE_TASK_USERTOTAL = CommonVars.apply("linkis.monitor.entranceTask.userTotalTask", 1000); @@ -52,7 +52,7 @@ public class MonitorConfig { public static final CommonVars ENTRANCE_TASK_TOTAL_MINOR = CommonVars.apply("linkis.monitor.entranceTask.linkisTotalTaskMinor", 10000); public static final CommonVars ENTRANCE_TASK_USERLIST = - CommonVars.apply("linkis.monitor.entranceTask.userlist","[]"); + CommonVars.apply("linkis.monitor.entranceTask.userlist", "[]"); public static final CommonVars SCHEDULED_CONFIG_NUM = CommonVars.apply("linkis.monitor.scheduled.pool.cores.num", 10); @@ -61,11 +61,24 @@ public class MonitorConfig { CommonVars.apply("linkis.monitor.shell.time.out.minute", 30); public static final CommonVars USER_MODE_INTERFACE_TIMEOUT = - CommonVars.apply("linkis.monitor.user.mode.time.out", 30*1000); + CommonVars.apply("linkis.monitor.user.mode.time.out", 30 * 1000); - public static final CommonVars CHATBOT_KEY_ID = CommonVars.apply("linkis.monitor.chatbot.key.id","23e6afad1b78a0c5eed67e4d24de7063"); - public static final CommonVars CHATBOT_TYPE = CommonVars.apply("linkis.monitor.chatbot.type","text"); - public static final CommonVars CHATBOT_SERVICE_NAME= CommonVars.apply("linkis.monitor.chatbot.serviceName","大数据生产助手(BDP_PRD)"); - public static final CommonVars CHATBOT_URL= CommonVars.apply("linkis.monitor.chatbot.url","http://172.21.3.43:1377/pros-chatbot/yuanfang/sendEMsg"); - public static final CommonVars SOLUTION_URL = CommonVars.apply("linkis.monitor.jobhistory.solution.url", "http://kn.dss.weoa.com/linkis/qa"); + public static final CommonVars CHATBOT_KEY_ID = + CommonVars.apply("linkis.monitor.chatbot.key.id", "23e6afad1b78a0c5eed67e4d24de7063"); + public static final CommonVars CHATBOT_TYPE = + CommonVars.apply("linkis.monitor.chatbot.type", "text"); + public static final CommonVars CHATBOT_SERVICE_NAME = + CommonVars.apply("linkis.monitor.chatbot.serviceName", ""); + public static final CommonVars CHATBOT_URL = + CommonVars.apply("linkis.monitor.chatbot.url", ""); + public static final CommonVars SOLUTION_URL = + CommonVars.apply( + "linkis.monitor.jobhistory.solution.url", + "https://linkis.apache.org/docs/latest/tuning-and-troubleshooting/error-guide/error-code"); + + public static final CommonVars TASK_RUNTIME_TIMEOUT_DESC = + CommonVars.apply( + "linkis.monitor.jobhistory.task.timeout.desc", + "[Linkis任务信息]您好,您在Linkis/DSS提交的任务(任务ID:{0}),已经运行超过{1}h," + + "请关注是否任务正常,如果不正常您可以到Linkis/DSS管理台进行任务的kill,集群信息为BDAP({2})。详细解决方案见Q47:{3} "); } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/entity/ChatbotEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/entity/ChatbotEntity.java index 3cf288adbb..4f912b9e82 100644 --- 
a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/entity/ChatbotEntity.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/entity/ChatbotEntity.java @@ -20,68 +20,78 @@ import org.apache.linkis.monitor.scan.app.monitor.config.MonitorConfig; public class ChatbotEntity { - private String keyid; - private String content; - private String type; - private String userName; - private String serviceName; + private String keyid; + private String content; + private String type; + private String userName; + private String serviceName; - public ChatbotEntity(String content, String userName) { - this.keyid = MonitorConfig.CHATBOT_KEY_ID.getValue(); - this.content = content; - this.type = MonitorConfig.CHATBOT_TYPE.getValue(); - this.userName = userName; - this.serviceName = MonitorConfig.CHATBOT_SERVICE_NAME.getValue(); - } + public ChatbotEntity(String content, String userName) { + this.keyid = MonitorConfig.CHATBOT_KEY_ID.getValue(); + this.content = content; + this.type = MonitorConfig.CHATBOT_TYPE.getValue(); + this.userName = userName; + this.serviceName = MonitorConfig.CHATBOT_SERVICE_NAME.getValue(); + } - public String getKeyid() { - return keyid; - } + public String getKeyid() { + return keyid; + } - public void setKeyid(String keyid) { - this.keyid = keyid; - } + public void setKeyid(String keyid) { + this.keyid = keyid; + } - public String getContent() { - return content; - } + public String getContent() { + return content; + } - public void setContent(String content) { - this.content = content; - } + public void setContent(String content) { + this.content = content; + } - public String getType() { - return type; - } + public String getType() { + return type; + } - public void setType(String type) { - this.type = type; - } + public void setType(String type) { + this.type = type; + } - public String getUserName() { - return userName; - } + public String getUserName() { + return userName; + } - public void setUserName(String userName) { - this.userName = userName; - } + public void setUserName(String userName) { + this.userName = userName; + } - public String getServiceName() { - return serviceName; - } + public String getServiceName() { + return serviceName; + } - public void setServiceName(String serviceNameuserName) { - this.serviceName = serviceNameuserName; - } + public void setServiceName(String serviceNameuserName) { + this.serviceName = serviceNameuserName; + } - @Override - public String toString() { - return "ChatbotEntity{" + - "keyid='" + keyid + '\'' + - ", content='" + content + '\'' + - ", type='" + type + '\'' + - ", userName='" + userName + '\'' + - ", serviceName='" + serviceName + '\'' + - '}'; - } + @Override + public String toString() { + return "ChatbotEntity{" + + "keyid='" + + keyid + + '\'' + + ", content='" + + content + + '\'' + + ", type='" + + type + + '\'' + + ", userName='" + + userName + + '\'' + + ", serviceName='" + + serviceName + + '\'' + + '}'; + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/BmlClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/BmlClear.java index 75d415ac0d..11ee06226f 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/BmlClear.java +++ 
b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/BmlClear.java @@ -29,7 +29,6 @@ import org.slf4j.Logger; - @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class BmlClear { diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/EcRecordClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/EcRecordClear.java index dced6117b1..ac9cb4ea61 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/EcRecordClear.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/EcRecordClear.java @@ -30,9 +30,7 @@ import org.slf4j.Logger; -/*** - * Task: clean up linkis_cg_ec_resource_info_record data - */ +/** * Task: clean up linkis_cg_ec_resource_info_record data */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class EcRecordClear { diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryClear.java index 4880a600cc..ae14770420 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryClear.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryClear.java @@ -30,9 +30,7 @@ import org.slf4j.Logger; -/*** - * Task: clean up linkis_ps_job_history_group_history data - */ +/** * Task: clean up linkis_ps_job_history_group_history data */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class JobHistoryClear { diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryMonitor.java index 01873a988b..fbca945a8d 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryMonitor.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/JobHistoryMonitor.java @@ -25,8 +25,8 @@ import org.apache.linkis.monitor.scan.app.jobhistory.jobtime.JobTimeExceedRule; import org.apache.linkis.monitor.scan.app.jobhistory.labels.JobHistoryLabelsAlertSender; import org.apache.linkis.monitor.scan.app.jobhistory.labels.JobHistoryLabelsRule; -import org.apache.linkis.monitor.scan.app.jobhistory.runtime.CommonRunTimeAlertSender; import org.apache.linkis.monitor.scan.app.jobhistory.runtime.CommonJobRunTimeRule; +import org.apache.linkis.monitor.scan.app.jobhistory.runtime.CommonRunTimeAlertSender; import org.apache.linkis.monitor.scan.app.jobhistory.runtime.JobHistoryRunTimeAlertSender; import org.apache.linkis.monitor.scan.app.jobhistory.runtime.JobHistoryRunTimeRule; import org.apache.linkis.monitor.scan.app.monitor.config.MonitorConfig; @@ -37,7 +37,7 @@ import org.apache.linkis.monitor.scan.core.scanner.DefaultScanner; import org.apache.linkis.monitor.scan.utils.alert.AlertDesc; import 
org.apache.linkis.monitor.scan.utils.alert.ims.ImsAlertDesc; -import org.apache.linkis.monitor.scan.utils.alert.ims.JobHistoryScanImsAlertPropFileParserUtils; +import org.apache.linkis.monitor.scan.utils.alert.ims.MonitorAlertUtils; import org.apache.linkis.monitor.scan.utils.alert.ims.UserLabelAlertUtils; import org.apache.linkis.monitor.scan.utils.log.LogUtils; @@ -49,216 +49,219 @@ import org.slf4j.Logger; -/*** - * jobHistory monitor - * 1.errorCode: Monitor the error code - * 2.userLabel: tenant label monitoring, scan the execution data within the first 20 minutes, and judge the labels field of the data - * 3.jobResultRunTime: Scan the execution data within the first 20 minutes, and judge the completed tasks. If the parm field in the jobhistory contains (task.notification.conditions) and the result of executing the task is (Succeed, Failed, Canceled, Timeout, ALL) any one of them, an alarm is triggered and the result of the job is that it has ended. All three are indispensable - * 4.jobResultRunTimeForDSS: Scan the execution data within the first 20 minutes, scan the tasks that have been marked for notification, if the task has ended, a notification will be initiated - * 5.jobHistoryUnfinishedScan: monitor the status of the execution task, scan the data outside 12 hours and within 24 hours +/** + * * jobHistory monitor 1.errorCode: Monitor the error code 2.userLabel: tenant label monitoring, + * scan the execution data within the first 20 minutes, and judge the labels field of the data + * 3.jobResultRunTime: Scan the execution data within the first 20 minutes, and judge the completed + * tasks. If the parm field in the jobhistory contains (task.notification.conditions) and the result + * of executing the task is (Succeed, Failed, Canceled, Timeout, ALL) any one of them, an alarm is + * triggered and the result of the job is that it has ended. All three are indispensable + * 4.jobResultRunTimeForDSS: Scan the execution data within the first 20 minutes, scan the tasks + * that have been marked for notification, if the task has ended, a notification will be initiated + * 5.jobHistoryUnfinishedScan: monitor the status of the execution task, scan the data outside 12 + * hours and within 24 hours */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class JobHistoryMonitor { - private static final Logger logger = LogUtils.stdOutLogger(); - private static final long backtrackNum = 1000000L; + private static final Logger logger = LogUtils.stdOutLogger(); + private static final long backtrackNum = 1000000L; - /** - * Scan tasks completed within 20 minutes - */ - @Scheduled(cron = "0 0/10 * * * ?") - public void jobHistoryFinishedScan() { - long intervalMs = 20 * 60 * 1000; // 20分钟 - long maxIntervalMs = Constants.MAX_INTERVALS_SECONDS() * 1000; - long endTime = System.currentTimeMillis(); - long startTime = endTime - intervalMs; - long realIntervals = endTime - startTime < maxIntervalMs ? 
endTime - startTime : maxIntervalMs; - AnomalyScanner scanner = new DefaultScanner(); - boolean shouldStart = false; - long id; - if (null == CacheUtils.cacheBuilder.getIfPresent("jobHistoryId")) { - //20230206:新增获取最大值-100W 作为初始id进行查询,防止慢查询 - long maxId = MapperFactory.getJobHistoryMapper().selectMaxId(); - long beginId = 0L; - if (maxId > backtrackNum) { - beginId = maxId - backtrackNum; - } - id = MapperFactory.getJobHistoryMapper().selectIdByHalfDay(beginId); - CacheUtils.cacheBuilder.put("jobHistoryId", id); - } else { - id = CacheUtils.cacheBuilder.getIfPresent("jobHistoryId"); - } - List fetchers = generateFetchersfortime(startTime, endTime, id, "updated_time"); - if (fetchers == null) { - logger.warn("generated 0 dataFetchers, plz check input"); - return; - } - // errorCode - try { - Map errorCodeAlerts = JobHistoryScanImsAlertPropFileParserUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE()); - - if (errorCodeAlerts == null || errorCodeAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 errorcode alert from alert-rule properties file."); - } else { - logger.info( - "[INFO] Loaded {} errorcode alert from alert-rules properties file.", - errorCodeAlerts.size()); - shouldStart = true; - addIntervalToImsAlerts(errorCodeAlerts, realIntervals); - JobHistoryErrCodeRule jobHistoryErrCodeRule = - new JobHistoryErrCodeRule( - errorCodeAlerts.keySet(), new JobHistoryErrorCodeAlertSender(errorCodeAlerts)); - scanner.addScanRule(jobHistoryErrCodeRule); - } - } catch (Exception e) { - logger.warn("Jobhistory Monitor ErrorCode Faily: "+ e.getMessage()); - } - // userLabel - try { - Map userLabelAlerts = - UserLabelAlertUtils.getAlerts(Constants.USER_LABEL_MONITOR(), ""); - if (userLabelAlerts == null || userLabelAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 alerts userLabel alert-rule from alert properties file."); - } else { - logger.info( - "[INFO] Loaded {} alerts userLabel alert-rules from alert properties file.", - userLabelAlerts.size()); - shouldStart = true; - JobHistoryLabelsRule jobHistoryLabelsRule = - new JobHistoryLabelsRule(new JobHistoryLabelsAlertSender()); - scanner.addScanRule(jobHistoryLabelsRule); - } - } catch (Exception e) { - logger.warn("Jobhistory Monitor UserLabel Faily: "+ e.getMessage()); - } - // jobResultRunTime - try { - Map jobResultAlerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE()); - if (jobResultAlerts == null || jobResultAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 jobResult alert from alert-rule properties file."); - } else { - logger.info( - "[INFO] Loaded {} alerts jobResult alert-rules from alert properties file.", - jobResultAlerts.size()); - shouldStart = true; - JobHistoryRunTimeRule jobHistoryRunTimeRule = - new JobHistoryRunTimeRule(new JobHistoryRunTimeAlertSender()); - scanner.addScanRule(jobHistoryRunTimeRule); - } - } catch (Exception e) { - logger.warn("Jobhistory Monitor JobResultRunTime Faily: "+ e.getMessage()); - } - // jobResultRunTimeForDSS - try { - Map dssJobResultAlerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE()); - if (dssJobResultAlerts == null || dssJobResultAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 jobResult alert from alert-rule properties file."); - } else { - logger.info( - "[INFO] Loaded {} alerts jobResult alert-rules from alert properties file.", - dssJobResultAlerts.size()); - shouldStart = true; + /** Scan tasks completed within 20 minutes */ + @Scheduled(cron = "0 0/10 * * * ?") + public void 
jobHistoryFinishedScan() { + long intervalMs = 20 * 60 * 1000; // 20分钟 + long maxIntervalMs = Constants.ERRORCODE_MAX_INTERVALS_SECONDS() * 1000; + long endTime = System.currentTimeMillis(); + long startTime = endTime - intervalMs; + long realIntervals = endTime - startTime < maxIntervalMs ? endTime - startTime : maxIntervalMs; + AnomalyScanner scanner = new DefaultScanner(); + boolean shouldStart = false; + long id; + if (null == CacheUtils.cacheBuilder.getIfPresent("jobHistoryId")) { + long maxId = MapperFactory.getJobHistoryMapper().selectMaxId(); + long beginId = 0L; + if (maxId > backtrackNum) { + beginId = maxId - backtrackNum; + } + id = MapperFactory.getJobHistoryMapper().selectIdByHalfDay(beginId); + CacheUtils.cacheBuilder.put("jobHistoryId", id); + } else { + id = CacheUtils.cacheBuilder.getIfPresent("jobHistoryId"); + } + List fetchers = generateFetchersfortime(startTime, endTime, id, "updated_time"); + if (fetchers == null) { + logger.warn("generated 0 dataFetchers, plz check input"); + return; + } + // errorCode + try { + Map errorCodeAlerts = + MonitorAlertUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE(), null); - CommonJobRunTimeRule commonJobRunTimeRule = - new CommonJobRunTimeRule(new CommonRunTimeAlertSender()); - scanner.addScanRule(commonJobRunTimeRule); - } - } catch (Exception e) { - logger.warn("Jobhistory JobResultRunTimeForDSS ErrorCode Faily: "+ e.getMessage()); - } - run(scanner, fetchers, shouldStart); + if (errorCodeAlerts == null || errorCodeAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 errorcode alert from alert-rule properties file."); + } else { + logger.info( + "[INFO] Loaded {} errorcode alert from alert-rules properties file.", + errorCodeAlerts.size()); + shouldStart = true; + addIntervalToImsAlerts(errorCodeAlerts, realIntervals); + JobHistoryErrCodeRule jobHistoryErrCodeRule = + new JobHistoryErrCodeRule( + errorCodeAlerts.keySet(), new JobHistoryErrorCodeAlertSender(errorCodeAlerts)); + scanner.addScanRule(jobHistoryErrCodeRule); + } + } catch (Exception e) { + logger.warn("Jobhistory Monitor ErrorCode Faily: " + e.getMessage()); + } + // userLabel + try { + Map userLabelAlerts = + UserLabelAlertUtils.getAlerts(Constants.USER_LABEL_MONITOR(), ""); + if (userLabelAlerts == null || userLabelAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 alerts userLabel alert-rule from alert properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts userLabel alert-rules from alert properties file.", + userLabelAlerts.size()); + shouldStart = true; + JobHistoryLabelsRule jobHistoryLabelsRule = + new JobHistoryLabelsRule(new JobHistoryLabelsAlertSender()); + scanner.addScanRule(jobHistoryLabelsRule); + } + } catch (Exception e) { + logger.warn("Jobhistory Monitor UserLabel Faily: " + e.getMessage()); + } + // jobResultRunTime + try { + Map jobResultAlerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null); + if (jobResultAlerts == null || jobResultAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 jobResult alert from alert-rule properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts jobResult alert-rules from alert properties file.", + jobResultAlerts.size()); + shouldStart = true; + JobHistoryRunTimeRule jobHistoryRunTimeRule = + new JobHistoryRunTimeRule(new JobHistoryRunTimeAlertSender()); + scanner.addScanRule(jobHistoryRunTimeRule); + } + } catch (Exception e) { + logger.warn("Jobhistory Monitor JobResultRunTime Faily: " + e.getMessage()); } + // jobResultRunTimeForDSS + try { + Map 
dssJobResultAlerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null); + if (dssJobResultAlerts == null || dssJobResultAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 jobResult alert from alert-rule properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts jobResult alert-rules from alert properties file.", + dssJobResultAlerts.size()); + shouldStart = true; - /*** - * Whether scanning data within 12 hours has timed out - */ - @Scheduled(cron = "${linkis.monitor.jobHistory.ScanTask.cron}") - public void jobHistoryUnfinishedScan() { - long id = - Optional.ofNullable(CacheUtils.cacheBuilder.getIfPresent("jobhistoryScan")) - .orElse(MonitorConfig.JOB_HISTORY_TIME_EXCEED.getValue()); - long intervalMs = Constants.SCAN_INTERVALS_SECONDS() * 1000; - long maxIntervalMs = Constants.MAX_INTERVALS_SECONDS() * 1000; - long endTime = System.currentTimeMillis(); - long startTime = endTime - intervalMs; - long realIntervals = endTime - startTime < maxIntervalMs ? endTime - startTime : maxIntervalMs; - AnomalyScanner scanner = new DefaultScanner(); - boolean shouldStart = false; - List fetchers = generateFetchers(startTime, endTime, maxIntervalMs, id, "created_time"); - if (fetchers == null) { - logger.warn("generated 0 dataFetchers, plz check input"); - return; - } - Map jobTimeAlerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts( - Constants.SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC()); - if (jobTimeAlerts == null || jobTimeAlerts.size() == 0) { - logger.info("[INFO] Loaded 0 alerts jobtime alert-rule from alert properties file."); - } else { - logger.info( - "[INFO] Loaded {} alerts jobtime alert-rules from alert properties file.", - jobTimeAlerts.size()); - shouldStart = true; - addIntervalToImsAlerts(jobTimeAlerts, realIntervals); - JobTimeExceedRule jobTimeExceedRule = - new JobTimeExceedRule( - jobTimeAlerts.keySet(), new JobTimeExceedAlertSender(jobTimeAlerts)); - scanner.addScanRule(jobTimeExceedRule); - } - run(scanner, fetchers, shouldStart); + CommonJobRunTimeRule commonJobRunTimeRule = + new CommonJobRunTimeRule(new CommonRunTimeAlertSender()); + scanner.addScanRule(commonJobRunTimeRule); + } + } catch (Exception e) { + logger.warn("Jobhistory JobResultRunTimeForDSS ErrorCode Faily: " + e.getMessage()); } + run(scanner, fetchers, shouldStart); + } - public static void run(AnomalyScanner scanner, List fetchers, Boolean shouldStart) { - if (shouldStart) { - scanner.addDataFetchers(fetchers); - scanner.run(); - // scanner.shutdown(); // wait all alert to be send - } + /** * Whether scanning data within 12 hours has timed out */ + @Scheduled(cron = "${linkis.monitor.jobHistory.ScanTask.cron}") + public void jobHistoryUnfinishedScan() { + long id = + Optional.ofNullable(CacheUtils.cacheBuilder.getIfPresent("jobhistoryScan")) + .orElse(MonitorConfig.JOB_HISTORY_TIME_EXCEED.getValue()); + long intervalMs = Constants.ERRORCODE_SCAN_INTERVALS_SECONDS() * 1000; + long maxIntervalMs = Constants.ERRORCODE_MAX_INTERVALS_SECONDS() * 1000; + long endTime = System.currentTimeMillis(); + long startTime = endTime - intervalMs; + long realIntervals = endTime - startTime < maxIntervalMs ? 
endTime - startTime : maxIntervalMs; + AnomalyScanner scanner = new DefaultScanner(); + boolean shouldStart = false; + List fetchers = + generateFetchers(startTime, endTime, maxIntervalMs, id, "created_time"); + if (fetchers == null) { + logger.warn("generated 0 dataFetchers, plz check input"); + return; } + Map jobTimeAlerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC()), null); + if (jobTimeAlerts == null || jobTimeAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 alerts jobtime alert-rule from alert properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts jobtime alert-rules from alert properties file.", + jobTimeAlerts.size()); + shouldStart = true; + addIntervalToImsAlerts(jobTimeAlerts, realIntervals); + JobTimeExceedRule jobTimeExceedRule = + new JobTimeExceedRule( + jobTimeAlerts.keySet(), new JobTimeExceedAlertSender(jobTimeAlerts)); + scanner.addScanRule(jobTimeExceedRule); + } + run(scanner, fetchers, shouldStart); + } - private static List generateFetchers( - long startTime, long endTime, long maxIntervalMs, long id, String timeType) { - List ret = new ArrayList<>(); - long pe = endTime; - long ps; - while (pe > startTime) { - ps = pe - maxIntervalMs > startTime ? pe - maxIntervalMs : startTime; - String[] fetcherArgs = - new String[]{String.valueOf(ps), String.valueOf(pe), String.valueOf(id), timeType}; - ret.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); - logger.info( - "Generated dataFetcher for startTime: " - + new Date(ps).toString() - + ". EndTime: " - + new Date(pe).toString()); - pe = pe - maxIntervalMs; - } - return ret; + public static void run(AnomalyScanner scanner, List fetchers, Boolean shouldStart) { + if (shouldStart) { + scanner.addDataFetchers(fetchers); + scanner.run(); + // scanner.shutdown(); // wait all alert to be send } + } - private static List generateFetchersfortime(long startTime, long endTime, long id, String timeType) { - List fetchers = new ArrayList<>(); - String[] fetcherArgs = - new String[]{String.valueOf(startTime), String.valueOf(endTime), String.valueOf(id), timeType}; - fetchers.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); - logger.info( - "Generated dataFetcher for startTime: " - + new Date(startTime).toString() - + ". EndTime: " - + new Date(endTime).toString()); - return fetchers; + private static List generateFetchers( + long startTime, long endTime, long maxIntervalMs, long id, String timeType) { + List ret = new ArrayList<>(); + long pe = endTime; + long ps; + while (pe > startTime) { + ps = pe - maxIntervalMs > startTime ? pe - maxIntervalMs : startTime; + String[] fetcherArgs = + new String[] {String.valueOf(ps), String.valueOf(pe), String.valueOf(id), timeType}; + ret.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); + logger.info( + "Generated dataFetcher for startTime: " + + new Date(ps).toString() + + ". EndTime: " + + new Date(pe).toString()); + pe = pe - maxIntervalMs; } + return ret; + } + + private static List generateFetchersfortime( + long startTime, long endTime, long id, String timeType) { + List fetchers = new ArrayList<>(); + String[] fetcherArgs = + new String[] { + String.valueOf(startTime), String.valueOf(endTime), String.valueOf(id), timeType + }; + fetchers.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); + logger.info( + "Generated dataFetcher for startTime: " + + new Date(startTime).toString() + + ". 
EndTime: " + + new Date(endTime).toString()); + return fetchers; + } - private static void addIntervalToImsAlerts(Map alerts, long realIntervals) { - for (AlertDesc alert : alerts.values()) { - if (!(alert instanceof ImsAlertDesc)) { - logger.info("[warn] ignore wrong alert" + alert); - } else { - ((ImsAlertDesc) alert).hitIntervalMs_$eq(realIntervals); - } - } + private static void addIntervalToImsAlerts(Map alerts, long realIntervals) { + for (AlertDesc alert : alerts.values()) { + if (!(alert instanceof ImsAlertDesc)) { + logger.info("[warn] ignore wrong alert" + alert); + } else { + ((ImsAlertDesc) alert).hitIntervalMs_$eq(realIntervals); + } } + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ResourceMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ResourceMonitor.java index b7066ba420..459aaf70ff 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ResourceMonitor.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ResourceMonitor.java @@ -41,9 +41,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/*** - * Monitor the usage of ECM resources for monitoring and metrics reporting - */ +/** * Monitor the usage of ECM resources for monitoring and metrics reporting */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class ResourceMonitor { @@ -68,7 +66,7 @@ public void ecmResourceTask() { List> emNodeVoList = data.getOrDefault("EMs", new ArrayList<>()); StringJoiner minor = new StringJoiner(","); StringJoiner major = new StringJoiner(","); - // deal ecm resource + // deal ecm resource emNodeVoList.forEach( emNodeVo -> { Map leftResource = MapUtils.getMap(emNodeVo, "leftResource"); @@ -78,8 +76,10 @@ public void ecmResourceTask() { labels.stream() .filter(labelmap -> labelmap.containsKey("tenant")) .forEach(map -> tenant.set("租户标签:" + map.get("stringValue").toString())); - String leftmemory = ByteTimeUtils.bytesToString((long) leftResource.getOrDefault("memory",0)); - String maxmemory = ByteTimeUtils.bytesToString((long) maxResource.getOrDefault("memory",0)); + String leftmemory = + ByteTimeUtils.bytesToString((long) leftResource.getOrDefault("memory", 0)); + String maxmemory = + ByteTimeUtils.bytesToString((long) maxResource.getOrDefault("memory", 0)); String leftmemoryStr = leftmemory.split(" ")[0]; String maxmemoryStr = maxmemory.split(" ")[0]; @@ -94,9 +94,12 @@ public void ecmResourceTask() { BigDecimal maxMemory = new BigDecimal(maxmemoryStr); BigDecimal maxCores = new BigDecimal((int) maxResource.get("cores")); BigDecimal maxInstance = new BigDecimal((int) maxResource.get("instance")); - double memorydouble = leftMemory.divide(maxMemory,2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); - double coresdouble = leftCores.divide(maxCores,2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); - double instancedouble = leftInstance.divide(maxInstance,2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); + double memorydouble = + leftMemory.divide(maxMemory, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); + double coresdouble = + leftCores.divide(maxCores, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); + double instancedouble = + leftInstance.divide(maxInstance, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); Double majorValue = MonitorConfig.ECM_TASK_MAJOR.getValue(); Double minorValue = 
MonitorConfig.ECM_TASK_MINOR.getValue(); if (((memorydouble) <= majorValue) @@ -126,26 +129,38 @@ public void ecmResourceTask() { MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), replaceParm); PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12003")); } - // ECM资源占比上报 - resourceSendToIms(coresdouble, memorydouble, instancedouble, HttpsUntils.localHost,"USED"); - }); - //ECM 剩余资源总数上报 - resourceSendToIms(totalCores.get(), totalMemory.get(), totalInstance.get(), HttpsUntils.localHost,"TOTAL"); - } + // ECM资源占比上报 + resourceSendToIms( + coresdouble, memorydouble, instancedouble, HttpsUntils.localHost, "USED"); + }); + // ECM 剩余资源总数上报 + resourceSendToIms( + totalCores.get(), totalMemory.get(), totalInstance.get(), HttpsUntils.localHost, "TOTAL"); + } - private void resourceSendToIms(Double coresdouble, Double memorydouble, Double instancedouble, String loaclhost, String name) { - List list = new ArrayList<>(); - logger.info("ResourceMonitor send index "); - String core ="ECM_CPU_"; - String memory ="ECM_MEMORY_"; - String instance ="ECM_INSTANCE_"; - list.add(new IndexEntity(core.concat(name), "CPU", "INDEX", loaclhost, String.valueOf(coresdouble))); - list.add(new IndexEntity(memory.concat(name), "MEMORY", "INDEX", loaclhost, String.valueOf(memorydouble))); - list.add(new IndexEntity(instance.concat(name), "INSTANCE", "INDEX", loaclhost, String.valueOf(instancedouble))); - try { - HttpsUntils.sendIndex(list); - } catch (IOException e) { - logger.warn("failed to send EcmResource index"); - } + private void resourceSendToIms( + Double coresdouble, + Double memorydouble, + Double instancedouble, + String loaclhost, + String name) { + List list = new ArrayList<>(); + logger.info("ResourceMonitor send index "); + String core = "ECM_CPU_"; + String memory = "ECM_MEMORY_"; + String instance = "ECM_INSTANCE_"; + list.add( + new IndexEntity(core.concat(name), "CPU", "INDEX", loaclhost, String.valueOf(coresdouble))); + list.add( + new IndexEntity( + memory.concat(name), "MEMORY", "INDEX", loaclhost, String.valueOf(memorydouble))); + list.add( + new IndexEntity( + instance.concat(name), "INSTANCE", "INDEX", loaclhost, String.valueOf(instancedouble))); + try { + HttpsUntils.sendIndex(list); + } catch (IOException e) { + logger.warn("failed to send EcmResource index"); } + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskLogClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskLogClear.java index bae3dc53f7..ae7e1bc21d 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskLogClear.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskLogClear.java @@ -30,9 +30,7 @@ import org.slf4j.Logger; -/*** - * Task: clean up logs, file data of ec materials - */ +/** * Task: clean up logs, file data of ec materials */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class TaskLogClear { diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskMonitor.java index 87bc6d6e22..5f6728ef62 100644 --- 
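ResourceMonitor turns each ECM resource into a usage ratio — remaining over maximum for memory, cores and instances — via BigDecimal.divide with two decimal places and half-down rounding, then compares the ratios with the ECM_TASK_MAJOR / ECM_TASK_MINOR thresholds from MonitorConfig to decide the alert level. A compact sketch of that ratio-and-threshold check; the ECM_TASK_MINOR default and the exact way the three ratios are combined are outside this hunk, so the 0.1 value and the per-resource check here are illustrative, and RoundingMode.HALF_DOWN matches the deprecated BigDecimal.ROUND_HALF_DOWN constant the monitor uses:

```java
import java.math.BigDecimal;
import java.math.RoundingMode;

public class EcmRatioSketch {
  // ECM_TASK_MAJOR defaults to 0.03 in MonitorConfig above; 0.1 for minor is only a stand-in.
  static final double MAJOR = 0.03;
  static final double MINOR = 0.1;

  /** Remaining/max kept to two decimal places, half-down, as in ResourceMonitor.ecmResourceTask. */
  static String level(long left, long max) {
    double ratio =
        BigDecimal.valueOf(left)
            .divide(BigDecimal.valueOf(max), 2, RoundingMode.HALF_DOWN)
            .doubleValue();
    if (ratio <= MAJOR) {
      return "major";
    } else if (ratio <= MINOR) {
      return "minor";
    }
    return "ok";
  }

  public static void main(String[] args) {
    System.out.println(level(2, 100)); // 0.02 -> major
    System.out.println(level(8, 100)); // 0.08 -> minor
    System.out.println(level(50, 100)); // 0.5  -> ok
  }
}
```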
a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskMonitor.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/TaskMonitor.java @@ -43,9 +43,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/*** - * Entrance monitors the number of tasks for specified users and systems. - * If the configured threshold is exceeded, an alarm will be triggered. +/** + * * Entrance monitors the number of tasks for specified users and systems. If the configured + * threshold is exceeded, an alarm will be triggered. */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") @@ -56,7 +56,6 @@ public class TaskMonitor { private static final String ENTRANCE_RUNNING_TASK = "entrance_running_task"; private static final String ENTRANCE_QUEUED_TASK = "entrance_queued_task"; - @Scheduled(cron = "${linkis.monitor.entranceTask.cron}") public void entranceTask() throws IOException { List> userlist = new ArrayList<>(); @@ -71,7 +70,8 @@ public void entranceTask() throws IOException { try { data = MapUtils.getMap( - HttpsUntils.getEntranceTask(null, entranceEntity.get("username"),null), "data"); + HttpsUntils.getEntranceTask(null, entranceEntity.get("username"), null), + "data"); logger.info("TaskMonitor userlist response {}:", data); } catch (IOException e) { logger.warn("failed to get EntranceTask data"); @@ -119,7 +119,7 @@ public void entranceTask() throws IOException { }); Map likisData = null; try { - likisData = MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop",null), "data"); + likisData = MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", null), "data"); logger.info("TaskMonitor hadoop response {}:", likisData); } catch (IOException e) { logger.warn("failed to get EntranceTask data"); @@ -149,35 +149,49 @@ public void entranceTask() throws IOException { MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12008")); } - //指标上报 + // 指标上报 resourceSendToIms(); } - public static void resourceSendToIms() { - //获取所有的entrance实例,逐个上送IMS - ServiceInstance[] instances = Sender.getInstances(Constants.DIRTY_DATA_ENTRANCE_APPLICATIONNAME()); - if (null != instances) { - for (ServiceInstance instance : instances) { - String serviceInstance = instance.getInstance(); - try { - Map instanceData = MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", serviceInstance), "data"); - int runningNumber = 0; - int queuedNumber = 0; - if (instanceData.containsKey("runningNumber")) { - runningNumber = (int) instanceData.get("runningNumber"); - } - if (instanceData.containsKey("queuedNumber")) { - queuedNumber = (int) instanceData.get("queuedNumber"); - } - logger.info("ResourceMonitor send index "); - List list = new ArrayList<>(); - list.add(new IndexEntity(serviceInstance, "entrance", ENTRANCE_RUNNING_TASK, HttpsUntils.localHost, String.valueOf(runningNumber))); - list.add(new IndexEntity(serviceInstance, "entrance", ENTRANCE_QUEUED_TASK, HttpsUntils.localHost, String.valueOf(queuedNumber))); - HttpsUntils.sendIndex(list); - } catch (IOException e) { - logger.warn("failed to send EcmResource index :" + e); - } - } + public static void resourceSendToIms() { + // 获取所有的entrance实例,逐个上送IMS + ServiceInstance[] instances = + Sender.getInstances(Constants.DIRTY_DATA_ENTRANCE_APPLICATIONNAME()); + if (null != instances) { + for (ServiceInstance instance : instances) { + String 
serviceInstance = instance.getInstance(); + try { + Map instanceData = + MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", serviceInstance), "data"); + int runningNumber = 0; + int queuedNumber = 0; + if (instanceData.containsKey("runningNumber")) { + runningNumber = (int) instanceData.get("runningNumber"); + } + if (instanceData.containsKey("queuedNumber")) { + queuedNumber = (int) instanceData.get("queuedNumber"); + } + logger.info("ResourceMonitor send index "); + List list = new ArrayList<>(); + list.add( + new IndexEntity( + serviceInstance, + "entrance", + ENTRANCE_RUNNING_TASK, + HttpsUntils.localHost, + String.valueOf(runningNumber))); + list.add( + new IndexEntity( + serviceInstance, + "entrance", + ENTRANCE_QUEUED_TASK, + HttpsUntils.localHost, + String.valueOf(queuedNumber))); + HttpsUntils.sendIndex(list); + } catch (IOException e) { + logger.warn("failed to send EcmResource index :" + e); } + } } + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/UserModeMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/UserModeMonitor.java index 03db536344..c642a24762 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/UserModeMonitor.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/UserModeMonitor.java @@ -35,8 +35,10 @@ import org.apache.linkis.ujes.client.response.GetTableStatisticInfoResult; import org.apache.linkis.ujes.client.response.JobExecuteResult; import org.apache.linkis.ujes.client.response.JobInfoResult; + import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; + import java.net.SocketTimeoutException; import java.util.*; import java.util.concurrent.TimeUnit; @@ -45,112 +47,130 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/*** - * User mode monitoring: regularly trigger scripts to monitor whether the engine status is running normally +/** + * * User mode monitoring: regularly trigger scripts to monitor whether the engine status is running + * normally */ @Component public class UserModeMonitor { - private static final Logger logger = LoggerFactory.getLogger(UserModeMonitor.class); + private static final Logger logger = LoggerFactory.getLogger(UserModeMonitor.class); - private static final DWSClientConfig clientConfig = HttpsUntils.dwsClientConfig; + private static final DWSClientConfig clientConfig = HttpsUntils.dwsClientConfig; - private static final UJESClient client = new UJESClientImpl(clientConfig); + private static final UJESClient client = new UJESClientImpl(clientConfig); - @Scheduled(cron = "${linkis.monitor.user.cron}") - public void job() { - Optional.ofNullable(MonitorConfig.USER_MODE_ENGINE.getValue()).ifPresent(configStr -> { - ArrayList> userModeStr = - BDPJettyServerHelper.gson().fromJson(configStr, ArrayList.class); - userModeStr.forEach(engine -> { - // 3. build job and execute - JobExecuteResult jobExecuteResult = toSubmit(engine); + @Scheduled(cron = "${linkis.monitor.user.cron}") + public void job() { + Optional.ofNullable(MonitorConfig.USER_MODE_ENGINE.getValue()) + .ifPresent( + configStr -> { + ArrayList> userModeStr = + BDPJettyServerHelper.gson().fromJson(configStr, ArrayList.class); + userModeStr.forEach( + engine -> { + // 3. 
build job and execute + JobExecuteResult jobExecuteResult = toSubmit(engine); + logger.info( + "start run engineType: {},job id : {}", + engine.get("engineType"), + jobExecuteResult.taskID()); + HashMap parms = new HashMap<>(); + parms.put("$engineType", engine.get("engineType")); + parms.put("$url", MonitorConfig.GATEWAY_URL.getValue()); + parms.put("$jobId", jobExecuteResult.taskID()); + Utils.sleepQuietly(MonitorConfig.USER_MODE_TIMEOUT.getValue() * 1000); + JobInfoResult jobInfo = client.getJobInfo(jobExecuteResult); + if (jobInfo.isCompleted()) { + if (jobInfo.getJobStatus().equals("Failed")) { logger.info( - "start run engineType: {},job id : {}", - engine.get("engineType"), - jobExecuteResult.taskID()); - HashMap parms = new HashMap<>(); - parms.put("$engineType", engine.get("engineType")); - parms.put("$url", MonitorConfig.GATEWAY_URL.getValue()); - parms.put("$jobId", jobExecuteResult.taskID()); - Utils.sleepQuietly(MonitorConfig.USER_MODE_TIMEOUT.getValue() * 1000); - JobInfoResult jobInfo = client.getJobInfo(jobExecuteResult); - if (jobInfo.isCompleted()) { - if (jobInfo.getJobStatus().equals("Failed")) { - logger.info("run fail engineType: {},job id : {}", engine.get("engineType"), jobExecuteResult.taskID()); - RequestPersistTask requestPersistTask = jobInfo.getRequestPersistTask(); - parms.put("$errorCode", String.valueOf(requestPersistTask.getErrCode())); - parms.put("$errorMsg", requestPersistTask.getErrDesc()); - Map failedAlerts = MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); - PooledImsAlertUtils.addAlert(failedAlerts.get("12012")); - } - } else { - logger.info("run timeout engineType: {},job id : {}", engine.get("engineType"), jobExecuteResult.taskID()); - Map alerts = MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); - PooledImsAlertUtils.addAlert(alerts.get("12011")); - } + "run fail engineType: {},job id : {}", + engine.get("engineType"), + jobExecuteResult.taskID()); + RequestPersistTask requestPersistTask = jobInfo.getRequestPersistTask(); + parms.put("$errorCode", String.valueOf(requestPersistTask.getErrCode())); + parms.put("$errorMsg", requestPersistTask.getErrDesc()); + Map failedAlerts = + MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(failedAlerts.get("12012")); + } + } else { + logger.info( + "run timeout engineType: {},job id : {}", + engine.get("engineType"), + jobExecuteResult.taskID()); + Map alerts = + MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(alerts.get("12011")); } - ); - }); - } + }); + }); + } - private static JobExecuteResult toSubmit(LinkedTreeMap engine) { - // 1. build params - // set label map :EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant - Map labels = new HashMap(); - labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, engine.get("engineType")); // required engineType Label - labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, engine.get("executeUser") + "-IDE");// required execute user and creator eg:hadoop-IDE - labels.put(LabelKeyConstant.CODE_TYPE_KEY, engine.get("runType")); // required codeType - Map startupMap = new HashMap(16); - // setting linkis params - //startupMap.put("wds.linkis.rm.yarnqueue", "dws"); - // 2. 
build jobSubmitAction - JobSubmitAction jobSubmitAction = JobSubmitAction.builder() - .addExecuteCode(engine.get("code")) - .setStartupParams(startupMap) - .setUser(engine.get("executeUser")) //submit user - .addExecuteUser(engine.get("executeUser")) // execute user - .setLabels(labels) - .build(); - // 3. to execute - return client.submit(jobSubmitAction); - } + private static JobExecuteResult toSubmit(LinkedTreeMap engine) { + // 1. build params + // set label map :EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant + Map labels = new HashMap(); + labels.put( + LabelKeyConstant.ENGINE_TYPE_KEY, engine.get("engineType")); // required engineType Label + labels.put( + LabelKeyConstant.USER_CREATOR_TYPE_KEY, + engine.get("executeUser") + "-IDE"); // required execute user and creator eg:hadoop-IDE + labels.put(LabelKeyConstant.CODE_TYPE_KEY, engine.get("runType")); // required codeType + Map startupMap = new HashMap(16); + // setting linkis params + // startupMap.put("wds.linkis.rm.yarnqueue", "dws"); + // 2. build jobSubmitAction + JobSubmitAction jobSubmitAction = + JobSubmitAction.builder() + .addExecuteCode(engine.get("code")) + .setStartupParams(startupMap) + .setUser(engine.get("executeUser")) // submit user + .addExecuteUser(engine.get("executeUser")) // execute user + .setLabels(labels) + .build(); + // 3. to execute + return client.submit(jobSubmitAction); + } - @Scheduled(cron = "${linkis.monitor.user.db.cron:0 0/10 * * * ?}") - public void dbJob() { - Map properties= new HashMap<>(); - properties.put("readTimeout",MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue()); - DWSClientConfig clientConfig = HttpsUntils.createClientConfig(null, properties); - UJESClientImpl ujesClient = new UJESClientImpl(clientConfig); - GetTableStatisticInfoAction builder = GetTableStatisticInfoAction - .builder() - .setUser("hadoop") - .setDatabase("default") - .setTable("dual") - .builder(); - HashMap parms = new HashMap<>(); - try { - GetTableStatisticInfoResult tableStatisticInfo = ujesClient.getTableStatisticInfo(builder); - if (tableStatisticInfo.getStatus() != 0) { - logger.info("元数据查询服务用户态,执行失败,异常信息:"+tableStatisticInfo.getMessage()); -// parms.put("$msg", tableStatisticInfo.getMessage()); -// Map failedAlerts = MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); -// PooledImsAlertUtils.addAlert(failedAlerts.get("12017")); - } - } catch (Exception e) { - if(e instanceof SocketTimeoutException){ - Integer timeoutValue = MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue(); - long timeout = TimeUnit.MILLISECONDS.toSeconds(timeoutValue); - logger.info("元数据查询服务用户态,执行超时:"+timeout+"秒"); -// parms.put("$timeout", String.valueOf(timeout)); -// Map failedAlerts = MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); -// PooledImsAlertUtils.addAlert(failedAlerts.get("12018")); - } else { - logger.error("元数据查询服务用户态,执行异常:"+ e); -// parms.put("$msg", e.getMessage()); -// Map failedAlerts = MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); -// PooledImsAlertUtils.addAlert(failedAlerts.get("12017")); - } - } + @Scheduled(cron = "${linkis.monitor.user.db.cron:0 0/10 * * * ?}") + public void dbJob() { + Map properties = new HashMap<>(); + properties.put("readTimeout", MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue()); + DWSClientConfig clientConfig = HttpsUntils.createClientConfig(null, properties); + UJESClientImpl ujesClient = new UJESClientImpl(clientConfig); + GetTableStatisticInfoAction builder = + GetTableStatisticInfoAction.builder() + 
.setUser("hadoop") + .setDatabase("default") + .setTable("dual") + .builder(); + HashMap parms = new HashMap<>(); + try { + GetTableStatisticInfoResult tableStatisticInfo = ujesClient.getTableStatisticInfo(builder); + if (tableStatisticInfo.getStatus() != 0) { + logger.info("元数据查询服务用户态,执行失败,异常信息:" + tableStatisticInfo.getMessage()); + // parms.put("$msg", tableStatisticInfo.getMessage()); + // Map failedAlerts = + // MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + // PooledImsAlertUtils.addAlert(failedAlerts.get("12017")); + } + } catch (Exception e) { + if (e instanceof SocketTimeoutException) { + Integer timeoutValue = MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue(); + long timeout = TimeUnit.MILLISECONDS.toSeconds(timeoutValue); + logger.info("元数据查询服务用户态,执行超时:" + timeout + "秒"); + // parms.put("$timeout", String.valueOf(timeout)); + // Map failedAlerts = + // MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + // PooledImsAlertUtils.addAlert(failedAlerts.get("12018")); + } else { + logger.error("元数据查询服务用户态,执行异常:" + e); + // parms.put("$msg", e.getMessage()); + // Map failedAlerts = + // MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + // PooledImsAlertUtils.addAlert(failedAlerts.get("12017")); + } } + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ValidatorClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ValidatorClear.java index 440b7a6bc4..4df828e36c 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ValidatorClear.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/scheduled/ValidatorClear.java @@ -20,32 +20,32 @@ import org.apache.linkis.monitor.scan.app.monitor.config.MonitorConfig; import org.apache.linkis.monitor.scan.app.monitor.until.ThreadUtils; import org.apache.linkis.monitor.scan.utils.log.LogUtils; -import org.slf4j.Logger; + import org.springframework.context.annotation.PropertySource; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; import java.util.ArrayList; import java.util.List; -/*** - * Task: clean up linkis_et_validator_checkinfo data - */ +import org.slf4j.Logger; + +/** * Task: clean up linkis_et_validator_checkinfo data */ @Component @PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") public class ValidatorClear { - private static final Logger logger = LogUtils.stdOutLogger(); - - @Scheduled(cron = "${linkis.monitor.clear.validator.cron}") - public void ValidatorClear() { - logger.info("Start to clear_validator_record shell"); - List cmdlist = new ArrayList<>(); - cmdlist.add("sh"); - cmdlist.add(MonitorConfig.shellPath + "clear_validator_record.sh"); - logger.info("clear_validator_record shell command {}", cmdlist); - String exec = ThreadUtils.run(cmdlist, "clear_validator_record.sh"); - logger.info("shell log {}", exec); - logger.info("End to clear_validator_record shell "); - } + private static final Logger logger = LogUtils.stdOutLogger(); + + @Scheduled(cron = "${linkis.monitor.clear.validator.cron}") + public void ValidatorClear() { + logger.info("Start to clear_validator_record shell"); + List cmdlist = new ArrayList<>(); + cmdlist.add("sh"); + cmdlist.add(MonitorConfig.shellPath + "clear_validator_record.sh"); + 
logger.info("clear_validator_record shell command {}", cmdlist); + String exec = ThreadUtils.run(cmdlist, "clear_validator_record.sh"); + logger.info("shell log {}", exec); + logger.info("End to clear_validator_record shell "); + } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/until/HttpsUntils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/until/HttpsUntils.java index 1208a6ef64..34e12d175d 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/until/HttpsUntils.java +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scan/app/monitor/until/HttpsUntils.java @@ -32,6 +32,7 @@ import org.apache.linkis.monitor.scan.request.EntranceTaskAction; import org.apache.linkis.monitor.scan.response.EntranceTaskResult; import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.linkis.ujes.client.response.EmsListResult; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; @@ -44,7 +45,6 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -import org.apache.linkis.ujes.client.response.EmsListResult; import org.springframework.util.Assert; import java.io.IOException; @@ -132,14 +132,16 @@ public static DWSClientConfig createClientConfig(String url, Map return clientConfig; } - public static Map getEntranceTask(String url, String user,String Instance) throws IOException { + public static Map getEntranceTask(String url, String user, String Instance) + throws IOException { if (null == dwsClientConfig) { dwsClientConfig = createClientConfig(null, null); } if (null == client) { client = new MonitorHTTPClientClientImpl(dwsClientConfig); } - EntranceTaskAction build = EntranceTaskAction.newBuilder().setUser(user).setInstance(Instance).build(); + EntranceTaskAction build = + EntranceTaskAction.newBuilder().setUser(user).setInstance(Instance).build(); EntranceTaskResult result = client.entranList(build); return result.getResultMap(); } @@ -170,7 +172,9 @@ public static void sendIndex(List list) throws IOException { public static void sendChatbot(ChatbotEntity chatbotEntity) throws IOException { String json = BDPJettyServerHelper.gson().toJson(chatbotEntity); - StringEntity entity = new StringEntity(json, ContentType.create(ContentType.APPLICATION_JSON.getMimeType(), "UTF-8")); + StringEntity entity = + new StringEntity( + json, ContentType.create(ContentType.APPLICATION_JSON.getMimeType(), "UTF-8")); entity.setContentEncoding("UTF-8"); HttpPost httpPost = new HttpPost(MonitorConfig.CHATBOT_URL.getValue()); httpPost.setConfig(RequestConfig.DEFAULT); @@ -179,5 +183,4 @@ public static void sendChatbot(ChatbotEntity chatbotEntity) throws IOException { String responseStr = EntityUtils.toString(execute.getEntity(), "UTF-8"); Map map = BDPJettyServerHelper.gson().fromJson(responseStr, Map.class); } - } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanSpringConfiguration.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanSpringConfiguration.scala index 7ee159d936..520c0a78c3 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanSpringConfiguration.scala +++ 
b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanSpringConfiguration.scala @@ -26,10 +26,6 @@ import org.springframework.context.annotation.{ComponentScan, Configuration} import javax.annotation.PostConstruct -/** - * Created by shangda on 2021/11/19. - */ - @Configuration @ComponentScan(Array("org.apache.linkis.monitor.scan", "org.apache.linkis.mybatis")) class LinkisJobHistoryScanSpringConfiguration { diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/factory/MapperFactory.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/factory/MapperFactory.scala index 337592bf72..c7ebf10e88 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/factory/MapperFactory.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/factory/MapperFactory.scala @@ -22,12 +22,9 @@ import org.apache.linkis.monitor.scan.app.instance.dao.{ InstanceInfoDao, InstanceLabelDao } -import org.apache.linkis.monitor.scan.app.instance.dao.InstanceInfoDao import org.apache.linkis.monitor.scan.app.jobhistory.dao.JobHistoryMapper - object MapperFactory { - // val bmlVersionCleanScanOper = new BmlVersionCleanScanOper private var jobHistoryMapper: JobHistoryMapper = _ @@ -37,27 +34,27 @@ object MapperFactory { private var instanceLabelRelationMapper: InsLabelRelationDao = _ - def getJobHistoryMapper() = jobHistoryMapper + def getJobHistoryMapper(): JobHistoryMapper = jobHistoryMapper - def setJobHistoryMapper(jobHistoryMapper: JobHistoryMapper) = { + def setJobHistoryMapper(jobHistoryMapper: JobHistoryMapper): Unit = { MapperFactory.jobHistoryMapper = jobHistoryMapper } - def getInstanceInfoMapper() = instanceInfoMapper + def getInstanceInfoMapper(): InstanceInfoDao = instanceInfoMapper - def setInstanceInfoMapper(instanceInfoMapper: InstanceInfoDao) = { + def setInstanceInfoMapper(instanceInfoMapper: InstanceInfoDao): Unit = { MapperFactory.instanceInfoMapper = instanceInfoMapper } - def getInstanceLabelMapper() = instanceLabelMapper + def getInstanceLabelMapper(): InstanceLabelDao = instanceLabelMapper - def setInstanceLabelMapper(instanceLabelMapper: InstanceLabelDao) = { + def setInstanceLabelMapper(instanceLabelMapper: InstanceLabelDao): Unit = { MapperFactory.instanceLabelMapper = instanceLabelMapper } - def getInsLabelRelationMapper() = instanceLabelRelationMapper + def getInsLabelRelationMapper(): InsLabelRelationDao = instanceLabelRelationMapper - def setInsLabelRelationMapper(instanceLabelRelationMapper: InsLabelRelationDao) = { + def setInsLabelRelationMapper(instanceLabelRelationMapper: InsLabelRelationDao): Unit = { MapperFactory.instanceLabelRelationMapper = instanceLabelRelationMapper } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/JobHistoryDataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/JobHistoryDataFetcher.scala index b8eff63ec8..c7fd3f40e8 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/JobHistoryDataFetcher.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/JobHistoryDataFetcher.scala @@ -17,16 +17,16 @@ package org.apache.linkis.monitor.scan.app.jobhistory -import java.util -import java.util.Date - -import 
org.apache.commons.lang3.StringUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.monitor.scan.app.jobhistory.dao.JobHistoryMapper import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException import org.apache.linkis.monitor.scan.constants.Constants import org.apache.linkis.monitor.scan.core.pac.AbstractDataFetcher +import org.apache.commons.lang3.StringUtils + +import java.util +import java.util.Date class JobHistoryDataFetcher(args: Array[Any], mapper: JobHistoryMapper) extends AbstractDataFetcher @@ -89,7 +89,11 @@ class JobHistoryDataFetcher(args: Array[Any], mapper: JobHistoryMapper) throw t } } - if (StringUtils.isNotBlank(args(3).asInstanceOf[String]) && args(3).asInstanceOf[String].equals("updated_time")) { + if ( + StringUtils.isNotBlank(args(3).asInstanceOf[String]) && args(3) + .asInstanceOf[String] + .equals("updated_time") + ) { val list = new util.ArrayList[String]() Constants.DIRTY_DATA_FINISHED_JOB_STATUS_ARRAY.foreach(list.add(_)) mapper diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala index f899952c71..f89c680624 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala @@ -19,5 +19,4 @@ package org.apache.linkis.monitor.scan.app.jobhistory.errorcode import org.apache.linkis.monitor.scan.core.ob.SingleObserverEvent - class JobHistoryErrCodeHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeRule.scala index c613b3d306..32a7cbe474 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeRule.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrCodeRule.scala @@ -17,19 +17,21 @@ package org.apache.linkis.monitor.scan.app.jobhistory.errorcode -import java.util - import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.monitor.until.CacheUtils import org.apache.linkis.monitor.scan.core.ob.Observer import org.apache.linkis.monitor.scan.core.pac.{AbstractScanRule, ScannedData} -import scala.collection.JavaConverters._ +import java.util + +import scala.collection.JavaConverters._ /** - * 针对执行任务返回的错误码进行监控,执行脚本任务时,会记录执行的错误码在数据库中, - * 服务会根据数据库中记录的错误码,来进行告警,如果错误码中包含(11001,11002)即可触发告警 + * Monitor the error codes returned by executing tasks. When executing script tasks, the executed + * error codes will be recorded in the database. The service will generate an alarm based on the + * error code recorded in the database. If the error code contains (11001, 11002), the alarm will be + * triggered. 
*/ class JobHistoryErrCodeRule(errorCodes: util.Set[String], hitObserver: Observer) extends AbstractScanRule(event = new JobHistoryErrCodeHitEvent, observer = hitObserver) @@ -60,7 +62,9 @@ class JobHistoryErrCodeRule(errorCodes: util.Set[String], hitObserver: Observer) } scanRuleList.put("jobHistoryId", history.getId) case _ => - logger.warn("Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName) + logger.warn( + "Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName + ) } } } else { @@ -71,9 +75,9 @@ class JobHistoryErrCodeRule(errorCodes: util.Set[String], hitObserver: Observer) logger.info("hit " + alertData.size() + " data in one iteration") if (alertData.size() > 0) { getHitEvent().notifyObserver(getHitEvent(), alertData) - true + true } else { - false + false } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala index a037e0f017..8683dad15e 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala @@ -17,17 +17,16 @@ package org.apache.linkis.monitor.scan.app.jobhistory.errorcode -import java.util - import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException import org.apache.linkis.monitor.scan.core.ob.{Event, Observer} import org.apache.linkis.monitor.scan.utils.alert.AlertDesc import org.apache.linkis.monitor.scan.utils.alert.ims.{ImsAlertDesc, PooledImsAlertUtils} -import scala.collection.JavaConverters._ +import java.util +import scala.collection.JavaConverters._ class JobHistoryErrorCodeAlertSender(alerts: util.Map[String, AlertDesc]) extends Observer diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedAlertSender.scala index aa564ab335..fd311e7fcc 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedAlertSender.scala @@ -17,9 +17,6 @@ package org.apache.linkis.monitor.scan.app.jobhistory.jobtime -import java.text.MessageFormat -import java.util - import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException @@ -28,13 +25,13 @@ import org.apache.linkis.monitor.scan.core.ob.{Event, Observer} import org.apache.linkis.monitor.scan.utils.alert.AlertDesc import org.apache.linkis.monitor.scan.utils.alert.ims.{ImsAlertDesc, PooledImsAlertUtils} +import java.text.MessageFormat +import java.util + import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer - -class JobTimeExceedAlertSender(alerts: 
util.Map[String, AlertDesc]) - extends Observer - with Logging { +class JobTimeExceedAlertSender(alerts: util.Map[String, AlertDesc]) extends Observer with Logging { private val orderedThresholds: Array[Long] = { val ret = new ArrayBuffer[Long]() @@ -79,34 +76,36 @@ class JobTimeExceedAlertSender(alerts: util.Map[String, AlertDesc]) for (t <- orderedThresholds) { // search max threshold that is smaller than elapse if (elapse >= t) { ts = t - } else { - - } + } else {} } val name = ts.toString val alert = if (!toSend.containsKey(name)) { alerts .get(name) - .asInstanceOf[ - ImsAlertDesc - ] + .asInstanceOf[ImsAlertDesc] } else { toSend.get(name) } - - val newInfo = MessageFormat.format("[Linkis任务信息]您好,您在Linkis/DSS提交的任务(任务ID:{0}),已经运行超过{1}h," + - "请关注是否任务正常,如果不正常您可以到Linkis/DSS管理台进行任务的kill,集群信息为BDAP({2})。详细解决方案见Q47:{3} " - , jobHistory.getId, (elapse / 1000 / 60 / 60).toString, jobHistory.getInstances, MonitorConfig.SOLUTION_URL.getValue) + + val newInfo = MessageFormat.format( + MonitorConfig.TASK_RUNTIME_TIMEOUT_DESC.getValue, + jobHistory.getId, + (elapse / 1000 / 60 / 60).toString, + jobHistory.getInstances, + MonitorConfig.SOLUTION_URL.getValue + ) val newNumHit = alert.numHit + 1 val receiver = new util.HashSet[String]() receiver.add(jobHistory.getSubmitUser) receiver.add(jobHistory.getExecuteUser) receiver.addAll(alert.alertReceivers) - val ImsAlertDesc = alert.copy(alertInfo = newInfo, alertReceivers = receiver, numHit = newNumHit) + val ImsAlertDesc = + alert.copy(alertInfo = newInfo, alertReceivers = receiver, numHit = newNumHit) PooledImsAlertUtils.addAlert(ImsAlertDesc) } } } + } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedHitEvent.scala index b7b883e09b..a83cd0ee01 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedHitEvent.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedHitEvent.scala @@ -19,5 +19,4 @@ package org.apache.linkis.monitor.scan.app.jobhistory.jobtime import org.apache.linkis.monitor.scan.core.ob.SingleObserverEvent - class JobTimeExceedHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedRule.scala index b91d605d4e..821a368d65 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedRule.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/jobtime/JobTimeExceedRule.scala @@ -17,9 +17,6 @@ package org.apache.linkis.monitor.scan.app.jobhistory.jobtime -import java.util -import java.util.Locale - import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException @@ -28,11 +25,15 @@ import org.apache.linkis.monitor.scan.constants.Constants import org.apache.linkis.monitor.scan.core.ob.Observer import org.apache.linkis.monitor.scan.core.pac.{AbstractScanRule, ScannedData} +import java.util 
+import java.util.Locale + + import scala.collection.JavaConverters._ /** - * 针对执行任务状态进行监控,扫描12小时之外,24小时之内的的数据, - * 如果规则范围内,有数据状态是(Inited,WaitForRetry,Scheduled,Running)其中之一,则触发告警 + * Monitor the execution status of tasks by scanning data older than 12 hours but within 24 hours. If, + * within the scope of the rule, there is a record whose status is one of (Inited, WaitForRetry, Scheduled, + * Running), an alarm will be triggered. */ class JobTimeExceedRule(thresholds: util.Set[String], hitObserver: Observer) extends AbstractScanRule(event = new JobTimeExceedHitEvent, observer = hitObserver) @@ -94,9 +95,9 @@ class JobTimeExceedRule(thresholds: util.Set[String], hitObserver: Observer) logger.info("hit " + alertData.size() + " data in one iteration") if (alertData.size() > 0) { getHitEvent.notifyObserver(getHitEvent, alertData) - true + true } else { - false + false } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsAlertSender.scala index 77bc29ffe2..e7ad384e56 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsAlertSender.scala @@ -17,8 +17,6 @@ package org.apache.linkis.monitor.scan.app.jobhistory.labels -import java.util - import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException @@ -28,6 +26,8 @@ import org.apache.linkis.monitor.scan.utils.alert.AlertDesc import org.apache.linkis.monitor.scan.utils.alert.ims.{PooledImsAlertUtils, UserLabelAlertUtils} import org.apache.linkis.server.BDPJettyServerHelper +import java.util + import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsRule.scala index 6308ac28b0..f4c65f7ba4 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsRule.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/labels/JobHistoryLabelsRule.scala @@ -17,10 +17,6 @@ package org.apache.linkis.monitor.scan.app.jobhistory.labels -import java.util - -import com.google.common.collect.HashBiMap -import org.apache.commons.lang3.StringUtils import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.monitor.until.CacheUtils @@ -29,11 +25,17 @@ import org.apache.linkis.monitor.scan.core.ob.Observer import org.apache.linkis.monitor.scan.core.pac.{AbstractScanRule, ScannedData} import org.apache.linkis.server.BDPJettyServerHelper +import org.apache.commons.lang3.StringUtils + +import java.util + import scala.collection.JavaConverters._ +import com.google.common.collect.HashBiMap + /** - * 对前20分钟内的执行数据进行扫描,对数据的labels字段进行判断, - * 判断依据monitor配置(linkis.monitor.jobhistory.userLabel.tenant) + * Scan 
the execution data within the previous 20 minutes and judge the labels field of each record. + * Judgment is based on the monitor configuration (linkis.monitor.jobhistory.userLabel.tenant) */ class JobHistoryLabelsRule(hitObserver: Observer) extends AbstractScanRule(event = new JobHistoryLabelsHitEvent, observer = hitObserver) @@ -69,7 +71,6 @@ class JobHistoryLabelsRule(hitObserver: Observer) Constants.USER_LABEL_TENANT.getValue, classOf[java.util.Map[String, String]] ) - // 当任务的creator是qualitis(或dops)时,tenant不是qualitis发出告警 val listIterator = configMap.keySet.iterator while ({ listIterator.hasNext @@ -82,7 +83,6 @@ class JobHistoryLabelsRule(hitObserver: Observer) } } } - // 当任务代理tenant:Qualitis标签,但是creator不是qualitis标签也进行告警 if (configMap.values().contains(tenant)) { val bimap: HashBiMap[String, String] = HashBiMap.create(configMap) val key = bimap.inverse().get(tenant) @@ -103,9 +103,9 @@ class JobHistoryLabelsRule(hitObserver: Observer) logger.info("hit " + alertData.size() + " data in one iteration") if (alertData.size() > 0) { getHitEvent.notifyObserver(getHitEvent, alertData) - true + true } else { - false + false } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonJobRunTimeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonJobRunTimeRule.scala index 1912acf8b4..3c42600432 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonJobRunTimeRule.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonJobRunTimeRule.scala @@ -17,24 +17,26 @@ package org.apache.linkis.monitor.scan.app.jobhistory.runtime -import org.apache.commons.lang3.StringUtils import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.constants.Constants import org.apache.linkis.monitor.scan.core.ob.Observer import org.apache.linkis.monitor.scan.core.pac.{AbstractScanRule, ScannedData} +import org.apache.commons.lang3.StringUtils + import java.util + import scala.collection.JavaConverters._ /** - * 对前20分钟内的执行数据进行扫描, - * 1.数据的ObserveInfo字段进行判断是否为空, - * 2.任务状态已经完成(Succeed,Failed,Cancelled,Timeout,ALL) - * 满足条件即可触发告警 + * Scan the execution data within the previous 20 minutes, + * 1. check whether the ObserveInfo field of the record is empty, 2. 
The task status has been + * completed (Succeed, Failed, Cancelled, Timeout, ALL) Alarms can be triggered when conditions + * are met */ class CommonJobRunTimeRule(hitObserver: Observer) - extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver) + extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver) with Logging { /** @@ -54,8 +56,12 @@ class CommonJobRunTimeRule(hitObserver: Observer) for (d <- sd.getData().asScala) { d match { case jobHistory: JobHistory => - if (Constants.DIRTY_DATA_FINISHED_JOB_STATUS.contains(jobHistory.getStatus.toUpperCase()) - &&StringUtils.isNotBlank(jobHistory.getObserveInfo)) { + if ( + Constants.DIRTY_DATA_FINISHED_JOB_STATUS.contains( + jobHistory.getStatus.toUpperCase() + ) + && StringUtils.isNotBlank(jobHistory.getObserveInfo) + ) { alertData.add(jobHistory) } else { logger.warn("jobHistory is not completely , taskid :" + d) @@ -70,9 +76,9 @@ class CommonJobRunTimeRule(hitObserver: Observer) logger.info("hit " + alertData.size() + " data in one iteration") if (alertData.size() > 0) { getHitEvent.notifyObserver(getHitEvent, alertData) - true + true } else { - false + false } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonRunTimeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonRunTimeAlertSender.scala index 6aca4c38ff..841c430504 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonRunTimeAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/CommonRunTimeAlertSender.scala @@ -17,12 +17,6 @@ package org.apache.linkis.monitor.scan.app.jobhistory.runtime -import java.net.InetAddress -import java.text.SimpleDateFormat -import java.util -import java.util.Date - -import org.apache.commons.collections.MapUtils import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException @@ -31,12 +25,16 @@ import org.apache.linkis.monitor.scan.core.ob.{Event, Observer} import org.apache.linkis.monitor.scan.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils} import org.apache.linkis.server.BDPJettyServerHelper -import scala.collection.JavaConverters._ +import org.apache.commons.collections.MapUtils +import java.net.InetAddress +import java.text.SimpleDateFormat +import java.util +import java.util.Date -class CommonRunTimeAlertSender() - extends Observer - with Logging { +import scala.collection.JavaConverters._ + +class CommonRunTimeAlertSender() extends Observer with Logging { private val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") override def update(e: Event, jobHistoryList: scala.Any): Unit = { @@ -59,10 +57,21 @@ class CommonRunTimeAlertSender() logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) } else { val jobHistory = a.asInstanceOf[JobHistory] - val observeInfoMap = BDPJettyServerHelper.gson.fromJson(jobHistory.getObserveInfo, classOf[java.util.Map[String, String]]) + val observeInfoMap = BDPJettyServerHelper.gson.fromJson( + jobHistory.getObserveInfo, + classOf[java.util.Map[String, String]] + ) val extraMap = MapUtils.getMap(observeInfoMap, "extra") - observeInfoMap.put("title", extraMap.get("title").toString + ",任务id:" + 
jobHistory.getId + ",执行结果 :" + jobHistory.getStatus) - observeInfoMap.put("$detail", extraMap.get("detail").toString + ",执行结果 :" + jobHistory.getStatus) + observeInfoMap.put( + "title", + extraMap + .get("title") + .toString + ",任务id:" + jobHistory.getId + ",执行结果 :" + jobHistory.getStatus + ) + observeInfoMap.put( + "$detail", + extraMap.get("detail").toString + ",执行结果 :" + jobHistory.getStatus + ) observeInfoMap.put("$submitUser", jobHistory.getSubmitUser) observeInfoMap.put("$status", jobHistory.getStatus) observeInfoMap.put("$id", jobHistory.getId.toString) @@ -80,9 +89,10 @@ class CommonRunTimeAlertSender() observeInfoMap.put("$ip", InetAddress.getLocalHost.getHostAddress) observeInfoMap.remove("taskId") observeInfoMap.remove("extra") - val alters = MonitorAlertUtils.getAlertsByDss(Constants.JOB_RESULT_IM, observeInfoMap) + val alters = MonitorAlertUtils.getAlerts(Constants.JOB_RESULT_IM, observeInfoMap) PooledImsAlertUtils.addAlert(alters.get("12016")) } } } + } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala index b9f35ce7c7..621aacb2d9 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala @@ -17,8 +17,6 @@ package org.apache.linkis.monitor.scan.app.jobhistory.runtime -import java.util - import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.entity.JobHistory import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException @@ -26,18 +24,18 @@ import org.apache.linkis.monitor.scan.constants.Constants import org.apache.linkis.monitor.scan.core.ob.{Event, Observer} import org.apache.linkis.monitor.scan.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils} +import java.util + import scala.collection.JavaConverters._ /** - * 对前20分钟内的执行数据进行扫描,对已结束的任务进行判断, - * 1.jobhistory中的parm字段中包含(task.notification.conditions) - * 2.执行任务的结果是(Succeed,Failed,Cancelled,Timeout,ALL)其中任意一个,则触发告警 - * 3.job的结果是已经结束 - * 同时满足上述三个条件即可触发告警 + * Scan the execution data within the first 20 minutes, judge the completed tasks, + * 1. The parm field in jobhistory contains (task.notification.conditions) 2. 
If the result of + executing the task is any one of (Succeed, Failed, Cancelled, Timeout, ALL), an alarm will be + triggered; 3. the job has already ended. The alarm is triggered only if the + above three conditions are met at the same time. */ -class JobHistoryRunTimeAlertSender() - extends Observer - with Logging { +class JobHistoryRunTimeAlertSender() extends Observer with Logging { override def update(e: Event, jobHistroyList: scala.Any): Unit = { if (!e.isInstanceOf[JobHistoryRunTimeHitEvent]) { @@ -70,4 +68,5 @@ class JobHistoryRunTimeAlertSender() } } } + } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeRule.scala index e136ea46d0..4f91be337a 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeRule.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/app/jobhistory/runtime/JobHistoryRunTimeRule.scala @@ -27,11 +27,11 @@ import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.server.BDPJettyServerHelper import java.util -import scala.collection.JavaConverters._ +import scala.collection.JavaConverters._ class JobHistoryRunTimeRule(hitObserver: Observer) - extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver) + extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver) with Logging { private val scanRuleList = CacheUtils.cacheBuilder @@ -52,15 +52,28 @@ class JobHistoryRunTimeRule(hitObserver: Observer) for (d <- sd.getData().asScala) { d match { case jobHistory: JobHistory => - if (Constants.DIRTY_DATA_FINISHED_JOB_STATUS.contains(jobHistory.getStatus.toUpperCase())) { - val parmsMap: util.Map[String, scala.AnyRef] = BDPJettyServerHelper.gson.fromJson(jobHistory.getParams, classOf[util.Map[String, scala.AnyRef]]) + if ( + Constants.DIRTY_DATA_FINISHED_JOB_STATUS.contains( + jobHistory.getStatus.toUpperCase() + ) + ) { + val parmsMap: util.Map[String, scala.AnyRef] = BDPJettyServerHelper.gson.fromJson( + jobHistory.getParams, + classOf[util.Map[String, scala.AnyRef]] + ) val runtimeMap = TaskUtils.getRuntimeMap(parmsMap) - if (runtimeMap.containsKey("task.notification.conditions") && - Constants.DIRTY_DATA_FINISHED_JOB_STATUS.contains(String.valueOf(runtimeMap.get("task.notification.conditions")).toUpperCase())) { - alertData.add(jobHistory) + if ( + runtimeMap.containsKey("task.notification.conditions") && + Constants.DIRTY_DATA_FINISHED_JOB_STATUS.contains( + String.valueOf(runtimeMap.get("task.notification.conditions")).toUpperCase() + ) + ) { + alertData.add(jobHistory) } } else { - logger.warn("Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName) + logger.warn( + "Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName + ) } scanRuleList.put("jobHistoryId", jobHistory.getId) case _ => @@ -73,9 +86,9 @@ class JobHistoryRunTimeRule(hitObserver: Observer) logger.info("hit " + alertData.size() + " data in one iteration") if (alertData.size() > 0) { getHitEvent.notifyObserver(getHitEvent, alertData) - true + true } else { - false + false } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/client/MonitorHTTPClient.scala 
b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/client/MonitorHTTPClient.scala index 8813aba210..97cd429887 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/client/MonitorHTTPClient.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/client/MonitorHTTPClient.scala @@ -21,7 +21,11 @@ import org.apache.linkis.httpclient.authentication.AuthenticationStrategy import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} import org.apache.linkis.httpclient.response.Result -import org.apache.linkis.monitor.scan.request.{EmsListAction, EntranceTaskAction, MonitorResourceAction} +import org.apache.linkis.monitor.scan.request.{ + EmsListAction, + EntranceTaskAction, + MonitorResourceAction +} import org.apache.linkis.monitor.scan.response.EntranceTaskResult import org.apache.linkis.ujes.client.response.EmsListResult diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/Constants.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/Constants.scala index f3e6d74c48..89ab330b4d 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/Constants.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/Constants.scala @@ -19,76 +19,58 @@ package org.apache.linkis.monitor.scan.constants import org.apache.linkis.common.conf.CommonVars - object Constants { - val ALERT_IMS_URL = CommonVars.properties.getProperty( - "wds.linkis.alert.url", - "http://127.0.0.1:10812/ims_data_access/send_alarm.do" - ) + val SCAN_PREFIX_ERRORCODE = "jobhistory.errorcode." + val SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC = "jobhistory.unfinished.time.exceed.sec." + val ALERT_RESOURCE_MONITOR = "ecm.resource.monitor.im." val ALERT_PROPS_FILE_PATH = CommonVars.properties.getProperty( - "wds.linkis.alert.ims.file.path", - "linkis-et-monitor-ims.properties" + "linkis.alert.conf.file.path", + "linkis-et-monitor-file.properties" ) - val ALERT_IMS_MAX_LINES = CommonVars[Int]("wds.linkis.alert.ims.max.lines", 8).getValue - - val SCAN_INTERVALS_SECONDS = - CommonVars[Long]("wds.linkis.errorcode.scanner.interval.seconds", 1 * 60 * 60).getValue - - val MAX_INTERVALS_SECONDS = - CommonVars[Long]("wds.linkis.errorcode.scanner.max.interval.seconds", 1 * 60 * 60).getValue + val ALERT_IMS_URL = CommonVars.properties.getProperty( + "linkis.alert.url", + "http://127.0.0.1:10812/ims_data_access/send_alarm.do" + ) val ALERT_SUB_SYSTEM_ID = - CommonVars.properties.getProperty("wds.linkis.alert.ims.sub_system_id", "5435") + CommonVars.properties.getProperty("linkis.alert.sub_system_id", "10001") val ALERT_DEFAULT_RECEIVERS = CommonVars.properties - .getProperty("wds.linkis.alert.receiver.default", "") + .getProperty("linkis.alert.receiver.default", "") .split(",") .toSet[String] - val SCAN_PREFIX_ERRORCODE = "jobhistory.errorcode." - val SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC = "jobhistory.unfinished.time.exceed.sec." 
- - val SCAN_RULE_UNFINISHED_JOB_STATUS = - "Inited,WaitForRetry,Scheduled,Running".split(",").map(s => s.toUpperCase()) + val ALERT_IMS_MAX_LINES = CommonVars[Int]("linkis.alert.content.max.lines", 8).getValue - val DIRTY_DATA_EUREKA_DELETE_INSTANCE_URL = - CommonVars.apply("wds.linkis.eureka.defaultZone", "http://localhost:20303").getValue + val ERRORCODE_SCAN_INTERVALS_SECONDS = + CommonVars[Long]("linkis.errorcode.scanner.interval.seconds", 1 * 60 * 60).getValue - val DIRTY_DATA_EUREKA_DELETE_PATH = CommonVars - .apply("wds.linkis.dirty.data.eureka.delete.path", "/apps/{springName}/{instance}") - .getValue + val ERRORCODE_MAX_INTERVALS_SECONDS = + CommonVars[Long]("linkis.errorcode.scanner.max.interval.seconds", 1 * 60 * 60).getValue - val DIRTY_DATA_UNFINISHED_JOB_STATUS = + val SCAN_RULE_UNFINISHED_JOB_STATUS = "Inited,WaitForRetry,Scheduled,Running".split(",").map(s => s.toUpperCase()) - val DIRTY_DATA_JOB_TARGET_STATUS = "Cancelled" - - val DIRTY_DATA_ENTRANCE_APPLICATIONNAME = - CommonVars("wds.linkis.entrance.spring.name", "linkis-cg-entrance").getValue - - val MODIFY_DB_DATA_DAYS = CommonVars("wds.linkis.dirty.data.modify.db.days", 1).getValue - val ALERT_RESOURCE_MONITOR = "ecm.resource.monitor.im." - val LINKIS_API_VERSION: CommonVars[String] = - CommonVars[String]("wds.linkis.bml.api.version", "v1") + CommonVars[String]("linkis.bml.api.version", "v1") val AUTH_TOKEN_KEY: CommonVars[String] = - CommonVars[String]("wds.linkis.bml.auth.token.key", "Validation-Code") + CommonVars[String]("linkis.bml.auth.token.key", "Validation-Code") val AUTH_TOKEN_VALUE: CommonVars[String] = - CommonVars[String]("wds.linkis.bml.auth.token.value", "BML-AUTH") + CommonVars[String]("linkis.bml.auth.token.value", "BML-AUTH") val CONNECTION_MAX_SIZE: CommonVars[Int] = - CommonVars[Int]("wds.linkis.bml.connection.max.size", 10) + CommonVars[Int]("linkis.bml.connection.max.size", 10) val CONNECTION_TIMEOUT: CommonVars[Int] = - CommonVars[Int]("wds.linkis.bml.connection.timeout", 5 * 60 * 1000) + CommonVars[Int]("linkis.bml.connection.timeout", 5 * 60 * 1000) val CONNECTION_READ_TIMEOUT: CommonVars[Int] = - CommonVars[Int]("wds.linkis.bml.connection.read.timeout", 10 * 60 * 1000) + CommonVars[Int]("linkis.bml.connection.read.timeout", 10 * 60 * 1000) val AUTH_TOKEN_KEY_SHORT_NAME = "tokenKey" val AUTH_TOKEN_VALUE_SHORT_NAME = "tokenValue" @@ -106,8 +88,4 @@ object Constants { val THREAD_TIME_OUT_IM = "thread.monitor.timeout.im." val JOB_RESULT_IM = "jobhistory.result.monitor.im." 
- val DIRTY_DATA_FINISHED_JOB_STATUS = - "Succeed,Failed,Cancelled,Timeout,ALL".split(",").map(s => s.toUpperCase()) - val DIRTY_DATA_FINISHED_JOB_STATUS_ARRAY = "Succeed,Failed,Cancelled,Timeout".split(",") - } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/ScanOperatorEnum.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/ScanOperatorEnum.scala index 8ff3755747..05b244a458 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/ScanOperatorEnum.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/constants/ScanOperatorEnum.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.constants - object ScanOperatorEnum extends Enumeration { type ScanOperatorEnum = Value val BML_VERSION, JOB_HISTORY = Value diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Event.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Event.scala index 60322c5814..888a3aa20b 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Event.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Event.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.core.ob - trait Event { def isRegistered: Boolean diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Observer.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Observer.scala index 58849dd06c..04359e309f 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Observer.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/ob/Observer.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.core.ob - trait Observer { /** diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractDataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractDataFetcher.scala index 02ad320c55..2f7f9a67e6 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractDataFetcher.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractDataFetcher.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.core.pac - abstract class AbstractDataFetcher(customName: String = "") extends DataFetcher { private val name: String = if (!customName.isEmpty) { diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractScanRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractScanRule.scala index 8d6762c562..f572bd90bd 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractScanRule.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/AbstractScanRule.scala @@ -21,7 +21,6 @@ import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.core.ob.{Event, Observer} import org.apache.linkis.monitor.scan.core.ob.Observer - abstract class AbstractScanRule(customName: 
String = "", event: Event, observer: Observer) extends ScanRule with Logging { diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/BaseScannedData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/BaseScannedData.scala index 02f32a3fc5..efc573132a 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/BaseScannedData.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/BaseScannedData.scala @@ -19,7 +19,6 @@ package org.apache.linkis.monitor.scan.core.pac import java.util - class BaseScannedData(owner: String, data: util.List[scala.Any]) extends ScannedData { override def getOwner(): String = this.owner diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScanBuffer.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScanBuffer.scala index d38c02dea7..8075792161 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScanBuffer.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScanBuffer.scala @@ -20,7 +20,6 @@ package org.apache.linkis.monitor.scan.core.pac import java.util import java.util.concurrent.LinkedBlockingDeque - class ScanBuffer { val buffer: LinkedBlockingDeque[ScannedData] = new LinkedBlockingDeque[ScannedData] diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScannedData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScannedData.scala index a6914c3e60..2f5cc68208 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScannedData.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/pac/ScannedData.scala @@ -19,7 +19,6 @@ package org.apache.linkis.monitor.scan.core.pac import java.util - trait ScannedData { def getOwner(): String diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AbstractScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AbstractScanner.scala index e6939df36e..17fae63fb0 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AbstractScanner.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AbstractScanner.scala @@ -17,15 +17,14 @@ package org.apache.linkis.monitor.scan.core.scanner -import java.util -import java.util.concurrent.CopyOnWriteArrayList -import java.util.concurrent.atomic.AtomicInteger - import org.apache.linkis.common.utils.Logging import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException import org.apache.linkis.monitor.scan.core.ob.{Event, Observer} import org.apache.linkis.monitor.scan.core.pac._ +import java.util +import java.util.concurrent.CopyOnWriteArrayList +import java.util.concurrent.atomic.AtomicInteger abstract class AbstractScanner extends AnomalyScanner with Logging { private val buffer: ScanBuffer = new ScanBuffer @@ -71,8 +70,8 @@ abstract class AbstractScanner extends AnomalyScanner with Logging { } /** - * Returns a buffer that allows read/write simultaneously buffer is allowed to be 
written by - * other thread + * Returns a buffer that allows read/write simultaneously buffer is allowed to be written by other + * thread */ override def getBuffer(): ScanBuffer = buffer @@ -102,10 +101,7 @@ abstract class AbstractScanner extends AnomalyScanner with Logging { */ override def run(): Unit = { if (dataFetcherList.size() == 0) { - throw new AnomalyScannerException( - 21304, - "attempting to run scanner with empty dataFetchers" - ) + throw new AnomalyScannerException(21304, "attempting to run scanner with empty dataFetchers") } if (buffer == null) { throw new AnomalyScannerException(21304, "attempting to run scanner with null buffer") @@ -138,8 +134,8 @@ abstract class AbstractScanner extends AnomalyScanner with Logging { } /** - * 1. should be a blocking call 2. read from [[ScanBuffer]] 2. see if [[ScanRule]] is matched - * 3. trigger [[Observer]] + * 1. should be a blocking call 2. read from [[ScanBuffer]] 2. see if [[ScanRule]] is matched 3. + * trigger [[Observer]] */ override def analyzeOneIteration(): Unit = { val dataToAnalyze = buffer.drain() @@ -158,8 +154,8 @@ abstract class AbstractScanner extends AnomalyScanner with Logging { } /** - * 1. should be non-blocking 2. keeps calling scanOneIteration() and analyzeOneIteration() - * utils stop() is called + * 1. should be non-blocking 2. keeps calling scanOneIteration() and analyzeOneIteration() utils + * stop() is called */ override def start(): Unit = { // TODO diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AnomalyScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AnomalyScanner.scala index 854ca78c3f..d887e7bf86 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AnomalyScanner.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/AnomalyScanner.scala @@ -81,14 +81,14 @@ trait AnomalyScanner { protected def scanOneIteration(): Unit /** - * Returns a buffer that allows read/write simultaneously buffer is allowed to be written by - * other thread + * Returns a buffer that allows read/write simultaneously buffer is allowed to be written by other + * thread */ protected def getBuffer(): ScanBuffer /** - * 1. should be a blocking call 2. read from [[ScanBuffer]] 2. see if [[ScanRule]] is matched - * 3. trigger [[[[org.apache.linkis.tools.core.ob.Observer]]]] + * 1. should be a blocking call 2. read from [[ScanBuffer]] 2. see if [[ScanRule]] is matched 3. 
+ * trigger [[[[org.apache.linkis.tools.core.ob.Observer]]]] */ protected def analyzeOneIteration(): Unit diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/DefaultScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/DefaultScanner.scala index c56a3f8182..2463439873 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/DefaultScanner.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/core/scanner/DefaultScanner.scala @@ -19,7 +19,6 @@ package org.apache.linkis.monitor.scan.core.scanner import org.apache.linkis.monitor.scan.utils.alert.ims.PooledImsAlertUtils - class DefaultScanner extends AbstractScanner { override def shutdown(): Unit = { diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/request/EntranceTaskAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/request/EntranceTaskAction.scala index 6126a2b04b..94636a41ca 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/request/EntranceTaskAction.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/request/EntranceTaskAction.scala @@ -57,7 +57,8 @@ object EntranceTaskAction { def build(): EntranceTaskAction = { val entranceTaskAction = new EntranceTaskAction if (StringUtils.isNotBlank(creator)) entranceTaskAction.setParameter("creator", creator) - if (StringUtils.isNotBlank(engineTypeLabel)) entranceTaskAction.setParameter("engineTypeLabel", engineTypeLabel) + if (StringUtils.isNotBlank(engineTypeLabel)) + entranceTaskAction.setParameter("engineTypeLabel", engineTypeLabel) if (StringUtils.isNotBlank(instance)) entranceTaskAction.setParameter("instance", instance) if (StringUtils.isNotBlank(user)) { // hadoop用户应该获取全部用户entrance信息,则无需传user,即可获取全部entrance信息 diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertDesc.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertDesc.scala index b1f29530f8..068746667c 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertDesc.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertDesc.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.utils.alert - trait AlertDesc { /** diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertSender.scala index 5ae1b960e5..fb2cff458d 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/AlertSender.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.utils.alert - trait AlertSender { /** diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/PooledAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/PooledAlertSender.scala index 10eb367d2a..5ff274438a 100644 --- 
a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/PooledAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/PooledAlertSender.scala @@ -23,9 +23,8 @@ import org.apache.linkis.common.utils.{Logging, Utils} import java.util.concurrent.{Future, LinkedBlockingQueue} import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} - abstract class PooledAlertSender extends AlertSender with Logging { - private val THREAD_POOL_SIZE = CommonVars[Int]("wds.linkis.alert.pool.size", 5).getValue + private val THREAD_POOL_SIZE = CommonVars[Int]("linkis.alert.pool.size", 5).getValue private val alertDescQ: LinkedBlockingQueue[AlertDesc] = new LinkedBlockingQueue[AlertDesc](1000) @@ -58,15 +57,15 @@ abstract class PooledAlertSender extends AlertSender with Logging { def start(): Unit = { future = Utils.defaultScheduler.submit(new Runnable() { override def run() { - info("Pooled alert thread started!") + logger.info("Pooled alert thread started!") while (!stopped.get) { executors synchronized { while (!stopped.get && runningNumber.get >= THREAD_POOL_SIZE) { - info("Pooled alert thread is full, start waiting") + logger.info("Pooled alert thread is full, start waiting") executors.wait() } } - info("Pooled alert thread continue processing") + logger.info("Pooled alert thread continue processing") if (stopped.get && alertDescQ.size() == 0) return val alertDesc = Utils.tryQuietly(alertDescQ.take) @@ -75,12 +74,12 @@ abstract class PooledAlertSender extends AlertSender with Logging { override def run() { runningNumber.addAndGet(1) Utils.tryAndWarn { - info("sending alert , information: " + alertDesc) + logger.info("sending alert , information: " + alertDesc) val ok = doSendAlert(alertDesc) if (!ok) { warn("Failed to send alert: " + alertDesc) } else { - info("successfully send alert: " + alertDesc) + logger.info("successfully send alert: " + alertDesc) } runningNumber.decrementAndGet executors synchronized executors.notify @@ -93,7 +92,7 @@ abstract class PooledAlertSender extends AlertSender with Logging { } def shutdown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = { - info("stopping the Pooled alert thread...") + logger.info("stopping the Pooled alert thread...") if (waitComplete) { val startTime = System.currentTimeMillis() while ( @@ -106,7 +105,7 @@ abstract class PooledAlertSender extends AlertSender with Logging { executors.shutdown stopped.set(true) future.cancel(true) - info("Pooled alert thread is stopped") + logger.info("Pooled alert thread is stopped") } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertDesc.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertDesc.scala index 6b12da9584..6aa19d5092 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertDesc.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertDesc.scala @@ -32,7 +32,6 @@ import scala.collection.JavaConverters._ import ImsAlertLevel.ImsAlertLevel import ImsAlertWay.ImsAlertWay - case class ImsAlertDesc( var subSystemId: String, var alertTitle: String, @@ -149,7 +148,7 @@ case class ImsAlertDesc( alertReceivers } - Array(subSystemId, newAlertTitle, newAlertObj, newAlertInfo, newAlertReceivers) + Array(subSystemId, newAlertTitle, newAlertObj, 
newAlertInfo, newAlertReceivers) } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertLevel.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertLevel.scala index cb304e9e4f..13948eac82 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertLevel.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertLevel.scala @@ -17,7 +17,6 @@ package org.apache.linkis.monitor.scan.utils.alert.ims - object ImsAlertLevel extends Enumeration { type ImsAlertLevel = Value val INFO = Value("5") diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanApplicationTest.java b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertPropFileData.scala similarity index 52% rename from linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanApplicationTest.java rename to linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertPropFileData.scala index 716ad5cf43..d5fb95e910 100644 --- a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/app/LinkisJobHistoryScanApplicationTest.java +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertPropFileData.scala @@ -15,24 +15,16 @@ * limitations under the License. */ -package org.apache.linkis.monitor.scan.app; +package org.apache.linkis.monitor.scan.utils.alert.ims -import org.apache.linkis.server.utils.LinkisMainHelper; +import com.fasterxml.jackson.annotation.JsonProperty - -public class LinkisJobHistoryScanApplicationTest { - // @Before - public void before() { - System.getProperties().setProperty(LinkisMainHelper.SERVER_NAME_KEY(), "linkis-et-monitor"); - System.getProperties() - .setProperty("log4j.configurationFile", "src/test/resources/log4j2-console.xml"); - // System.getProperties().setProperty("wds.linkis.server.conf", - // "linkis-et-monitor.properties"); - } - - // @Test - public void main() throws Exception { - LinkisJobHistoryScanApplication.main(new String[] {}); - // LinkisJobHistoryScanApplication.main(new String[]{"2021122919", "2021122921"}); - } -} +case class ImsAlertPropFileData( + @JsonProperty("alert_title") alertTitle: String, + @JsonProperty("alert_info") alertInfo: String, + @JsonProperty("alert_way") alertWays: String, + @JsonProperty("alert_reciver") alertReceivers: String, + @JsonProperty("alert_level") alertLevel: String, + @JsonProperty("alert_obj") alertObj: String, + @JsonProperty("can_recover") canRecover: String +) diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertWay.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertWay.scala index 207df613d7..244e995775 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertWay.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsAlertWay.scala @@ -17,11 +17,10 @@ package org.apache.linkis.monitor.scan.utils.alert.ims - object ImsAlertWay extends Enumeration { type ImsAlertWay = Value val NoAlert = 
Value("0") - val RTX = Value("1") + val WXWork = Value("1") val Email = Value("2") val WeChat = Value("3") } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsRequest.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsRequest.scala index 64b80d3a95..4e93f53a05 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsRequest.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/ImsRequest.scala @@ -21,7 +21,6 @@ import java.util import com.fasterxml.jackson.annotation.JsonProperty - case class ImsRequest(@JsonProperty("alertList") alertList: util.List[AlertEntity]) case class AlertEntity( diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/JobHistoryScanImsAlertPropFileParserUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/JobHistoryScanImsAlertPropFileParserUtils.scala deleted file mode 100644 index 95b64f3f4e..0000000000 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/JobHistoryScanImsAlertPropFileParserUtils.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.monitor.scan.utils.alert.ims - -import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} -import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException -import org.apache.linkis.monitor.scan.constants.Constants -import org.apache.linkis.monitor.scan.utils.alert.AlertDesc -import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.exception.ExceptionUtils - -import java.io.{BufferedReader, File, FileInputStream, InputStream, InputStreamReader} -import java.text.SimpleDateFormat -import java.util -import java.util.Properties -import scala.collection.JavaConverters._ -import com.fasterxml.jackson.annotation.JsonProperty -import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} -import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.apache.commons.io.IOUtils - - -object JobHistoryScanImsAlertPropFileParserUtils extends Logging { - - private val mapper = { - val ret = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) - ret.registerModule(DefaultScalaModule) - ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - ret - } - - def getAlerts(prefix: String): util.Map[String, AlertDesc] = { - val ret = new util.HashMap[String, AlertDesc]() - val url = getClass.getClassLoader.getResource(Constants.ALERT_PROPS_FILE_PATH) - if (url == null) { - throw new AnomalyScannerException( - 21304, - "Failed to load alerts from alert properties. Alert properties file does not exist: " + Constants.ALERT_PROPS_FILE_PATH - ) - } - logger.info("reading alert properties from: " + url.getFile) - val properties = new Properties() - var inputStream: InputStream = null - var reader: InputStreamReader = null - var buff: BufferedReader = null - - Utils.tryFinally { - Utils.tryCatch { - inputStream = new FileInputStream(new File(url.getFile)) - reader = new InputStreamReader(inputStream, "UTF-8") - buff = new BufferedReader(reader) - properties.load(buff) - } { t => { - throw new AnomalyScannerException(21304, "Failed to load alerts from alert properties. 
Cause: " + ExceptionUtils.getMessage(t)) - return ret - } - } - } { - IOUtils.closeQuietly(buff) - IOUtils.closeQuietly(reader) - IOUtils.closeQuietly(inputStream) - } - for ((k: String, v: String) <- properties.asScala) { - if (ret.containsKey(k)) { - logger.warn("found duplicate key in alert properties, accept only the first one") - } else if (StringUtils.startsWith(k, prefix)) { - val data = mapper.readValue(v, classOf[ImsAlertPropFileData]) - val receivers = { - val set: util.Set[String] = new util.HashSet[String] - if (StringUtils.isNotBlank(data.alertReceivers)) { - data.alertReceivers.split(",").map(r => set.add(r)) - } - Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => { - if (StringUtils.isNotBlank(e)) { - set.add(e) - } - }) - set - } - val alertDesc = Utils.tryAndWarn( - new ImsAlertDesc( - Constants.ALERT_SUB_SYSTEM_ID, - data.alertTitle, - data.alertObj, - data.alertInfo, - ImsAlertLevel.withName(data.alertLevel), - null, // Not used - 0, { - val set: util.Set[ImsAlertWay.Value] = new util.HashSet[ImsAlertWay.Value] - if (StringUtils.isNotBlank(data.alertWays)) { - data.alertWays - .split(",") - .map(alertWayStr => set.add(ImsAlertWay.withName(alertWayStr))) - } - set - }, - receivers - ) - ) - val realK = StringUtils.substringAfter(k, prefix) - ret.put(realK, alertDesc) - } - } - ret - } - -} - -case class ImsAlertPropFileData( - @JsonProperty("alert_title") alertTitle: String, - @JsonProperty("alert_info") alertInfo: String, - @JsonProperty("alert_way") alertWays: String, - @JsonProperty("alert_reciver") alertReceivers: String, - @JsonProperty("alert_level") alertLevel: String, - @JsonProperty("alert_obj") alertObj: String, - @JsonProperty("can_recover") canRecover: String -) diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/MonitorAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/MonitorAlertUtils.scala index 021ec3b588..285f5075dd 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/MonitorAlertUtils.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/MonitorAlertUtils.scala @@ -21,6 +21,8 @@ import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException import org.apache.linkis.monitor.scan.constants.Constants import org.apache.linkis.monitor.scan.utils.alert.AlertDesc + +import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils @@ -28,14 +30,14 @@ import java.io.{BufferedReader, File, FileInputStream, InputStream, InputStreamR import java.text.SimpleDateFormat import java.util import java.util.Properties + import scala.collection.JavaConverters._ + import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.apache.commons.io.IOUtils object MonitorAlertUtils extends Logging { - private val mapper = { val ret = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) ret.registerModule(DefaultScalaModule) @@ -43,11 +45,7 @@ object MonitorAlertUtils extends Logging { ret } - def getAlerts( - prefix: String, - parms: util.HashMap[String, String] - ): util.Map[String, AlertDesc] = { - val ret = new util.HashMap[String, AlertDesc]() + val properties = { val url 
= getClass.getClassLoader.getResource(Constants.ALERT_PROPS_FILE_PATH) if (url == null) { throw new AnomalyScannerException( @@ -58,29 +56,37 @@ object MonitorAlertUtils extends Logging { logger.info("reading alert properties from: " + url.getFile) val properties = new Properties() var inputStream: InputStream = null - var reader: InputStreamReader = null var buff: BufferedReader = null - Utils.tryFinally { - Utils.tryThrow { - inputStream = new FileInputStream(new File(url.getFile)) - reader = new InputStreamReader(inputStream, "UTF-8") - buff = new BufferedReader(reader) - properties.load(buff) - } { - case t: Throwable => - new AnomalyScannerException( + Utils.tryCatch { + inputStream = new FileInputStream(new File(url.getFile)) + reader = new InputStreamReader(inputStream, "UTF-8") + buff = new BufferedReader(reader) + properties.load(buff) + } { t => + { + throw new AnomalyScannerException( 21304, "Failed to load alerts from alert properties. Cause: " + ExceptionUtils.getMessage(t) ) } + } } { IOUtils.closeQuietly(buff) IOUtils.closeQuietly(reader) IOUtils.closeQuietly(inputStream) } - for ((k: String, v: String) <- properties.asScala) { + properties.asScala + } + + def getAlerts( + prefix: String, + params: util.HashMap[String, String] + ): util.Map[String, AlertDesc] = { + val ret = new util.HashMap[String, AlertDesc]() + + for ((k: String, v: String) <- properties) { if (ret.containsKey(k)) { logger.warn("found duplicate key in alert properties, accept only the first one") } else if (StringUtils.startsWith(k, prefix)) { @@ -89,10 +95,10 @@ object MonitorAlertUtils extends Logging { new StringBuilder().append(data.alertInfo).toString().getBytes(), "utf-8" ).replace("$name", data.alertReceivers) - val interator = parms.keySet.iterator + val interator = params.keySet.iterator while (interator.hasNext) { val key = interator.next - val value = parms.get(key) + val value = params.get(key) alertInfo = alertInfo.replace(key, value) } val receivers = { @@ -100,113 +106,38 @@ object MonitorAlertUtils extends Logging { if (StringUtils.isNotBlank(data.alertReceivers)) { data.alertReceivers.split(",").map(r => set.add(r)) } - Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => { - if (StringUtils.isNotBlank(e)) { - set.add(e) - } - }) - set.add(parms.get("$alteruser")) - set - } - val alertDesc = Utils.tryAndWarn( - new ImsAlertDesc( - Constants.ALERT_SUB_SYSTEM_ID, - data.alertTitle, - data.alertObj, - alertInfo, - ImsAlertLevel.withName(data.alertLevel), - null, // Not used - 0, { - val set: util.Set[ImsAlertWay.Value] = new util.HashSet[ImsAlertWay.Value] - if (StringUtils.isNotBlank(data.alertWays)) { - data.alertWays - .split(",") - .map(alertWayStr => set.add(ImsAlertWay.withName(alertWayStr))) + if (!params.containsKey("$alteruser")) { + Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => { + if (StringUtils.isNotBlank(e)) { + set.add(e) } - set - }, - receivers - ) - ) - val realK = StringUtils.substringAfter(k, prefix) - ret.put(realK, alertDesc) - } - } - ret - } - - - def getAlertsByDss(prefix: String, parms: util.Map[String, String]): util.Map[String, AlertDesc] = { - val ret = new util.HashMap[String, AlertDesc]() - val url = getClass.getClassLoader.getResource(Constants.ALERT_PROPS_FILE_PATH) - if (url == null) { - throw new AnomalyScannerException( - 21304, - "Failed to load alerts from alert properties. 
Alert properties file does not exist: " + Constants.ALERT_PROPS_FILE_PATH - ) - } - logger.info("reading alert properties from: " + url.getFile) - val properties = new Properties() - var inputStream: InputStream = null - - var reader: InputStreamReader = null - var buff: BufferedReader = null - - Utils.tryFinally { - Utils.tryThrow{ - inputStream = new FileInputStream(new File(url.getFile)) - reader = new InputStreamReader(inputStream, "UTF-8") - buff = new BufferedReader(reader) - properties.load(buff) - } { - case t: Throwable => - new AnomalyScannerException( - 21304, - "Failed to load alerts from alert properties. Cause: " + ExceptionUtils.getMessage(t) - ) - } - } { - IOUtils.closeQuietly(buff) - IOUtils.closeQuietly(reader) - IOUtils.closeQuietly(inputStream) - } - for ((k: String, v: String) <- properties.asScala) { - if (ret.containsKey(k)) { - warn("found duplicate key in alert properties, accept only the first one") - } else if (StringUtils.startsWith(k, prefix)) { - val data = mapper.readValue(v, classOf[ImsAlertPropFileData]) - var alertInfo = data.alertInfo - val interator = parms.keySet.iterator - while (interator.hasNext) { - val key = interator.next - val value = parms.get(key) - alertInfo = alertInfo.replace(key, value) - } -// alertInfo = parms.getOrDefault("detail", "").concat(alertInfo) - val receivers = { - val set: util.Set[String] = new util.HashSet[String] - if (StringUtils.isNotBlank(data.alertReceivers)) { - data.alertReceivers.split(",").map(r => set.add(r)) + }) + } else { + set.add(params.get("$alteruser")) } - Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => { - if (StringUtils.isNotBlank(e)) { - set.add(e) - } - }) - if (StringUtils.isNotBlank(parms.get("receiver"))) { - parms.get("receiver").split(",").map(r => set.add(r)) + if (StringUtils.isNotBlank(params.get("receiver"))) { + params.get("receiver").split(",").map(r => set.add(r)) } set } + + val subSystemId = params.getOrDefault("subSystemId", Constants.ALERT_SUB_SYSTEM_ID) + val alertTitle = params.getOrDefault("title", data.alertTitle) + val alertLevel = + if (StringUtils.isNotBlank(data.alertLevel)) { + ImsAlertLevel.withName(params.getOrDefault("monitorLevel", data.alertLevel)) + } else { + ImsAlertLevel.withName(params.getOrDefault("monitorLevel", ImsAlertLevel.WARN.toString)) + } + val alertDesc = Utils.tryAndWarn( - ImsAlertDesc( - parms.getOrDefault("subSystemId", Constants.ALERT_SUB_SYSTEM_ID), - parms.getOrDefault("title", ""), + ImsAlertDesc( + subSystemId, + alertTitle, data.alertObj, - alertInfo - , - ImsAlertLevel.withName(parms.getOrDefault("monitorLevel", "4")), - null, // Not used + alertInfo, + alertLevel, + null, 0, { val set: util.Set[ImsAlertWay.Value] = new util.HashSet[ImsAlertWay.Value] if (StringUtils.isNotBlank(data.alertWays)) { diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertSender.scala index d2508b37a3..a553cbba89 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertSender.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertSender.scala @@ -34,15 +34,9 @@ import java.util import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule +class PooledImsAlertSender(alertUrl: String) extends 
PooledAlertSender with Logging { -class PooledImsAlertSender( - subSystemId: String, - alertUrl: String, - default_Receivers: util.Set[String] -) extends PooledAlertSender - with Logging { - - protected val httpClient = HttpClients.createDefault // TODO: Linkis-httpclient + protected val httpClient = HttpClients.createDefault private val mapper = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) @@ -74,7 +68,7 @@ class PooledImsAlertSender( return false } if (paramContent.isEmpty) { - logger. warn("alertParams is empty, will not send alarm") + logger.warn("alertParams is empty, will not send alarm") return false } @@ -99,7 +93,7 @@ class PooledImsAlertSender( LogUtils.stdOutLogger.info("Alert: " + paramContent + "Response: " + responseInfo) if (response.getStatusLine.getStatusCode == 200) return true } - return false + false } override def shutdown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = { diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertUtils.scala index 4a50161438..37fd35724b 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertUtils.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/PooledImsAlertUtils.scala @@ -33,15 +33,10 @@ import scala.collection.JavaConverters._ import ImsAlertWay.ImsAlertWay - object PooledImsAlertUtils extends Logging { private val sender: PooledImsAlertSender = { - val ret = new PooledImsAlertSender( - Constants.ALERT_SUB_SYSTEM_ID, - Constants.ALERT_IMS_URL, - Constants.ALERT_DEFAULT_RECEIVERS.asJava - ) + val ret = new PooledImsAlertSender(Constants.ALERT_IMS_URL) ret.start() ret } @@ -70,11 +65,11 @@ object PooledImsAlertUtils extends Logging { if (CollectionUtils.isNotEmpty(alertWays)) alertWays else new HashSet[ImsAlertWay]() val (alertInfo, alertLevel) = if (t != null) { _alertWays.add(ImsAlertWay.Email) - _alertWays.add(ImsAlertWay.RTX) + _alertWays.add(ImsAlertWay.WXWork) _alertWays.add(ImsAlertWay.WeChat) (ExceptionUtils.getRootCauseMessage(t), ImsAlertLevel.MAJOR) } else { - _alertWays.add(ImsAlertWay.RTX) + _alertWays.add(ImsAlertWay.WXWork) (message, ImsAlertLevel.WARN) } val alertDesc = new ImsAlertDesc( diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/UserLabelAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/UserLabelAlertUtils.scala index 1083967783..d5fb4a9b07 100644 --- a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/UserLabelAlertUtils.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/alert/ims/UserLabelAlertUtils.scala @@ -21,6 +21,8 @@ import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} import org.apache.linkis.monitor.scan.app.jobhistory.exception.AnomalyScannerException import org.apache.linkis.monitor.scan.constants.Constants import org.apache.linkis.monitor.scan.utils.alert.AlertDesc + +import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils @@ -28,99 +30,18 @@ import java.io.{BufferedReader, File, FileInputStream, InputStream, 
InputStreamR import java.text.SimpleDateFormat import java.util import java.util.Properties + import scala.collection.JavaConverters._ + import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.apache.commons.io.IOUtils object UserLabelAlertUtils extends Logging { - private val mapper = { - val ret = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) - ret.registerModule(DefaultScalaModule) - ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - ret - } - - def getAlerts(prefix: String, instans: String): util.Map[String, AlertDesc] = { - val ret = new util.HashMap[String, AlertDesc]() - val url = getClass.getClassLoader.getResource(Constants.ALERT_PROPS_FILE_PATH) - if (url == null) { - throw new AnomalyScannerException( - 21304, - "Failed to load alerts from alert properties. Alert properties file does not exist: " + Constants.ALERT_PROPS_FILE_PATH - ) - } - logger.info("reading alert properties from: " + url.getFile) - val properties = new Properties() - var inputStream: InputStream = null - - var reader: InputStreamReader = null - var buff: BufferedReader = null - - Utils.tryFinally { - Utils.tryThrow { - inputStream = new FileInputStream(new File(url.getFile)) - reader = new InputStreamReader(inputStream, "UTF-8") - buff = new BufferedReader(reader) - properties.load(buff) - } { - case t: Throwable => - new AnomalyScannerException( - 21304, - "Failed to load alerts from alert properties. Cause: " + ExceptionUtils.getMessage(t) - ) - } - } { - IOUtils.closeQuietly(buff) - IOUtils.closeQuietly(reader) - IOUtils.closeQuietly(inputStream) - } - for ((k: String, v: String) <- properties.asScala) { - if (ret.containsKey(k)) { - logger.warn("found duplicate key in alert properties, accept only the first one") - } else if (StringUtils.startsWith(k, prefix)) { - val data = mapper.readValue(v, classOf[ImsAlertPropFileData]) - val alertInfo = - new String(new StringBuilder().append(data.alertInfo).toString().getBytes(), "utf-8") - .replace("$userCreator", instans) - val receivers = { - val set: util.Set[String] = new util.HashSet[String] - if (StringUtils.isNotBlank(data.alertReceivers)) { - data.alertReceivers.split(",").map(r => set.add(r)) - } - Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => { - if (StringUtils.isNotBlank(e)) { - set.add(e) - } - }) - set - } - val alertDesc = Utils.tryAndWarn( - new ImsAlertDesc( - Constants.ALERT_SUB_SYSTEM_ID, - data.alertTitle, - data.alertObj, - alertInfo, - ImsAlertLevel.withName(data.alertLevel), - null, // Not used - 0, { - val set: util.Set[ImsAlertWay.Value] = new util.HashSet[ImsAlertWay.Value] - if (StringUtils.isNotBlank(data.alertWays)) { - data.alertWays - .split(",") - .map(alertWayStr => set.add(ImsAlertWay.withName(alertWayStr))) - } - set - }, - receivers - ) - ) - val realK = StringUtils.substringAfter(k, prefix) - ret.put(realK, alertDesc) - } - } - ret + def getAlerts(prefix: String, userCreator: String): util.Map[String, AlertDesc] = { + val replaceParams: util.HashMap[String, String] = new util.HashMap[String, String] + replaceParams.put("$userCreator", userCreator) + MonitorAlertUtils.getAlerts(prefix, replaceParams) } } diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/log/LogUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/log/LogUtils.scala index 38af7b6104..73daf86721 100644 --- 
a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/log/LogUtils.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/scan/utils/log/LogUtils.scala @@ -19,7 +19,6 @@ package org.apache.linkis.monitor.scan.utils.log import org.slf4j.LoggerFactory - object LogUtils { val stdOutLogger = LoggerFactory.getLogger("PlaintTextConsoleLogger") } diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/app/JobHistoryScanImsAlertPropFileParserUtilsTest.java b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/app/JobHistoryScanImsAlertPropFileParserUtilsTest.java deleted file mode 100644 index 600ccafddb..0000000000 --- a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/app/JobHistoryScanImsAlertPropFileParserUtilsTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.monitor.scan.app; - -import org.apache.linkis.monitor.scan.constants.Constants; -import org.apache.linkis.monitor.scan.utils.alert.AlertDesc; -import org.apache.linkis.monitor.scan.utils.alert.ims.JobHistoryScanImsAlertPropFileParserUtils; -import org.apache.linkis.server.utils.LinkisMainHelper; - -import java.util.Map; - -import org.junit.Assert; - -public class JobHistoryScanImsAlertPropFileParserUtilsTest { - // @Before - public void before() { - System.getProperties().setProperty(LinkisMainHelper.SERVER_NAME_KEY(), "linkis-et-monitor"); - System.getProperties() - .setProperty("log4j.configurationFile", "src/test/resources/log4j2-console.xml"); - // System.getProperties().setProperty("wds.linkis.server.conf", - // "linkis-et-monitor.properties"); - } - - // @Test - public void getAlerts() throws Exception { - Map alerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE()); - for (Map.Entry kv : alerts.entrySet()) { - System.out.println(kv.getKey() + ": " + kv.getValue().toString()); - } - Assert.assertEquals(alerts.size(), 2); - } -} diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/utils/alert/PooledImsAlertUtilsTest.java b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/utils/alert/PooledImsAlertUtilsTest.java index 8ca5ff1a98..2014058561 100644 --- a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/utils/alert/PooledImsAlertUtilsTest.java +++ b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/scan/utils/alert/PooledImsAlertUtilsTest.java @@ -18,7 +18,7 @@ package org.apache.linkis.monitor.scan.utils.alert; import org.apache.linkis.monitor.scan.constants.Constants; -import 
org.apache.linkis.monitor.scan.utils.alert.ims.JobHistoryScanImsAlertPropFileParserUtils; +import org.apache.linkis.monitor.scan.utils.alert.ims.MonitorAlertUtils; import org.apache.linkis.monitor.scan.utils.alert.ims.PooledImsAlertUtils; import org.apache.linkis.server.utils.LinkisMainHelper; @@ -38,7 +38,7 @@ public void before() { public void addAlert() throws Exception { PooledImsAlertUtils.addAlert("1st test"); Map alerts = - JobHistoryScanImsAlertPropFileParserUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE()); + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null); for (Map.Entry kv : alerts.entrySet()) { System.out.println(kv.getKey() + ": " + kv.getValue().toString()); PooledImsAlertUtils.addAlert(kv.getValue());
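The hunks above consolidate the alert-property parsing: JobHistoryScanImsAlertPropFileParserUtils is deleted, UserLabelAlertUtils becomes a thin wrapper, and MonitorAlertUtils now loads the alert properties file once and accepts a replacement-parameter map ("$alteruser", "receiver", "subSystemId", "title", "monitorLevel") that is substituted into the alert_info template. The sketch below is a minimal, illustrative use of the refactored entry points, not code from this patch: it assumes the properties file referenced by Constants.ALERT_PROPS_FILE_PATH is on the classpath (otherwise getAlerts throws AnomalyScannerException 21304), and the UserLabelAlertUtils prefix, receiver and user values are made-up placeholders.

import java.util

import scala.collection.JavaConverters._

import org.apache.linkis.monitor.scan.constants.Constants
import org.apache.linkis.monitor.scan.utils.alert.AlertDesc
import org.apache.linkis.monitor.scan.utils.alert.ims.{ImsAlertLevel, MonitorAlertUtils, PooledImsAlertUtils, UserLabelAlertUtils}

object AlertRefactorSketch {

  def main(args: Array[String]): Unit = {
    // Plain message alert: after this patch the default alert way is WXWork
    // (renamed from RTX) and the level defaults to WARN.
    PooledImsAlertUtils.addAlert("linkis-et-monitor smoke-test alert")

    // Template-driven alerts: each key in the params map is substituted into the
    // alert_info template; "$alteruser" also replaces the default receiver list.
    val params = new util.HashMap[String, String]()
    params.put("$alteruser", "hadoop")                      // illustrative receiver
    params.put("monitorLevel", ImsAlertLevel.WARN.toString) // same default the patch falls back to
    val alerts: util.Map[String, AlertDesc] =
      MonitorAlertUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE, params)
    alerts.asScala.foreach { case (key, desc) => println(s"$key -> $desc") }

    // User-label alerts now route through the same code path, only pre-filling the
    // "$userCreator" placeholder. The prefix string here is a placeholder, not a
    // constant defined in this patch.
    val userAlerts = UserLabelAlertUtils.getAlerts("user.label.monitor.", "hadoop-ide")
    println(s"loaded ${userAlerts.size()} user-label alert descriptions")
  }

}

The sketch always supplies a non-null params map, since the refactored getAlerts iterates params.keySet and checks params.containsKey("$alteruser") directly.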