From 65132b43cf3abdf93739bf3eaa219b398cb77bd0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=B8=A0=E7=A3=8A?= Date: Mon, 29 Apr 2024 17:42:01 +0800 Subject: [PATCH 1/6] support ash --- cmd.py | 32 +++++ common/ssh.py | 19 ++- conf/inner_config.yml | 1 + config.py | 1 + core.py | 10 +- handler/gather/gather_ash_report.py | 193 ++++++++++++++++++++++++++++ 6 files changed, 251 insertions(+), 5 deletions(-) create mode 100644 handler/gather/gather_ash_report.py diff --git a/cmd.py b/cmd.py index 0c0fcfb1..071face0 100644 --- a/cmd.py +++ b/cmd.py @@ -562,6 +562,38 @@ def _do_command(self, obdiag): return obdiag.gather_function('gather_scenes_run', self.opts) +class ObdiagGatherAshReportRunCommand(ObdiagOriginCommand): + + def __init__(self): + super(ObdiagGatherAshReportRunCommand, self).__init__('run', 'gather ash report') + self.parser.add_option('--trace_id', type='string', help="The TRACE.ID of the SQL to be sampled, if left blank or filled with NULL, indicates that TRACE.ID is not restricted.") + self.parser.add_option('--sql_id', type='string', help="The SQL.ID, if left blank or filled with NULL, indicates that SQL.ID is not restricted.") + #WAIT_CLASS + self.parser.add_option('--wait_class', type='string', + help='Report type.',default='TEXT') + self.parser.add_option('--report_type', type='string', + help='Event types to be sampled.') + self.parser.add_option('--from', type='string', + help="specify the start of the time range. format: 'yyyy-mm-dd hh:mm:ss'") + self.parser.add_option('--to', type='string', + help="specify the end of the time range. format: 'yyyy-mm-dd hh:mm:ss'") + self.parser.add_option('--since', type='string', + help="Specify time range that from 'n' [d]ays, 'n' [h]ours or 'n' [m]inutes. before to now. format: . example: 1h.", + default='30m') + self.parser.add_option('--store_dir', type='string', + help='the dir to store gather result, current dir by default.', default='./') + + self.parser.add_option('-c', type='string', help='obdiag custom config', + default=os.path.expanduser('~/.obdiag/config.yml')) + + def init(self, cmd, args): + super(ObdiagGatherAshReportRunCommand, self).init(cmd, args) + return self + + def _do_command(self, obdiag): + return obdiag.gather_function('gather_ash_report', self.opts) + + class ObdiagAnalyzeLogCommand(ObdiagOriginCommand): def __init__(self): diff --git a/common/ssh.py b/common/ssh.py index 9c18b6b2..f58b8994 100644 --- a/common/ssh.py +++ b/common/ssh.py @@ -811,7 +811,14 @@ def file_uploader(self, local_dir, remote_dir, stdio=None): except: stdio.exception("") stdio.verbose('Failed to get %s' % remote_dir) - +# TODO ENV_DISABLE_RSA_ALGORITHMS need get by context.inner_context +ENV_DISABLE_RSA_ALGORITHMS=0 +def dis_rsa_algorithms(state=0): + """ + Disable RSA algorithms in OpenSSH server. 
+ """ + global ENV_DISABLE_RSA_ALGORITHMS + ENV_DISABLE_RSA_ALGORITHMS=state class SshHelper(object): def __init__(self, is_ssh=None, host_ip=None, username=None, password=None, ssh_port=None, key_file=None, node=None, stdio=None): @@ -851,17 +858,21 @@ def __init__(self, is_ssh=None, host_ip=None, username=None, password=None, ssh_ return if self.is_ssh: + self._disabled_rsa_algorithms=None + DISABLED_ALGORITHMS = dict(pubkeys=["rsa-sha2-512", "rsa-sha2-256"]) + if ENV_DISABLE_RSA_ALGORITHMS == 1: + self._disabled_rsa_algorithms = DISABLED_ALGORITHMS self.ssh_type = "remote" if len(self.key_file) > 0: try: self._ssh_fd = paramiko.SSHClient() self._ssh_fd.set_missing_host_key_policy(paramiko.client.AutoAddPolicy()) self._ssh_fd.load_system_host_keys() - self._ssh_fd.connect(hostname=host_ip, username=username, key_filename=self.key_file, port=ssh_port) + self._ssh_fd.connect(hostname=host_ip, username=username, key_filename=self.key_file, port=ssh_port,disabled_algorithms=self._disabled_rsa_algorithms) except AuthenticationException: self.password = input("Authentication failed, Input {0}@{1} password:\n".format(username, host_ip)) self.need_password = True - self._ssh_fd.connect(hostname=host_ip, username=username, password=password, port=ssh_port) + self._ssh_fd.connect(hostname=host_ip, username=username, password=password, port=ssh_port,disabled_algorithms=self._disabled_rsa_algorithms) except Exception as e: raise OBDIAGSSHConnException("ssh {0}@{1}: failed, exception:{2}".format(username, host_ip, e)) else: @@ -869,7 +880,7 @@ def __init__(self, is_ssh=None, host_ip=None, username=None, password=None, ssh_ self._ssh_fd.set_missing_host_key_policy(paramiko.client.AutoAddPolicy()) self._ssh_fd.load_system_host_keys() self.need_password = True - self._ssh_fd.connect(hostname=host_ip, username=username, password=password, port=ssh_port) + self._ssh_fd.connect(hostname=host_ip, username=username, password=password, port=ssh_port,disabled_algorithms=self._disabled_rsa_algorithms) def ssh_exec_cmd(self, cmd): if self.ssh_type == "docker": diff --git a/conf/inner_config.yml b/conf/inner_config.yml index f4bf9245..9b3fdcd1 100644 --- a/conf/inner_config.yml +++ b/conf/inner_config.yml @@ -4,6 +4,7 @@ obdiag: config_backup_dir: ~/.obdiag/backup_conf file_number_limit: 20 file_size_limit: 2G + dis_rsa_algorithms: 0 logger: log_dir: ~/.obdiag/log log_filename: obdiag.log diff --git a/config.py b/config.py index fb01eb8d..d79fcb9c 100644 --- a/config.py +++ b/config.py @@ -64,6 +64,7 @@ 'config_backup_dir': '~/.obdiag/backup_conf', 'file_number_limit': 20, 'file_size_limit': '2G', + 'dis_rsa_algorithms':0, }, 'logger': { 'log_dir': '~/.obdiag/log', diff --git a/core.py b/core.py index b5d7dcf3..ef8c41c8 100644 --- a/core.py +++ b/core.py @@ -21,9 +21,10 @@ from optparse import Values from copy import copy +from handler.gather.gather_ash_report import GatherAshReportHandler from handler.rca.rca_handler import RCAHandler from handler.rca.rca_list import RcaScenesListHandler -from common.ssh import SshClient, SshConfig +from common.ssh import SshClient, SshConfig, dis_rsa_algorithms from context import HandlerContextNamespace, HandlerContext from config import ConfigManager, InnerConfigManager from err import CheckStatus, SUG_SSH_FAILED @@ -67,6 +68,10 @@ def __init__(self, stdio=None, config_path=os.path.expanduser('~/.obdiag/config. 
"basic") is not None and self.inner_config_manager.config.get("obdiag").get("basic").get( "telemetry") is not None and self.inner_config_manager.config.get("obdiag").get("basic").get("telemetry") is False: telemetry.work_tag = False + if self.inner_config_manager.config.get("obdiag") is not None and self.inner_config_manager.config.get("obdiag").get( + "basic") is not None and self.inner_config_manager.config.get("obdiag").get("basic").get("dis_rsa_algorithms") is not None : + disable_rsa_algorithms=self.inner_config_manager.config.get("obdiag").get("basic").get("dis_rsa_algorithms") + dis_rsa_algorithms(disable_rsa_algorithms) def fork(self, cmds=None, options=None, stdio=None): new_obdiag = copy(self) @@ -236,6 +241,9 @@ def gather_function(self, function_type, opt): elif function_type == 'gather_scenes_run': handler = GatherSceneHandler(self.context) return handler.handle() + elif function_type == 'gather_ash_report': + handler =GatherAshReportHandler(self.context) + return handler.handle() else: self._call_stdio('error', 'Not support gather function: {0}'.format(function_type)) return False diff --git a/handler/gather/gather_ash_report.py b/handler/gather/gather_ash_report.py new file mode 100644 index 00000000..03135441 --- /dev/null +++ b/handler/gather/gather_ash_report.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -* +# Copyright (c) 2022 OceanBase +# OceanBase Diagnostic Tool is licensed under Mulan PSL v2. +# You can use this software according to the terms and conditions of the Mulan PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, +# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, +# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. +# See the Mulan PSL v2 for more details. 
+ +""" +@time: 2024/4/28 +@file: gather_ash_report.py +@desc: +""" +import datetime +import os + +from common.ob_connector import OBConnector +from common.obdiag_exception import OBDIAGFormatException, OBDIAGException +from common.tool import DirectoryUtil, TimeUtils, Util +from stdio import SafeStdio +from colorama import Fore, Style + + +class GatherAshReportHandler(SafeStdio): + def __init__(self, context, gather_pack_dir='./'): + super().__init__() + self.report_type = None + self.wait_class = None + self.sql_id = None + self.ash_report_file_name = None + self.from_time_str = None + self.to_time_str = None + self.ash_sql = None + self.trace_id = None + self.context = context + self.stdio = self.context.stdio + self.gather_pack_dir = gather_pack_dir + if self.context.get_variable("gather_timestamp", None) : + self.gather_timestamp=self.context.get_variable("gather_timestamp") + else: + self.gather_timestamp = TimeUtils.get_current_us_timestamp() + self.cluster = self.context.cluster_config + try: + self.obconn = OBConnector( + ip=self.cluster.get("db_host"), + port=self.cluster.get("db_port"), + username=self.cluster.get("tenant_sys").get("user"), + password=self.cluster.get("tenant_sys").get("password"), + stdio=self.stdio, + timeout=10000 + ) + except Exception as e: + self.stdio.error("Failed to connect to database: {0}".format(e)) + raise OBDIAGFormatException("Failed to connect to database: {0}".format(e)) + + + def handle(self): + if not self.init_option(): + self.stdio.error('init option failed') + return False + if not self.init_config(): + self.stdio.error('init config failed') + return False + self.__init_variables() + self.__init_report_path() + self.__init_task_names() + self.execute() + self.__print_result() + + def execute(self): + try: + self.stdio.verbose("execute_tasks. 
the number of tasks is {0} ,tasks is {1}".format(len(self.yaml_tasks.keys()), self.yaml_tasks.keys())) + + except Exception as e: + self.stdio.error("Internal error :{0}".format(e)) + + + def __init_report_path(self): + try: + self.report_path = os.path.join(self.gather_pack_dir, "gather_pack_{0}".format(TimeUtils.timestamp_to_filename_time(self.gather_timestamp), self.stdio)) + self.stdio.verbose("Use {0} as pack dir.".format(self.report_path)) + DirectoryUtil.mkdir(path=self.report_path, stdio=self.stdio) + except Exception as e: + self.stdio.error("init_report_path failed, error:{0}".format(e)) + + def __init_variables(self): + try: + self.variables = { + "observer_data_dir": self.ob_nodes[0].get("home_path") if self.ob_nodes and self.ob_nodes[0].get("home_path") else "", + "obproxy_data_dir": self.obproxy_nodes[0].get("home_path") if self.obproxy_nodes and self.obproxy_nodes[0].get("home_path") else "", + "from_time": self.from_time_str, + "to_time": self.to_time_str + } + self.stdio.verbose("gather scene variables: {0}".format(self.variables)) + except Exception as e: + self.stdio.error("init gather scene variables failed, error: {0}".format(e)) + + + def init_option(self): + options = self.context.options + from_option = Util.get_option(options, 'from') + to_option = Util.get_option(options, 'to') + store_dir_option = Util.get_option(options, 'store_dir',"./") + trace_id_option = Util.get_option(options, 'trace_id') + sql_id_option = Util.get_option(options, 'sql_id') + report_type_option = Util.get_option(options, 'report_type') + wait_class_option = Util.get_option(options, 'wait_class') + + if from_option is not None and to_option is not None: + try: + from_timestamp = TimeUtils.parse_time_str(from_option) + to_timestamp = TimeUtils.parse_time_str(to_option) + self.from_time_str = from_option + self.to_time_str = to_option + except OBDIAGFormatException: + self.stdio.exception('Error: Datetime is invalid. Must be in format yyyy-mm-dd hh:mm:ss. 
from_datetime={0}, to_datetime={1}'.format(from_option, to_option)) + return False + if to_timestamp <= from_timestamp: + self.stdio.exception('Error: from datetime is larger than to datetime, please check.') + return False + elif (from_option is None or to_option is None) and since_option is not None: + now_time = datetime.datetime.now() + self.to_time_str = (now_time + datetime.timedelta(minutes=1)).strftime('%Y-%m-%d %H:%M:%S') + self.from_time_str = (now_time - datetime.timedelta(seconds=TimeUtils.parse_time_length_to_sec(since_option))).strftime('%Y-%m-%d %H:%M:%S') + self.stdio.print('gather from_time: {0}, to_time: {1}'.format(self.from_time_str, self.to_time_str)) + else: + self.stdio.warn('No time option provided, default processing is based on the last 30 minutes') + now_time = datetime.datetime.now() + self.to_time_str = (now_time + datetime.timedelta(minutes=1)).strftime('%Y-%m-%d %H:%M:%S') + if since_option: + self.from_time_str = (now_time - datetime.timedelta(seconds=TimeUtils.parse_time_length_to_sec(since_option))).strftime('%Y-%m-%d %H:%M:%S') + else: + self.from_time_str = (now_time - datetime.timedelta(minutes=30)).strftime('%Y-%m-%d %H:%M:%S') + self.stdio.print('gather from_time: {0}, to_time: {1}'.format(self.from_time_str, self.to_time_str)) + if store_dir_option: + if not os.path.exists(os.path.abspath(store_dir_option)): + self.stdio.warn('warn: args --store_dir [{0}] incorrect: No such directory, Now create it'.format(os.path.abspath(store_dir_option))) + os.makedirs(os.path.abspath(store_dir_option)) + self.gather_pack_dir = os.path.abspath(store_dir_option) + self.ash_sql="CALL DBMS_WORKLOAD_REPOSITORY.ASH_REPORT( '{0}', '{1}'".format(self.from_time_str, self.to_time_str) + if sql_id_option: + self.sql_id = sql_id_option + self.ash_sql = self.ash_sql + ", '{0}'".format(self.sql_id) + else: + self.ash_sql = self.ash_sql + ", NULL" + if trace_id_option: + self.trace_id = trace_id_option + self.ash_sql = self.ash_sql + ", '{0}'".format(self.trace_id) + else: + self.ash_sql = self.ash_sql + ", NULL" + if wait_class_option: + self.wait_class = wait_class_option + self.ash_sql = self.ash_sql + ", '{0}'".format(self.wait_class) + else: + self.ash_sql = self.ash_sql + ", NULL" + if report_type_option: + self.report_type = report_type_option + self.ash_sql = self.ash_sql + ", '{0}'".format(self.report_type) + else: + self.ash_sql = self.ash_sql + ", NULL" + + try: + self.ash_sql = self.ash_sql + ");" + self.stdio.verbose("ash_sql: {0}".format(self.ash_sql)) + + ash_report_data=self.obconn.execute_sql(self.ash_sql) + if not ash_report_data or len(ash_report_data)==0: + self.stdio.error("ash report data is empty") + raise OBDIAGException("ash report data is empty") + ash_report=ash_report_data[0] + + # save ash_report_data + self.ash_report_file_name="ash_report_{0}.txt".format(TimeUtils.timestamp_to_filename_time(self.gather_timestamp)) + + with open(self.report_path + "/"+self.ash_report_file_name, 'w') as f: + f.write(ash_report) + except Exception as e: + self.stdio.error("ash report gather failed, error message: {0}".format(e)) + return False + + return True + + def __print_result(self): + self.stdio.print(Fore.YELLOW + "\nGather scene results stored in this directory: {0}\n".format(self.report_path + "/"+self.ash_report_file_name) + Style.RESET_ALL) + + + + + From 4d9210469d5307d497bf109501b5a5d2b2fcacf8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=B8=A0=E7=A3=8A?= Date: Mon, 29 Apr 2024 20:39:20 +0800 Subject: [PATCH 2/6] support ash --- cmd.py | 23 +++-- 
common/ob_connector.py | 9 ++ handler/gather/gather_ash_report.py | 136 +++++++++++++--------------- init_obdiag_cmd.sh | 2 +- 4 files changed, 82 insertions(+), 88 deletions(-) diff --git a/cmd.py b/cmd.py index 071face0..3e8c9a0f 100644 --- a/cmd.py +++ b/cmd.py @@ -562,24 +562,23 @@ def _do_command(self, obdiag): return obdiag.gather_function('gather_scenes_run', self.opts) -class ObdiagGatherAshReportRunCommand(ObdiagOriginCommand): +class ObdiagGatherAshReportCommand(ObdiagOriginCommand): def __init__(self): - super(ObdiagGatherAshReportRunCommand, self).__init__('run', 'gather ash report') - self.parser.add_option('--trace_id', type='string', help="The TRACE.ID of the SQL to be sampled, if left blank or filled with NULL, indicates that TRACE.ID is not restricted.") - self.parser.add_option('--sql_id', type='string', help="The SQL.ID, if left blank or filled with NULL, indicates that SQL.ID is not restricted.") - #WAIT_CLASS + super(ObdiagGatherAshReportCommand, self).__init__('ash', 'gather ash report') + self.parser.add_option('--trace_id', type='string', + help="The TRACE.ID of the SQL to be sampled, if left blank or filled with NULL, indicates that TRACE.ID is not restricted.") + self.parser.add_option('--sql_id', type='string', + help="The SQL.ID, if left blank or filled with NULL, indicates that SQL.ID is not restricted.") + # WAIT_CLASS self.parser.add_option('--wait_class', type='string', - help='Report type.',default='TEXT') - self.parser.add_option('--report_type', type='string', help='Event types to be sampled.') + self.parser.add_option('--report_type', type='string', + help='Report type, currently only supports text type.', default='TEXT') self.parser.add_option('--from', type='string', help="specify the start of the time range. format: 'yyyy-mm-dd hh:mm:ss'") self.parser.add_option('--to', type='string', help="specify the end of the time range. format: 'yyyy-mm-dd hh:mm:ss'") - self.parser.add_option('--since', type='string', - help="Specify time range that from 'n' [d]ays, 'n' [h]ours or 'n' [m]inutes. before to now. format: . example: 1h.", - default='30m') self.parser.add_option('--store_dir', type='string', help='the dir to store gather result, current dir by default.', default='./') @@ -587,7 +586,7 @@ def __init__(self): default=os.path.expanduser('~/.obdiag/config.yml')) def init(self, cmd, args): - super(ObdiagGatherAshReportRunCommand, self).init(cmd, args) + super(ObdiagGatherAshReportCommand, self).init(cmd, args) return self def _do_command(self, obdiag): @@ -600,7 +599,6 @@ def __init__(self): super(ObdiagAnalyzeLogCommand, self).__init__('log', 'Analyze oceanbase log from online observer machines or offline oceanbase log files') self.parser.add_option('--from', type='string', help="specify the start of the time range. format: 'yyyy-mm-dd hh:mm:ss'") self.parser.add_option('--to', type='string', help="specify the end of the time range. format: 'yyyy-mm-dd hh:mm:ss'") - self.parser.add_option('--since', type='string', help="Specify time range that from 'n' [d]ays, 'n' [h]ours or 'n' [m]inutes. before to now. format: . 
example: 1h.", default='30m') self.parser.add_option('--scope', type='string', help="log type constrains, choices=[observer, election, rootservice, all]", default='all') self.parser.add_option('--grep', action="append", type='string', help="specify keywords constrain") self.parser.add_option('--log_level', type='string', help="oceanbase logs greater than or equal to this level will be analyze, choices=[DEBUG, TRACE, INFO, WDIAG, WARN, EDIAG, ERROR]") @@ -745,6 +743,7 @@ def __init__(self): self.register_command(ObdiagGatherAwrCommand()) self.register_command(ObdiagGatherObproxyLogCommand()) self.register_command(ObdiagGatherSceneCommand()) + self.register_command(ObdiagGatherAshReportCommand()) class ObdiagGatherSceneCommand(MajorCommand): diff --git a/common/ob_connector.py b/common/ob_connector.py index b548b89e..e002c9e4 100644 --- a/common/ob_connector.py +++ b/common/ob_connector.py @@ -114,3 +114,12 @@ def execute_sql_pretty(self, sql): ret = from_db_cursor(cursor) cursor.close() return ret + def callproc(self, procname, args=()): + if self.conn is None: + self._connect_db() + else: + self.conn.ping(reconnect=True) + cursor = self.conn.cursor() + cursor.callproc(procname, args) + ret = cursor.fetchall() + return ret diff --git a/handler/gather/gather_ash_report.py b/handler/gather/gather_ash_report.py index 03135441..6bb6bcf3 100644 --- a/handler/gather/gather_ash_report.py +++ b/handler/gather/gather_ash_report.py @@ -39,45 +39,58 @@ def __init__(self, context, gather_pack_dir='./'): self.context = context self.stdio = self.context.stdio self.gather_pack_dir = gather_pack_dir - if self.context.get_variable("gather_timestamp", None) : - self.gather_timestamp=self.context.get_variable("gather_timestamp") + if self.context.get_variable("gather_timestamp", None): + self.gather_timestamp = self.context.get_variable("gather_timestamp") else: self.gather_timestamp = TimeUtils.get_current_us_timestamp() self.cluster = self.context.cluster_config try: self.obconn = OBConnector( - ip=self.cluster.get("db_host"), - port=self.cluster.get("db_port"), - username=self.cluster.get("tenant_sys").get("user"), - password=self.cluster.get("tenant_sys").get("password"), - stdio=self.stdio, - timeout=10000 - ) + ip=self.cluster.get("db_host"), + port=self.cluster.get("db_port"), + username=self.cluster.get("tenant_sys").get("user"), + password=self.cluster.get("tenant_sys").get("password"), + stdio=self.stdio, + timeout=10000, + database="oceanbase" + ) except Exception as e: self.stdio.error("Failed to connect to database: {0}".format(e)) raise OBDIAGFormatException("Failed to connect to database: {0}".format(e)) - def handle(self): if not self.init_option(): self.stdio.error('init option failed') return False - if not self.init_config(): - self.stdio.error('init config failed') - return False - self.__init_variables() self.__init_report_path() - self.__init_task_names() self.execute() self.__print_result() def execute(self): try: - self.stdio.verbose("execute_tasks. 
the number of tasks is {0} ,tasks is {1}".format(len(self.yaml_tasks.keys()), self.yaml_tasks.keys())) + ash_report_arg = (self.from_time_str, self.to_time_str, self.sql_id, self.trace_id, self.wait_class, self.report_type) + self.stdio.verbose("ash report arg: {0}".format(ash_report_arg)) + ash_report_data = self.obconn.callproc("DBMS_WORKLOAD_REPOSITORY.ASH_REPORT", args=ash_report_arg) + if not ash_report_data or len(ash_report_data) == 0: + self.stdio.error("ash report data is empty") + raise OBDIAGException("ash report data is empty") + ash_report = ash_report_data[0][0] + if len(ash_report) > 1: + self.stdio.verbose("ash report: \n{0}".format(ash_report)) + else: + raise OBDIAGException("ash report data is empty") - except Exception as e: - self.stdio.error("Internal error :{0}".format(e)) + # save ash_report_data + self.ash_report_file_name = "ash_report_{0}.txt".format( + TimeUtils.timestamp_to_filename_time(self.gather_timestamp)) + self.ash_report_file_name=os.path.join(self.report_path, "result_summary.txt") + self.stdio.verbose("save ash report file name: {0}".format(self.ash_report_file_name)) + + with open(self.ash_report_file_name, 'w+') as f: + f.write(ash_report) + except Exception as e: + self.stdio.error("ash report gather failed, error message: {0}".format(e)) def __init_report_path(self): try: @@ -87,29 +100,19 @@ def __init_report_path(self): except Exception as e: self.stdio.error("init_report_path failed, error:{0}".format(e)) - def __init_variables(self): - try: - self.variables = { - "observer_data_dir": self.ob_nodes[0].get("home_path") if self.ob_nodes and self.ob_nodes[0].get("home_path") else "", - "obproxy_data_dir": self.obproxy_nodes[0].get("home_path") if self.obproxy_nodes and self.obproxy_nodes[0].get("home_path") else "", - "from_time": self.from_time_str, - "to_time": self.to_time_str - } - self.stdio.verbose("gather scene variables: {0}".format(self.variables)) - except Exception as e: - self.stdio.error("init gather scene variables failed, error: {0}".format(e)) def init_option(self): options = self.context.options from_option = Util.get_option(options, 'from') to_option = Util.get_option(options, 'to') - store_dir_option = Util.get_option(options, 'store_dir',"./") trace_id_option = Util.get_option(options, 'trace_id') sql_id_option = Util.get_option(options, 'sql_id') report_type_option = Util.get_option(options, 'report_type') wait_class_option = Util.get_option(options, 'wait_class') + store_dir_option = Util.get_option(options, 'store_dir' ) + since_option = "30m" if from_option is not None and to_option is not None: try: from_timestamp = TimeUtils.parse_time_str(from_option) @@ -117,77 +120,60 @@ def init_option(self): self.from_time_str = from_option self.to_time_str = to_option except OBDIAGFormatException: - self.stdio.exception('Error: Datetime is invalid. Must be in format yyyy-mm-dd hh:mm:ss. from_datetime={0}, to_datetime={1}'.format(from_option, to_option)) + self.stdio.exception( + 'Error: Datetime is invalid. Must be in format yyyy-mm-dd hh:mm:ss. 
from_datetime={0}, to_datetime={1}'.format( + from_option, to_option)) return False if to_timestamp <= from_timestamp: self.stdio.exception('Error: from datetime is larger than to datetime, please check.') return False - elif (from_option is None or to_option is None) and since_option is not None: + elif (from_option is None or to_option is None): now_time = datetime.datetime.now() - self.to_time_str = (now_time + datetime.timedelta(minutes=1)).strftime('%Y-%m-%d %H:%M:%S') - self.from_time_str = (now_time - datetime.timedelta(seconds=TimeUtils.parse_time_length_to_sec(since_option))).strftime('%Y-%m-%d %H:%M:%S') + self.to_time_str = (now_time + datetime.timedelta(minutes=0)).strftime('%Y-%m-%d %H:%M:%S') + self.from_time_str = (now_time - datetime.timedelta( + seconds=TimeUtils.parse_time_length_to_sec(since_option))).strftime('%Y-%m-%d %H:%M:%S') self.stdio.print('gather from_time: {0}, to_time: {1}'.format(self.from_time_str, self.to_time_str)) else: self.stdio.warn('No time option provided, default processing is based on the last 30 minutes') now_time = datetime.datetime.now() self.to_time_str = (now_time + datetime.timedelta(minutes=1)).strftime('%Y-%m-%d %H:%M:%S') - if since_option: - self.from_time_str = (now_time - datetime.timedelta(seconds=TimeUtils.parse_time_length_to_sec(since_option))).strftime('%Y-%m-%d %H:%M:%S') - else: - self.from_time_str = (now_time - datetime.timedelta(minutes=30)).strftime('%Y-%m-%d %H:%M:%S') + self.from_time_str = (now_time - datetime.timedelta(minutes=30)).strftime('%Y-%m-%d %H:%M:%S') self.stdio.print('gather from_time: {0}, to_time: {1}'.format(self.from_time_str, self.to_time_str)) if store_dir_option: if not os.path.exists(os.path.abspath(store_dir_option)): - self.stdio.warn('warn: args --store_dir [{0}] incorrect: No such directory, Now create it'.format(os.path.abspath(store_dir_option))) + self.stdio.warn('warn: args --store_dir [{0}] incorrect: No such directory, Now create it'.format( + os.path.abspath(store_dir_option))) os.makedirs(os.path.abspath(store_dir_option)) self.gather_pack_dir = os.path.abspath(store_dir_option) - self.ash_sql="CALL DBMS_WORKLOAD_REPOSITORY.ASH_REPORT( '{0}', '{1}'".format(self.from_time_str, self.to_time_str) if sql_id_option: self.sql_id = sql_id_option - self.ash_sql = self.ash_sql + ", '{0}'".format(self.sql_id) else: - self.ash_sql = self.ash_sql + ", NULL" + self.sql_id = None if trace_id_option: self.trace_id = trace_id_option - self.ash_sql = self.ash_sql + ", '{0}'".format(self.trace_id) else: - self.ash_sql = self.ash_sql + ", NULL" + self.trace_id = None + + if report_type_option: + self.report_type = report_type_option.strip() + if report_type_option.upper() != "TEXT": + self.stdio.error("Invalid argument for report type, Now just support TEXT") + return False + else: + self.report_type = None if wait_class_option: self.wait_class = wait_class_option - self.ash_sql = self.ash_sql + ", '{0}'".format(self.wait_class) else: - self.ash_sql = self.ash_sql + ", NULL" - if report_type_option: - self.report_type = report_type_option - self.ash_sql = self.ash_sql + ", '{0}'".format(self.report_type) + self.wait_class = None + if store_dir_option: + self.gather_pack_dir = store_dir_option else: - self.ash_sql = self.ash_sql + ", NULL" - - try: - self.ash_sql = self.ash_sql + ");" - self.stdio.verbose("ash_sql: {0}".format(self.ash_sql)) - - ash_report_data=self.obconn.execute_sql(self.ash_sql) - if not ash_report_data or len(ash_report_data)==0: - self.stdio.error("ash report data is empty") - raise 
OBDIAGException("ash report data is empty") - ash_report=ash_report_data[0] - - # save ash_report_data - self.ash_report_file_name="ash_report_{0}.txt".format(TimeUtils.timestamp_to_filename_time(self.gather_timestamp)) - - with open(self.report_path + "/"+self.ash_report_file_name, 'w') as f: - f.write(ash_report) - except Exception as e: - self.stdio.error("ash report gather failed, error message: {0}".format(e)) - return False + self.gather_pack_dir = "./" + self.stdio.print("from_time: {0}, to_time: {1}, sql_id: {2}, trace_id: {3}, report_type: {4}, wait_class: {5}, store_dir: {6}".format(self.from_time_str, self.to_time_str, self.sql_id, self.trace_id, self.report_type, self.wait_class,self.gather_pack_dir)) return True def __print_result(self): - self.stdio.print(Fore.YELLOW + "\nGather scene results stored in this directory: {0}\n".format(self.report_path + "/"+self.ash_report_file_name) + Style.RESET_ALL) - - - - - + self.stdio.print(Fore.YELLOW + "\nGather scene results stored in this directory: {0}".format( + self.ash_report_file_name) + Style.RESET_ALL) + self.stdio.print("") \ No newline at end of file diff --git a/init_obdiag_cmd.sh b/init_obdiag_cmd.sh index e9ea3d49..c896ecca 100644 --- a/init_obdiag_cmd.sh +++ b/init_obdiag_cmd.sh @@ -12,7 +12,7 @@ _obdiag_completion() { case "${COMP_WORDS[1]}" in gather) if [ "$COMP_CWORD" -eq 2 ]; then - type_list="log clog slog plan_monitor stack perf sysstat obproxy_log all scene" + type_list="log clog slog plan_monitor stack perf sysstat obproxy_log all scene ash" elif [ "${COMP_WORDS[2]}" = "scene" ] && [ "$COMP_CWORD" -eq 3 ]; then type_list="list run" fi From ac776ab295606371c141c926e0b166e3284b0f2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=B8=A0=E7=A3=8A?= Date: Mon, 29 Apr 2024 20:47:30 +0800 Subject: [PATCH 3/6] support ash --- cmd.py | 2 +- handler/gather/gather_ash_report.py | 14 +++++++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/cmd.py b/cmd.py index 3e8c9a0f..69282bf1 100644 --- a/cmd.py +++ b/cmd.py @@ -565,7 +565,7 @@ def _do_command(self, obdiag): class ObdiagGatherAshReportCommand(ObdiagOriginCommand): def __init__(self): - super(ObdiagGatherAshReportCommand, self).__init__('ash', 'gather ash report') + super(ObdiagGatherAshReportCommand, self).__init__('ash', 'Gather ash report') self.parser.add_option('--trace_id', type='string', help="The TRACE.ID of the SQL to be sampled, if left blank or filled with NULL, indicates that TRACE.ID is not restricted.") self.parser.add_option('--sql_id', type='string', diff --git a/handler/gather/gather_ash_report.py b/handler/gather/gather_ash_report.py index 6bb6bcf3..5e96b58f 100644 --- a/handler/gather/gather_ash_report.py +++ b/handler/gather/gather_ash_report.py @@ -28,6 +28,7 @@ class GatherAshReportHandler(SafeStdio): def __init__(self, context, gather_pack_dir='./'): super().__init__() + self.result_summary_file_name = None self.report_type = None self.wait_class = None self.sql_id = None @@ -83,12 +84,15 @@ def execute(self): # save ash_report_data self.ash_report_file_name = "ash_report_{0}.txt".format( TimeUtils.timestamp_to_filename_time(self.gather_timestamp)) - self.ash_report_file_name=os.path.join(self.report_path, "result_summary.txt") - - self.stdio.verbose("save ash report file name: {0}".format(self.ash_report_file_name)) + self.ash_report_file_name=os.path.join(self.report_path, self.ash_report_file_name) with open(self.ash_report_file_name, 'w+') as f: f.write(ash_report) + self.stdio.print("save ash report file name:"+ Fore.YELLOW 
+"{0}".format(self.ash_report_file_name)+Style.RESET_ALL) + self.result_summary_file_name = os.path.join(self.report_path, "result_summary.txt") + with open(self.ash_report_file_name, 'w+') as f: + f.write(self.ash_report_file_name) + except Exception as e: self.stdio.error("ash report gather failed, error message: {0}".format(e)) @@ -174,6 +178,6 @@ def init_option(self): return True def __print_result(self): - self.stdio.print(Fore.YELLOW + "\nGather scene results stored in this directory: {0}".format( - self.ash_report_file_name) + Style.RESET_ALL) + self.stdio.print(Fore.YELLOW + "\nGather ash_report results stored in this directory: {0}".format( + self.report_path) + Style.RESET_ALL) self.stdio.print("") \ No newline at end of file From ef08820033a6be1aff4f0bfde00517d764f85ed2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=B8=A0=E7=A3=8A?= Date: Tue, 30 Apr 2024 10:05:15 +0800 Subject: [PATCH 4/6] support ash --- cmd.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd.py b/cmd.py index 69282bf1..d3935ac8 100644 --- a/cmd.py +++ b/cmd.py @@ -605,6 +605,7 @@ def __init__(self): self.parser.add_option('--files', action="append", type='string', help="specify files") self.parser.add_option('--store_dir', type='string', help='the dir to store gather result, current dir by default.', default='./') self.parser.add_option('-c', type='string', help='obdiag custom config', default=os.path.expanduser('~/.obdiag/config.yml')) + self.parser.add_option('--since', type='string',help="Specify time range that from 'n' [d]ays, 'n' [h]ours or 'n' [m]inutes. before to now. format: . example: 1h.",default='30m') def init(self, cmd, args): super(ObdiagAnalyzeLogCommand, self).init(cmd, args) From 1cfd7c187a89f09a83cb60a17498ef5909b0ce3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=B8=A0=E7=A3=8A?= Date: Tue, 30 Apr 2024 10:25:43 +0800 Subject: [PATCH 5/6] ash's observer version must greater than 4.0.0.0 --- handler/gather/gather_ash_report.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/handler/gather/gather_ash_report.py b/handler/gather/gather_ash_report.py index 5e96b58f..6f0d2977 100644 --- a/handler/gather/gather_ash_report.py +++ b/handler/gather/gather_ash_report.py @@ -18,9 +18,10 @@ import datetime import os +from common.command import get_observer_version, get_observer_version_by_sql from common.ob_connector import OBConnector from common.obdiag_exception import OBDIAGFormatException, OBDIAGException -from common.tool import DirectoryUtil, TimeUtils, Util +from common.tool import DirectoryUtil, TimeUtils, Util, StringUtils from stdio import SafeStdio from colorama import Fore, Style @@ -45,6 +46,8 @@ def __init__(self, context, gather_pack_dir='./'): else: self.gather_timestamp = TimeUtils.get_current_us_timestamp() self.cluster = self.context.cluster_config + + self.observer_nodes = self.context.cluster_config.get("servers") try: self.obconn = OBConnector( ip=self.cluster.get("db_host"), @@ -60,12 +63,31 @@ def __init__(self, context, gather_pack_dir='./'): raise OBDIAGFormatException("Failed to connect to database: {0}".format(e)) def handle(self): + if not self.version_check(): + self.stdio.error('version check failed') + return False if not self.init_option(): self.stdio.error('init option failed') return False self.__init_report_path() self.execute() self.__print_result() + def version_check(self): + observer_version = "" + try: + observer_version = get_observer_version_by_sql(self.ob_cluster, self.stdio) + except Exception as e: + if 
len(self.observer_nodes) > 0:
+                observer_version = get_observer_version(True, self.observer_nodes[0]["ssher"], self.observer_nodes[0]["home_path"], self.stdio)
+            else:
+                self.stdio.warn("GatherAshReportHandler failed to get observer version: {0}".format(e))
+        self.stdio.verbose("GatherAshReportHandler get observer version: {0}".format(observer_version))
+
+        if not (observer_version == "4.0.0.0" or StringUtils.compare_versions_greater(observer_version, "4.0.0.0")):
+            self.stdio.error("observer version: {0}, must be 4.0.0.0 or higher".format(observer_version))
+            return False
+        return True
 
     def execute(self):
         try:

From e0b3271b2dda7c82b95459b9ea41e7564802df33 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=B8=A0=E7=A3=8A?=
Date: Tue, 30 Apr 2024 10:28:57 +0800
Subject: [PATCH 6/6] update format

---
 common/ob_connector.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/common/ob_connector.py b/common/ob_connector.py
index e002c9e4..948644a5 100644
--- a/common/ob_connector.py
+++ b/common/ob_connector.py
@@ -114,6 +114,7 @@ def execute_sql_pretty(self, sql):
         ret = from_db_cursor(cursor)
         cursor.close()
         return ret
+
     def callproc(self, procname, args=()):
         if self.conn is None:
             self._connect_db()
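
Taken together, these patches expose the feature as "obdiag gather ash": the command collects the time range and the sql_id/trace_id/wait_class/report_type filters, and GatherAshReportHandler turns them into a DBMS_WORKLOAD_REPOSITORY.ASH_REPORT call through the new OBConnector.callproc() helper, saving the first column of the first returned row as ash_report_<timestamp>.txt. The sketch below shows the equivalent call issued directly with a pymysql connection to the sys tenant; the host, port, credentials, time range, and output path are illustrative placeholders rather than values taken from the patches.

    # Minimal sketch of the stored-procedure call behind "obdiag gather ash".
    # Connection details and the output file name are placeholders.
    import pymysql

    conn = pymysql.connect(host="127.0.0.1", port=2881, user="root@sys",
                           password="", database="oceanbase")
    cursor = conn.cursor()

    # Argument order mirrors gather_ash_report.py:
    # (from_time, to_time, sql_id, trace_id, wait_class, report_type);
    # None is passed for filters that should not be restricted.
    args = ("2024-04-29 10:00:00", "2024-04-29 10:30:00", None, None, None, "TEXT")
    cursor.callproc("DBMS_WORKLOAD_REPOSITORY.ASH_REPORT", args)

    rows = cursor.fetchall()
    if rows and rows[0] and rows[0][0]:
        # The handler writes the first column of the first row as the report text.
        with open("ash_report.txt", "w") as f:
            f.write(rows[0][0])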