From 366dccd187049cf539d708dfb5e4bb59f2d985ba Mon Sep 17 00:00:00 2001 From: "jingshun.tq" <35712518+Teingi@users.noreply.github.com> Date: Thu, 5 Dec 2024 15:54:10 +0800 Subject: [PATCH 1/7] Rename Dockerfile to DockerFile --- build/{Dockerfile => DockerFile} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename build/{Dockerfile => DockerFile} (93%) diff --git a/build/Dockerfile b/build/DockerFile similarity index 93% rename from build/Dockerfile rename to build/DockerFile index 60898428..4a0d7c9f 100644 --- a/build/Dockerfile +++ b/build/DockerFile @@ -8,4 +8,4 @@ RUN /opt/miniconda3/bin/conda init RUN /opt/miniconda3/bin/conda create --name obdiag python=3.8 -y RUN source /opt/miniconda3/bin/activate obdiag RUN /opt/miniconda3/envs/obdiag/bin/python3.8 -m pip install --upgrade pip setuptools wheel -RUN yum install -y gcc gcc-c++ make \ No newline at end of file +RUN yum install -y gcc gcc-c++ make From d517211425912352ed6161136686722e3db05112 Mon Sep 17 00:00:00 2001 From: Teingi Date: Mon, 9 Dec 2024 19:29:15 +0800 Subject: [PATCH 2/7] Backup the files in the ~/.obdiag/ directory --- init.sh | 14 ++++++ obdiag_backup.sh | 70 ++++++++++++++++++++++++++++++ rpm/oceanbase-diagnostic-tool.spec | 3 ++ 3 files changed, 87 insertions(+) create mode 100755 obdiag_backup.sh diff --git a/init.sh b/init.sh index 48c65911..fcf219bf 100755 --- a/init.sh +++ b/init.sh @@ -49,5 +49,19 @@ source ${WORK_DIR}/init_obdiag_cmd.sh if [ -d "${OBDIAG_HOME}/check_package.yaml" ]; then echo "${OBDIAG_HOME}/*check_package.yaml and ${OBDIAG_HOME}/tasks has been discarded. If you have made any changes to these files on your own, please transfer the relevant data to *check_package.yaml in ${OBDIAG_HOME}/check/" fi + +output_file=${OBDIAG_HOME}/version.yaml +version_line=$(obdiag --version 2>&1 | grep -oP 'OceanBase Diagnostic Tool: \K[\d.]+') +if [ -n "$version" ]; then + content="obdiag_version: \"$version\"" + + # Write or update the version information to the file + echo "$content" > "$output_file" + + echo "obdiag version information has been successfully written to $output_file" +else + echo "failed to retrieve obdiag version information." 
+fi + echo "Init obdiag finished" cd - diff --git a/obdiag_backup.sh b/obdiag_backup.sh new file mode 100755 index 00000000..bc7ff2be --- /dev/null +++ b/obdiag_backup.sh @@ -0,0 +1,70 @@ +#!/bin/bash + +# Define source directory and target backup directory +SOURCE_DIR=~/.obdiag/ +BACKUP_DIR=~/.obdiag/backup/ + +# Ensure the backup directory exists, create it if it does not +mkdir -p "$BACKUP_DIR" + +# List of directories to be backed up +DIRS=("display" "check" "gather" "rca") + +# Retrieve version information; if file does not exist or read fails, VERSION remains empty +VERSION="" +if [ -f "$SOURCE_DIR/version.yaml" ]; then + VERSION=$(grep 'obdiag_version:' "$SOURCE_DIR/version.yaml" | awk '{print $2}' | tr -d '"') +fi + +# Define the name of the backup archive (use timestamp for uniqueness, and optionally add version) +TIMESTAMP=$(date +"%Y%m%d_%H%M%S") +TARFILE="$BACKUP_DIR/obdiag_backup_$TIMESTAMP" +TARFILE+="${VERSION:+_v$VERSION}.tar.gz" + +# Temporary directory for staging backup files +TEMP_BACKUP_DIR="$BACKUP_DIR/tmp_obdiag_backup_$TIMESTAMP" +mkdir -p "$TEMP_BACKUP_DIR" + +# Iterate over each directory to be backed up +for dir in "${DIRS[@]}"; do + # Check if the source directory exists + if [ -d "$SOURCE_DIR$dir" ]; then + # Copy the directory into the temporary backup directory + cp -rp "$SOURCE_DIR$dir" "$TEMP_BACKUP_DIR/" + echo "Copied $dir to temporary backup directory." + else + echo "Source directory $SOURCE_DIR$dir does not exist. Skipping." + fi +done + +# Create a tar.gz archive +if tar -czf "$TARFILE" -C "$TEMP_BACKUP_DIR" .; then + echo "Backup archive created successfully at $TARFILE" +else + echo "Failed to create backup archive." + exit 1 +fi + +# Clean up the temporary backup directory +rm -rf "$TEMP_BACKUP_DIR" +echo "Temporary files removed." + +# Cleanup phase: Remove backups older than one year or delete the oldest backups if more than 12 exist +ONE_YEAR_AGO="+365" # find command uses days, so +365 means older than one year + +# Remove backups older than one year +find "$BACKUP_DIR" -maxdepth 1 -name "obdiag_backup_*.tar.gz" -type f -mtime $ONE_YEAR_AGO -exec rm -f {} \; +echo "Removed old backup files older than one year." 
+
+# If there are more than 12 backups, remove the excess oldest ones
+BACKUP_FILES=($(find "$BACKUP_DIR" -maxdepth 1 -name "obdiag_backup_*.tar.gz" -type f -printf '%T@ %p\n' | sort -n | awk '{print $2}'))
+NUM_BACKUPS=${#BACKUP_FILES[@]}
+
+if [ $NUM_BACKUPS -gt 12 ]; then
+    COUNT_TO_DELETE=$((NUM_BACKUPS - 12))
+    for ((i = 0; i < COUNT_TO_DELETE; i++)); do
+        FILE_PATH=${BACKUP_FILES[i]}
+        rm -f "$FILE_PATH"
+        echo "Removed excess backup file: $FILE_PATH"
+    done
+fi
\ No newline at end of file
diff --git a/rpm/oceanbase-diagnostic-tool.spec b/rpm/oceanbase-diagnostic-tool.spec
index 8f53317d..160ef8b6 100644
--- a/rpm/oceanbase-diagnostic-tool.spec
+++ b/rpm/oceanbase-diagnostic-tool.spec
@@ -52,6 +52,7 @@ rm -f obdiag.py oceanbase-diagnostic-tool.spec
 \cp -rf $SRC_DIR/handler/gather/plugins/redact/*.py $BUILD_DIR/SOURCES/gather/redact
 \cp -rf $SRC_DIR/init.sh $BUILD_DIR/SOURCES/init.sh
 \cp -rf $SRC_DIR/init_obdiag_cmd.sh $BUILD_DIR/SOURCES/init_obdiag_cmd.sh
+\cp -rf $SRC_DIR/obdiag_backup.sh $BUILD_DIR/SOURCES/obdiag_backup.sh
 \cp -rf $SRC_DIR/conf $BUILD_DIR/SOURCES/conf
 mkdir -p ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/lib/
 mkdir -p ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/dependencies/bin
@@ -68,6 +69,7 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/display
 \cp -rf $BUILD_DIR/SOURCES/conf ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/
 \cp -rf $BUILD_DIR/SOURCES/init.sh ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/
 \cp -rf $BUILD_DIR/SOURCES/init_obdiag_cmd.sh ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/
+\cp -rf $BUILD_DIR/SOURCES/obdiag_backup.sh ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/
 \cp -rf $BUILD_DIR/SOURCES/check/* ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/check
 mv ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/check/tasks/*.yaml ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/check/
 \cp -rf $BUILD_DIR/SOURCES/gather/tasks ${RPM_BUILD_ROOT}/usr/local/oceanbase-diagnostic-tool/gather
@@ -87,6 +89,7 @@ find /usr/local/oceanbase-diagnostic-tool/obdiag -type f -exec chmod 644 {} \;
 ln -sf /usr/local/oceanbase-diagnostic-tool/obdiag /usr/bin/obdiag
 chmod +x /usr/local/oceanbase-diagnostic-tool/obdiag
 cp -rf /usr/local/oceanbase-diagnostic-tool/init_obdiag_cmd.sh /etc/profile.d/obdiag.sh
+/usr/local/oceanbase-diagnostic-tool/obdiag_backup.sh
 /usr/local/oceanbase-diagnostic-tool/init.sh
 echo -e 'Please execute the following command to init obdiag:\n'
 echo -e '\033[32m source /usr/local/oceanbase-diagnostic-tool/init.sh \n \033[0m'

From bbff0e30f84ce012c72e12ee93782287d522e000 Mon Sep 17 00:00:00 2001
From: Teingi
Date: Mon, 9 Dec 2024 21:00:54 +0800
Subject: [PATCH 3/7] fix

---
 init.sh          |  8 ++++----
 obdiag_backup.sh | 26 +++++++++++++++++++++++---
 2 files changed, 27 insertions(+), 7 deletions(-)

diff --git a/init.sh b/init.sh
index fcf219bf..2e9b09b2 100755
--- a/init.sh
+++ b/init.sh
@@ -50,10 +50,11 @@ if [ -d "${OBDIAG_HOME}/check_package.yaml" ]; then
     echo "${OBDIAG_HOME}/*check_package.yaml and ${OBDIAG_HOME}/tasks has been discarded.
If you have made any changes to these files on your own, please transfer the relevant data to *check_package.yaml in ${OBDIAG_HOME}/check/" fi +cd - output_file=${OBDIAG_HOME}/version.yaml -version_line=$(obdiag --version 2>&1 | grep -oP 'OceanBase Diagnostic Tool: \K[\d.]+') -if [ -n "$version" ]; then - content="obdiag_version: \"$version\"" +version_line=$(/usr/local/oceanbase-diagnostic-tool/obdiag --version 2>&1 | grep -oP 'OceanBase Diagnostic Tool: \K[\d.]+') +if [ -n "$version_line" ]; then + content="obdiag_version: \"$version_line\"" # Write or update the version information to the file echo "$content" > "$output_file" @@ -64,4 +65,3 @@ else fi echo "Init obdiag finished" -cd - diff --git a/obdiag_backup.sh b/obdiag_backup.sh index bc7ff2be..40a37d02 100755 --- a/obdiag_backup.sh +++ b/obdiag_backup.sh @@ -10,6 +10,20 @@ mkdir -p "$BACKUP_DIR" # List of directories to be backed up DIRS=("display" "check" "gather" "rca") +# Check if any of the directories contain files +should_backup=false +for dir in "${DIRS[@]}"; do + if [ -d "$SOURCE_DIR$dir" ] && [ "$(ls -A "$SOURCE_DIR$dir")" ]; then + should_backup=true + break + fi +done + +if ! $should_backup; then + echo "None of the specified directories contain files. Skipping backup." + exit 0 +fi + # Retrieve version information; if file does not exist or read fails, VERSION remains empty VERSION="" if [ -f "$SOURCE_DIR/version.yaml" ]; then @@ -17,9 +31,15 @@ if [ -f "$SOURCE_DIR/version.yaml" ]; then fi # Define the name of the backup archive (use timestamp for uniqueness, and optionally add version) -TIMESTAMP=$(date +"%Y%m%d_%H%M%S") -TARFILE="$BACKUP_DIR/obdiag_backup_$TIMESTAMP" -TARFILE+="${VERSION:+_v$VERSION}.tar.gz" +TIMESTAMP=$(date +"%Y%m%d%H%M%S") +BASE_NAME="obdiag_backup${VERSION:+_v$VERSION}" +TARFILE="$BACKUP_DIR/${BASE_NAME}_$TIMESTAMP.tar.gz" + +# Check if a file with the same name already exists in the BACKUP_DIR +if find "$BACKUP_DIR" -maxdepth 1 -name "${BASE_NAME}_*.tar.gz" -print -quit | grep -q .; then + echo "A backup file with the same name already exists. Skipping backup creation." 
+ exit 0 +fi # Temporary directory for staging backup files TEMP_BACKUP_DIR="$BACKUP_DIR/tmp_obdiag_backup_$TIMESTAMP" From c6252b189bf97f2970ac5e0989ac05a1e403c5a4 Mon Sep 17 00:00:00 2001 From: Teingi Date: Tue, 10 Dec 2024 12:01:58 +0800 Subject: [PATCH 4/7] fixed: gather tabledump --- core.py | 14 ++++----- handler/gather/gather_tabledump.py | 47 ++++++++++++++++++------------ 2 files changed, 36 insertions(+), 25 deletions(-) diff --git a/core.py b/core.py index d19f9a23..5fe6210b 100644 --- a/core.py +++ b/core.py @@ -249,8 +249,8 @@ def gather_function(self, function_type, opt): return ObdiagResult(ObdiagResult.INPUT_ERROR_CODE, error_data='No such custum config') else: self.stdio.print("{0} start ...".format(function_type)) - self.update_obcluster_nodes(config) self.set_context(function_type, 'gather', config) + self.update_obcluster_nodes(config) options = self.context.options timestamp = TimeUtils.get_current_us_timestamp() self.context.set_variable('gather_timestamp', timestamp) @@ -385,8 +385,8 @@ def display_function(self, function_type, opt): return ObdiagResult(ObdiagResult.INPUT_ERROR_CODE, error_data='No such custum config') else: self.stdio.print("{0} start ...".format(function_type)) - self.update_obcluster_nodes(config) self.set_context(function_type, 'display', config) + self.update_obcluster_nodes(config) timestamp = TimeUtils.get_current_us_timestamp() self.context.set_variable('display_timestamp', timestamp) if function_type == 'display_scenes_run': @@ -409,8 +409,8 @@ def analyze_fuction(self, function_type, opt): else: self.stdio.print("{0} start ...".format(function_type)) if function_type == 'analyze_log': - self.update_obcluster_nodes(config) self.set_context(function_type, 'analyze', config) + self.update_obcluster_nodes(config) handler = AnalyzeLogHandler(self.context) return handler.handle() elif function_type == 'analyze_log_offline': @@ -422,8 +422,8 @@ def analyze_fuction(self, function_type, opt): handler = AnalyzeQueueHandler(self.context) return handler.handle() elif function_type == 'analyze_flt_trace': - self.update_obcluster_nodes(config) self.set_context(function_type, 'analyze', config) + self.update_obcluster_nodes(config) handler = AnalyzeFltTraceHandler(self.context) return handler.handle() elif function_type == 'analyze_parameter_default': @@ -456,8 +456,8 @@ def analyze_fuction(self, function_type, opt): handler = AnalyzeMemoryHandler(self.context) return handler.handle() elif function_type == 'analyze_memory': - self.update_obcluster_nodes(config) self.set_context(function_type, 'analyze', config) + self.update_obcluster_nodes(config) handler = AnalyzeMemoryHandler(self.context) return handler.handle() else: @@ -472,8 +472,8 @@ def check(self, opts): else: try: self.stdio.print("check start ...") - self.update_obcluster_nodes(config) self.set_context('check', 'check', config) + self.update_obcluster_nodes(config) obproxy_check_handler = None observer_check_handler = None result_data = {} @@ -518,9 +518,9 @@ def rca_run(self, opts): self._call_stdio('error', 'No such custum config') return ObdiagResult(ObdiagResult.INPUT_ERROR_CODE, error_data='No such custum config') else: + self.set_context('rca_run', 'rca_run', config) if config.get_ob_cluster_config.get("db_host") is not None and config.get_ob_cluster_config.get("servers") is not None: self.update_obcluster_nodes(config) - self.set_context('rca_run', 'rca_run', config) try: handler = RCAHandler(self.context) return handler.handle() diff --git a/handler/gather/gather_tabledump.py 
b/handler/gather/gather_tabledump.py
index 46a2626c..862b904f 100644
--- a/handler/gather/gather_tabledump.py
+++ b/handler/gather/gather_tabledump.py
@@ -140,21 +140,6 @@ def __get_table_info(self):
             return
         self.tenant_id = tenant_data.fetchall()[0].get("tenant_id")
-        database_data = self.ob_connector.execute_sql_return_cursor_dictionary(
-            "SELECT con_id as tenant_id, object_id as database_id, object_name as database_name FROM oceanbase.cdb_objects where OBJECT_TYPE = 'DATABASE' and con_id = '{0}' and object_name='{1}' ".format(self.tenant_id, self.database)
-        )
-        if database_data.rowcount == 0:
-            self.stdio.error("database is None")
-            return
-        self.database_id = database_data.fetchall()[0].get("database_id")
-        table_data = self.ob_connector.execute_sql_return_cursor_dictionary(
-            "select /*+read_consistency(weak) */ t.table_id from oceanbase.__all_virtual_table t where t.tenant_id = '{0}' and t.database_id = '{1}' and table_name = '{2}' limit 1 ".format(self.tenant_id, self.database_id, self.table)
-        )
-        if table_data.rowcount == 0:
-            self.stdio.error("table is None")
-            return
-        self.table_id = table_data.fetchall()[0].get("table_id")
-
         ## Query the row count
         query_count = "select /*+read_consistency(weak) */ table_name , ifnull(num_rows,0) as num_rows from oceanbase.cdb_tables where con_id = '{0}' and owner = '{1}' and table_name = '{2}' order by num_rows desc limit 1".format(
             self.tenant_id, self.database, self.table
@@ -167,8 +152,25 @@ def __get_table_info(self):
             self.__report(query_count, columns, result)
         ## Query the data size
-        query_data = '''select /*+read_consistency(weak) */ t1.SVR_IP,t1.role,ifnull(t2.data_size,0) as total_data_size from (select SVR_IP,tenant_id, database_name, role, table_id, tablet_id from oceanbase.cdb_ob_table_locations) t1 left join (select tenant_id, tablet_id,data_size from oceanbase.cdb_ob_tablet_replicas) t2 on t1.tenant_id = t2.tenant_id and t1.tablet_id = t2.tablet_id where t1.tenant_id = '{0}' and t1.table_id = '{1}' order by total_data_size desc limit 1'''.format(
-            self.tenant_id, self.table_id
+
+        query_data = '''select y.SVR_IP,y.DATABASE_NAME,
+        case when y.TABLE_TYPE = 'INDEX' then '' else y.TABLE_NAME end as TABLE_NAME,
+        y.TABLE_TYPE,
+        sum(y.DATA_SIZE) AS "DATA_SIZE(MB)",sum(y.REQUIRED_SIZE) AS "REQUIRED_SIZE(MB)"
+        from (
+            select a.TENANT_ID, a.SVR_IP, a.TABLET_ID, b.table_id, b.DATABASE_NAME, b.TABLE_NAME, b.TABLE_TYPE, ROUND(a.data_size/1024/1024,2) AS "DATA_SIZE", ROUND(a.required_size/1024/1024,2) AS "REQUIRED_SIZE"
+            from oceanbase.CDB_OB_TABLET_REPLICAS a join oceanbase.cdb_ob_table_locations b on a.TABLET_ID=b.TABLET_ID and a.svr_ip=b.svr_ip and a.tenant_id=b.tenant_id
+            where a.TENANT_ID={tenant_id}
+            and b.DATABASE_NAME='{database}'
+            and (
+                b.TABLE_NAME='{table_name}'
+                or b.DATA_TABLE_ID in(select table_id from oceanbase.cdb_ob_table_locations where TENANT_ID={tenant_id} and TABLE_NAME='{table_name}')
+            )order by b.table_id
+        ) y
+        group by y.SVR_IP,y.DATABASE_NAME,y.TABLE_TYPE
+        order by y.SVR_IP,y.DATABASE_NAME asc,TABLE_NAME desc
+        '''.format(
+            tenant_id=self.tenant_id, database=self.database, table_name=self.table
         )
         columns, result = self.ob_connector.execute_sql_return_columns_and_data(query_data)
@@ -199,7 +201,16 @@ def __get_table_info_v3(self):
             self.stdio.error("table is None")
             return
         self.table_id = table_data.fetchall()[0].get("table_id")
-        query_count = '''select /*+read_consistency(weak) */ m.svr_ip,m.role,m.data_size total_data_size, m.row_count as total_rows_count from oceanbase.__all_virtual_meta_table m, oceanbase.__all_virtual_table t
+        query_count = '''select /*+read_consistency(weak) */
+        m.zone,
+        m.svr_ip,
+        t.database_name,
+        t.table_name,
+        m.role,
+        ROUND(m.data_size / 1024 / 1024, 2) AS "DATA_SIZE(M)",
+        ROUND(m.required_size / 1024 / 1024, 2) AS "REQUIRED_SIZE(M)",
+        m.row_count as total_rows_count
+        from oceanbase.__all_virtual_meta_table m, oceanbase.__all_virtual_table t
         where m.table_id = t.table_id and m.tenant_id = '{0}' and m.table_id = '{1}' and t.table_name = '{2}' order by total_rows_count desc limit 1'''.format(
             self.tenant_id, self.table_id, self.table
         )

From 7a0c88b3b5deb9b01d5b3cacc1720b627c2f5ea6 Mon Sep 17 00:00:00 2001
From: Teingi
Date: Tue, 10 Dec 2024 16:07:04 +0800
Subject: [PATCH 5/7] Simplify the display of table information in the
 tabledump result.

---
 handler/gather/gather_tabledump.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/handler/gather/gather_tabledump.py b/handler/gather/gather_tabledump.py
index 862b904f..bffd9b80 100644
--- a/handler/gather/gather_tabledump.py
+++ b/handler/gather/gather_tabledump.py
@@ -126,7 +126,8 @@ def __get_table_schema(self):
             columns, result = self.tenant_connector.execute_sql_return_columns_and_data(sql)
             if result is None or len(result) == 0:
                 self.stdio.verbose("excute sql: {0}, result is None".format(sql))
-            self.__report(sql, columns, result)
+            else:
+                self.__report_simple(sql, result[0][1])
             return True
         except Exception as e:
             self.stdio.error("show create table error {0}".format(e))
@@ -235,6 +236,14 @@ def __report(self, sql, column_names, data):
         except Exception as e:
             self.stdio.error("report sql result to file: {0} failed, error:{1} ".format(self.file_name, e))

+    def __report_simple(self, sql, data):
+        try:
+            with open(self.file_name, 'a', encoding='utf-8') as f:
+                f.write('\n\n' + 'obclient > ' + sql + '\n')
+                f.write(data)
+        except Exception as e:
+            self.stdio.error("report sql result to file: {0} failed, error:{1} ".format(self.file_name, e))
+
     def __extract_string(self, s):
         if '@' in s:
             at_index = s.index('@')

From 442816322eb74dfa44f828514906ec4ace5f0a4b Mon Sep 17 00:00:00 2001
From: Teingi
Date: Wed, 11 Dec 2024 16:46:40 +0800
Subject: [PATCH 6/7] fixed: unit test

---
 .DS_Store                                        | Bin 0 -> 6148 bytes
 .devcontainer.json                               |   2 +-
 .github/workflows/build_base_docker.yml          |   2 +-
 test/common/ssh_client/test_docker_client.py     |   8 ++++----
 test/common/ssh_client/test_kubernetes_client.py |   8 ++++----
 test/common/ssh_client/test_local_client.py      |   8 ++++----
 test/common/ssh_client/test_remote_client.py     |  14 +++++++-------
 7 files changed, 21 insertions(+), 21 deletions(-)
 create mode 100644 .DS_Store

diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..5c3c6c2f9724dd4dd277ebc9126dd114ea190606
GIT binary patch
literal 6148
zcmeHKJ8r`;3?&nz2#_UXMqQyd5RCK$xj;WoiUJKh7}-7bTs>MJKSKhin>#dk1gIxb
zd=m5q(-aYH&)uWQMnqO{L;12{YPN4au|-A{2*()*8IIlX_S*NKlkD38<37lToa9B`
zFZs4XqXJZb3Qz$mKm}%3AWQ6OJ^RT#kP1+N|E_?29}3*CCXRvr>A>JE0I)&W4Rh}$
zfW-p9nm7g`0@I)ZgR0qLXwVTanO76Xz@Up}^PzdOW{0AFJI*hjE?NUQQUNM(t-vyt
b8>|0M@L&4>YZ6yffC}7|0^02MyB1H%+B$ih)!G7ohFi`z+zfN4VDNGb^m2@amE)}^
bMP9Kv_G{u8=yb%L4&={(=|ZCdf33g|P&^fT

literal 0
HcmV?d00001

diff --git a/.devcontainer.json b/.devcontainer.json
index 0926ae67..0b56b327 100644
--- a/.devcontainer.json
+++ b/.devcontainer.json
@@ -1,6 +1,6 @@
 {
     "name": "OBDIAG",
-    "dockerFile": "Dockerfile.dev",
+    "dockerFile": "DockerFile.dev",
     "postAttachCommand": "bash",
     "customizations": {
         "vscode": {
diff --git a/.github/workflows/build_base_docker.yml
b/.github/workflows/build_base_docker.yml index 6af67340..0aa8cbe2 100644 --- a/.github/workflows/build_base_docker.yml +++ b/.github/workflows/build_base_docker.yml @@ -29,7 +29,7 @@ jobs: with: context: . platforms: linux/amd64 - file: build/DockerFile.helper + file: DockerFile.helper push: true tags: | ${{ vars.DOCKER_PUSH_BASE }}/obdiag-builder:latest diff --git a/test/common/ssh_client/test_docker_client.py b/test/common/ssh_client/test_docker_client.py index 320f2e6e..7cd8e59d 100644 --- a/test/common/ssh_client/test_docker_client.py +++ b/test/common/ssh_client/test_docker_client.py @@ -26,7 +26,7 @@ class TestDockerClient(unittest.TestCase): - @patch('common.ssh_client.docker_client.docker.from_env') + @patch('src.common.ssh_client.docker_client.docker.from_env') def setUp(self, mock_docker_from_env): """ Configures the mock Docker client and sets up test parameters in a testing environment. @@ -68,7 +68,7 @@ def setUp(self, mock_docker_from_env): # Use MagicMock to simulate the Docker client object to avoid actual Docker API calls. self.docker_client.client = MagicMock() - @patch('common.ssh_client.docker_client.docker.from_env') + @patch('src.common.ssh_client.docker_client.docker.from_env') def test_init_with_valid_node(self, mock_docker_from_env): """ Test the __init__ method with a valid node response. @@ -96,7 +96,7 @@ def test_init_with_valid_node(self, mock_docker_from_env): # Verify that the client attribute of the docker_client object is of type DockerClientSDK self.assertIsInstance(docker_client.client, DockerClientSDK) - @patch('common.ssh_client.docker_client.docker.from_env') + @patch('src.common.ssh_client.docker_client.docker.from_env') def test_init_without_container_name(self, mock_docker_from_env): """ Test the initialization of DockerClient when no container name is provided. @@ -127,7 +127,7 @@ def test_init_without_container_name(self, mock_docker_from_env): # Verify that docker_client's client attribute is of type DockerClientSDK self.assertIsInstance(docker_client.client, DockerClientSDK) - @patch('common.ssh_client.docker_client.docker.from_env') + @patch('src.common.ssh_client.docker_client.docker.from_env') def test_init_with_invalid_context(self, mock_docker_from_env): """ Test the __init__ method with an invalid context. diff --git a/test/common/ssh_client/test_kubernetes_client.py b/test/common/ssh_client/test_kubernetes_client.py index 340a0a45..c9ed81cd 100644 --- a/test/common/ssh_client/test_kubernetes_client.py +++ b/test/common/ssh_client/test_kubernetes_client.py @@ -67,7 +67,7 @@ def tearDown(self): # Remove the temporary file to avoid leaving unused data os.remove(self.temp_file.name) - @patch('common.ssh_client.kubernetes_client.config.load_incluster_config') + @patch('src.common.ssh_client.kubernetes_client.config.load_incluster_config') def test_init_with_no_config_file(self, mock_load_incluster_config): """ Test the initialization of KubernetesClient without a configuration file. @@ -92,7 +92,7 @@ def test_init_with_no_config_file(self, mock_load_incluster_config): # Check if a message indicating the use of the default configuration file in the cluster was logged. 
self.context.stdio.verbose.assert_called_with("KubernetesClient load_kube_config from default config file in cluster.") - @patch('common.ssh_client.kubernetes_client.config.kube_config.load_kube_config') + @patch('src.common.ssh_client.kubernetes_client.config.kube_config.load_kube_config') def test_init_with_config_file(self, mock_load_kube_config): """ Test the initialization of KubernetesClient with a configuration file. @@ -117,7 +117,7 @@ def test_init_with_config_file(self, mock_load_kube_config): # Verify that stdio.verbose was called to log the configuration file loading. self.context.stdio.verbose.assert_called_with(f"KubernetesClient load_kube_config from {FILE_DIR}") - @patch('common.ssh_client.kubernetes_client.config.load_incluster_config', side_effect=config.ConfigException) + @patch('src.common.ssh_client.kubernetes_client.config.load_incluster_config', side_effect=config.ConfigException) def test_init_raises_exception(self, mock_load_incluster_config): """ Tests whether the __init__ method correctly raises an expected exception. @@ -250,7 +250,7 @@ def mock_download_method(namespace, pod_name, container_name, file_path, local_p content = file.read() self.assertEqual(content, b"test file content") # Compare byte type data - @patch('common.ssh_client.kubernetes_client.stream') + @patch('src.common.ssh_client.kubernetes_client.stream') def test_download_file_from_pod_error(self, mock_stream): """ Test the scenario of an error occurring when downloading a file from a Pod. diff --git a/test/common/ssh_client/test_local_client.py b/test/common/ssh_client/test_local_client.py index 066a237b..e18fee07 100644 --- a/test/common/ssh_client/test_local_client.py +++ b/test/common/ssh_client/test_local_client.py @@ -194,7 +194,7 @@ def test_exec_cmd_exception(self, mock_popen): # Ensure the error log is recorded as expected self.local_client.stdio.error.assert_called_with("run cmd = [exit 1] on localhost, Exception = [Popen error]") - @patch('common.ssh_client.local_client.shutil.copy') + @patch('src.common.ssh_client.local_client.shutil.copy') def test_download_success(self, mock_copy): """ Test the successful scenario of the download command. @@ -224,7 +224,7 @@ def test_download_success(self, mock_copy): # Verify that the error message method was not called self.local_client.stdio.error.assert_not_called() - @patch('common.ssh_client.local_client.shutil.copy') + @patch('src.common.ssh_client.local_client.shutil.copy') def test_download_failure(self, mock_copy): """ Tests the failure scenario of the download command. @@ -249,7 +249,7 @@ def test_download_failure(self, mock_copy): # Verify that the error message was recorded correctly self.local_client.stdio.error.assert_called_once() - @patch('common.ssh_client.local_client.shutil.copy') + @patch('src.common.ssh_client.local_client.shutil.copy') def test_upload_success(self, mock_copy): """ Tests the successful scenario of the upload command. @@ -273,7 +273,7 @@ def test_upload_success(self, mock_copy): # Verify if error messages were not called, ensuring no errors occurred during the upload self.local_client.stdio.error.assert_not_called() - @patch('common.ssh_client.local_client.shutil.copy') + @patch('src.common.ssh_client.local_client.shutil.copy') def test_upload_failure(self, mock_copy): """ Test the upload command failure. 
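The @patch retargets in these test files all follow from a single rule: unittest.mock resolves a string target by importing the module path it names, so the target must be the path under which the object is looked up at runtime. Once the package moved under src/, the old 'common.…' strings stopped resolving, and patching would fail before the test body even ran. A minimal runnable sketch of the rule, built on a throwaway stand-in module tree rather than obdiag's real package:

    import sys
    import types
    from unittest import mock

    # Fabricate a tiny 'src.common.scene' package tree so this sketch runs
    # anywhere; it stands in for the real obdiag layout after the move.
    src = types.ModuleType('src')
    common = types.ModuleType('src.common')
    scene = types.ModuleType('src.common.scene')
    scene.get_observer_version = lambda context: 'real-version'
    src.common = common
    common.scene = scene
    sys.modules.update({'src': src, 'src.common': common, 'src.common.scene': scene})

    # The target string must name where the attribute now lives:
    # 'src.common.scene.get_observer_version', not 'common.scene.get_observer_version'.
    with mock.patch('src.common.scene.get_observer_version', return_value='1.0.0') as fake:
        assert scene.get_observer_version(None) == '1.0.0'
    fake.assert_called_once_with(None)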
diff --git a/test/common/ssh_client/test_remote_client.py b/test/common/ssh_client/test_remote_client.py index 058f4226..c9778b3d 100644 --- a/test/common/ssh_client/test_remote_client.py +++ b/test/common/ssh_client/test_remote_client.py @@ -49,8 +49,8 @@ def setUp(self, mock_ssh_client): self.remote_client = RemoteClient(self.context, self.node) self.remote_client._ssh_fd = mock_ssh_client_instance - @patch('common.ssh_client.remote_client.paramiko.SSHClient') - @patch('common.ssh_client.remote_client.paramiko.client.AutoAddPolicy') + @patch('src.common.ssh_client.remote_client.paramiko.SSHClient') + @patch('src.common.ssh_client.remote_client.paramiko.client.AutoAddPolicy') def test_init_with_key_file(self, mock_auto_add_policy, mock_ssh_client): """ Test that the key file path is correctly expanded during initialization. @@ -77,8 +77,8 @@ def test_init_with_key_file(self, mock_auto_add_policy, mock_ssh_client): # Verify auto_add_policy was called during the SSHClient initialization. mock_auto_add_policy.assert_called_once() - @patch('common.ssh_client.remote_client.paramiko.SSHClient') - @patch('common.ssh_client.remote_client.paramiko.client.AutoAddPolicy') + @patch('src.common.ssh_client.remote_client.paramiko.SSHClient') + @patch('src.common.ssh_client.remote_client.paramiko.client.AutoAddPolicy') def test_init_without_key_file(self, mock_auto_add_policy, mock_ssh_client): """ Tests initialization without a key file. @@ -107,8 +107,8 @@ def test_init_without_key_file(self, mock_auto_add_policy, mock_ssh_client): # Verify that auto add policy was called to handle connection policies. mock_auto_add_policy.assert_called_once() - @patch('common.ssh_client.remote_client.paramiko.SSHClient') - @patch('common.ssh_client.remote_client.paramiko.client.AutoAddPolicy') + @patch('src.common.ssh_client.remote_client.paramiko.SSHClient') + @patch('src.common.ssh_client.remote_client.paramiko.client.AutoAddPolicy') def test_init_stores_expected_attributes(self, mock_auto_add_policy, mock_ssh_client): """ Test that initialization stores the expected attributes. @@ -325,7 +325,7 @@ def test_progress_bar_complete(self, mock_stdout): self.assertIn(expected_output, mock_stdout.getvalue()) self.assertIn('\r\n', mock_stdout.getvalue()) - @patch('common.ssh_client.remote_client.paramiko') + @patch('src.common.ssh_client.remote_client.paramiko') def test_upload(self, mock_paramiko): """ Set up the SSH transport object and SFTP client object. 
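One more convention the remote-client tests above depend on: stacked @patch decorators are applied bottom-up, so the mock created by the bottom decorator is bound to the first test parameter (hence mock_auto_add_policy before mock_ssh_client in test_init_with_key_file). A short self-contained illustration using two stdlib names:

    import os
    from unittest import mock

    @mock.patch('os.path.exists')  # top decorator: applied second, bound to the second parameter
    @mock.patch('os.getcwd')       # bottom decorator: applied first, bound to the first parameter
    def check(mock_getcwd, mock_exists):
        mock_getcwd.return_value = '/tmp'
        mock_exists.return_value = True
        assert os.getcwd() == '/tmp'
        assert os.path.exists('/any/path')

    check()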
From 93409949705372df2dd3cebce0e383a528aaebe9 Mon Sep 17 00:00:00 2001
From: Teingi
Date: Wed, 11 Dec 2024 17:03:00 +0800
Subject: [PATCH 7/7] fixed: unit test

---
 test/common/test_config_helper.py | 12 ++++++------
 test/common/test_scene.py         | 10 +++++-----
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/test/common/test_config_helper.py b/test/common/test_config_helper.py
index d5e7a4ff..afc487a3 100644
--- a/test/common/test_config_helper.py
+++ b/test/common/test_config_helper.py
@@ -21,10 +21,10 @@ class TestConfigHelper(unittest.TestCase):
-    @mock.patch('common.config_helper.YamlUtils.write_yaml_data')
-    @mock.patch('common.config_helper.DirectoryUtil.mkdir')
-    @mock.patch('common.config_helper.os.path.expanduser')
-    @mock.patch('common.config_helper.TimeUtils.timestamp_to_filename_time')
+    @mock.patch('src.common.config_helper.YamlUtils.write_yaml_data')
+    @mock.patch('src.common.config_helper.DirectoryUtil.mkdir')
+    @mock.patch('src.common.config_helper.os.path.expanduser')
+    @mock.patch('src.common.config_helper.TimeUtils.timestamp_to_filename_time')
     def test_save_old_configuration(self, mock_timestamp_to_filename_time, mock_expanduser, mock_mkdir, mock_write_yaml_data):
         # Mock the timestamp generation function to return a fixed value
         mock_timestamp_to_filename_time.return_value = '20240806_123456'
@@ -93,7 +93,7 @@ def test_input_with_default(self, mock_input):
         self.assertEqual(result, 'custom_user')

     # Test the password input method with a default value
-    @mock.patch('common.config_helper.pwinput.pwinput')
+    @mock.patch('src.common.config_helper.pwinput.pwinput')
     def test_input_password_with_default(self, mock_pwinput):
         # Create a mock context object
         context = mock.MagicMock()
@@ -120,7 +120,7 @@ def test_input_password_with_default(self, mock_pwinput):
         self.assertEqual(result, "custom_password")

     # Test the choice input method with a default option
-    @mock.patch('common.config_helper.input')
+    @mock.patch('src.common.config_helper.input')
     def test_input_choice_default(self, mock_input):
         # Create a mock context object
         context = mock.MagicMock()
diff --git a/test/common/test_scene.py b/test/common/test_scene.py
index ec8def50..01b1bc30 100644
--- a/test/common/test_scene.py
+++ b/test/common/test_scene.py
@@ -100,21 +100,21 @@ def test_wildcard_max_version(self):
         result = filter_by_version(scene, cluster, self.stdio)
         self.assertEqual(result, 0)

-    @patch('common.scene.get_observer_version')
+    @patch('src.common.scene.get_observer_version')
     def test_get_observer_version(self, mock_get_observer_version):
         mock_get_observer_version.return_value = "1.0.0"
         result = get_version_by_type(self.context, "observer")
         self.assertEqual(result, "1.0.0")
         mock_get_observer_version.assert_called_once_with(self.context)

-    @patch('common.scene.get_observer_version')
+    @patch('src.common.scene.get_observer_version')
     def test_get_other_version(self, mock_get_observer_version):
         mock_get_observer_version.return_value = "2.0.0"
         result = get_version_by_type(self.context, "other")
         self.assertEqual(result, "2.0.0")
         mock_get_observer_version.assert_called_once_with(self.context)

-    @patch('common.scene.get_observer_version')
+    @patch('src.common.scene.get_observer_version')
     def test_get_observer_version_fail(self, mock_get_observer_version):
         mock_get_observer_version.side_effect = Exception("Observer error")
         with self.assertRaises(Exception) as context:
             get_version_by_type(self.context, "observer")
         self.assertIn("can't get observer version", str(context.exception))
         self.context.stdio.warn.assert_called_once()

-    @patch('common.scene.get_obproxy_version')
+    @patch('src.common.scene.get_obproxy_version')
def test_get_obproxy_version(self, mock_get_obproxy_version): mock_get_obproxy_version.return_value = "3.0.0" result = get_version_by_type(self.context, "obproxy") @@ -134,7 +134,7 @@ def test_unsupported_type(self): get_version_by_type(self.context, "unsupported") self.assertIn("No support to get the version", str(context.exception)) - @patch('common.scene.get_observer_version') + @patch('src.common.scene.get_observer_version') def test_general_exception_handling(self, mock_get_observer_version): mock_get_observer_version.side_effect = Exception("Unexpected error") with self.assertRaises(Exception) as context:
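Read together, these test_scene cases pin down the dispatch contract of get_version_by_type: "observer" and "other" resolve through get_observer_version, "obproxy" resolves through get_obproxy_version, any other type is rejected, and a failure in the observer lookup is warned once via stdio and re-raised as a "can't get observer version" error. The following is a minimal sketch consistent with those assertions, with stub helpers standing in for the real functions in src.common.scene; it is an illustration of the contract, not obdiag's actual implementation:

    # Illustrative reconstruction of the behavior the test_scene cases assert.
    def get_observer_version(context):
        return "1.0.0"  # stub: the real helper queries the running observer

    def get_obproxy_version(context):
        return "3.0.0"  # stub: the real helper queries obproxy

    def get_version_by_type(context, version_type):
        stdio = context.stdio
        if version_type in ("observer", "other"):
            # "other" falls back to the observer version (see test_get_other_version).
            try:
                return get_observer_version(context)
            except Exception as e:
                stdio.warn("get observer version failed: {0}".format(e))
                raise Exception("can't get observer version, error: {0}".format(e))
        elif version_type == "obproxy":
            return get_obproxy_version(context)
        else:
            raise Exception("No support to get the version of {0}".format(version_type))

    class _Stdio:
        def warn(self, msg):
            print("warn:", msg)

    class _Context:
        stdio = _Stdio()

    assert get_version_by_type(_Context(), "obproxy") == "3.0.0"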