From 38ffaf9ebfc9aba45c42d9af22c0f036e61dae20 Mon Sep 17 00:00:00 2001 From: barryyosi-panw <158817412+barryyosi-panw@users.noreply.github.com> Date: Thu, 2 Jan 2025 09:04:55 +0200 Subject: [PATCH] Ciac 11199/support new validate in validate content script (#37182) * test dockerimage * test * working new validate * working new validate with git * working * fixes * working validate * working validate * set DEMISTO_SDK_OFFLINE_ENV=false * message * read precommit results from file * working with semi-parsing * Working version - not tests * output format fix * unittests * unittests * unittests * pre-commit * redundant changes * RN * pack version * unit test fix * unit test fix * committer * new docker * updated docker image * Rename 1_40_0.md to 1_39_5.md * Update ValidateContent_test.py * fix unit tests * Bump pack from version Base to 1.39.6. * Fix unit test * fix unit tests * fix unit tests * Update README.md * PR fixes * PR fixes * TPB fix * TPB fix * TPB fix * TPB remove * no tests --------- Co-authored-by: Content Bot --- Packs/Base/ReleaseNotes/1_39_7.md | 5 + Packs/Base/Scripts/ValidateContent/README.md | 22 +- .../ValidateContent/ValidateContent.py | 885 +++++++++++++----- .../ValidateContent/ValidateContent.yml | 11 +- .../ValidateContent/ValidateContent_test.py | 275 ++++-- .../TestPlaybooks/ValidateContent_-_Test.yml | 328 ------- Packs/Base/pack_metadata.json | 2 +- 7 files changed, 844 insertions(+), 684 deletions(-) create mode 100644 Packs/Base/ReleaseNotes/1_39_7.md delete mode 100644 Packs/Base/TestPlaybooks/ValidateContent_-_Test.yml diff --git a/Packs/Base/ReleaseNotes/1_39_7.md b/Packs/Base/ReleaseNotes/1_39_7.md new file mode 100644 index 000000000000..1ef2e37caec6 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_39_7.md @@ -0,0 +1,5 @@ + +#### Scripts + +##### ValidateContent +Updated the script to utilize up-to-date validation logic from `demisto-sdk`. diff --git a/Packs/Base/Scripts/ValidateContent/README.md b/Packs/Base/Scripts/ValidateContent/README.md index d4026493ac6a..478ea12d05ad 100644 --- a/Packs/Base/Scripts/ValidateContent/README.md +++ b/Packs/Base/Scripts/ValidateContent/README.md @@ -25,13 +25,12 @@ This automation script is used as part of the content validation that runs as pa ## Outputs --- - | **Path** | **Description** | **Type** | -| --- | --- | --- | +| --- | -- | --- | | ValidationResult.Name| Name of validated item. | String | | ValidationResult.Error | The validation error message. | String | | ValidationResult.Line | The code line number in which the error was found in the lint. | String | - +| ValidationResult.ErrorCode | The error code or the name of the linter that identified the issue. | String | ## Script Example @@ -40,16 +39,17 @@ This automation script is used as part of the content validation that runs as pa ## Context Example ``` -{ "ValidationResult": [ { "Name": "MyScript", - "Error": "The docker image tag is not the latest numeric tag, please update it." 
+ "Error": "unterminated string literal (detected at line 166) [syntax]",
+ "Line": 165,
+ "Error Code/Linter": "mypy"
 },
 {
 "Name": "MyScript",
- "Error": "test for membership should be 'not in'",
- "Line": "44"
+ "Error": "The following commands contain duplicated arguments: Command example-my-command, contains multiple appearances of the following arguments message. Please make sure to remove the duplications.",
+ "Error Code/Linter": "IN113"
 }
 ]
}
@@ -59,7 +59,7 @@ This automation script is used as part of the content validation that runs as pa
 
 ### Validation Results
 
-|Name|Error|Line|
-|---|---|---|
-| MyScript | The docker image tag is not the latest numeric tag, please update it. | |
-| MyScript | test for membership should be 'not in' | 44 |
+| Name | Error | Line | Error Code/Linter |
+|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------|--------------------|
+| MyScript | The following commands contain duplicated arguments: Command example-my-command, contains multiple appearances of the following arguments message. Please make sure to remove the duplications. | | IN113 |
+| MyScript | unterminated string literal (detected at line 166) [syntax] | 166 | mypy |
diff --git a/Packs/Base/Scripts/ValidateContent/ValidateContent.py b/Packs/Base/Scripts/ValidateContent/ValidateContent.py
index d9c08ba8d0fc..bcc1cac2001e 100644
--- a/Packs/Base/Scripts/ValidateContent/ValidateContent.py
+++ b/Packs/Base/Scripts/ValidateContent/ValidateContent.py
@@ -1,39 +1,221 @@
-import demistomock as demisto  # noqa: F401
-from CommonServerPython import *  # noqa: F401
-import io
-import json
+import shutil
 import traceback
 import types
-import zipfile
-from base64 import b64decode
-from contextlib import redirect_stderr
-from datetime import datetime
-from pathlib import Path
-from shutil import copy
-from tempfile import TemporaryDirectory, TemporaryFile
-from typing import Any
+from datetime import datetime, timedelta
 
-import git
-from demisto_sdk.commands.common.constants import ENTITY_TYPE_TO_DIR, TYPE_TO_EXTENSION, FileType
-from demisto_sdk.commands.common.content import Content
-from demisto_sdk.commands.common.logger import logging_setup, DEFAULT_CONSOLE_THRESHOLD
-from demisto_sdk.commands.common.tools import find_type
-from demisto_sdk.commands.init.contribution_converter import (
-    AUTOMATION, INTEGRATION, INTEGRATIONS_DIR, SCRIPT, SCRIPTS_DIR,
-    ContributionConverter, get_child_directories, get_child_files)
-from demisto_sdk.commands.lint.lint_manager import LintManager
-from demisto_sdk.commands.split.ymlsplitter import YmlSplitter
-from demisto_sdk.commands.validate.old_validate_manager import OldValidateManager as ValidateManager
+import requests
 from ruamel.yaml import YAML
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from demisto_sdk.commands.common.constants import ENTITY_TYPE_TO_DIR, FileType
+from demisto_sdk.commands.split.ymlsplitter import YmlSplitter
+from demisto_sdk.commands.common.tools import find_type
+from demisto_sdk.commands.common.logger import DEFAULT_CONSOLE_THRESHOLD, logging_setup
+from dataclasses import dataclass, asdict
+from shutil import copy
+from pathlib import Path
+from pkg_resources import get_distribution
+from base64 import b64decode
+from contextlib import contextmanager, redirect_stderr
+import zipfile
+import git
+import io
+import os
+import re
+import json
+import demistomock
as demisto # noqa: F401 +from CommonServerPython import * # noqa: F401 +DEFAULT_CONFIG_CATEGORY = "xsoar_best_practices_path_based_validations" +CONTENT_DIR_PATH = '/tmp/content' +PACKS_DIR_NAME = 'Packs' +CONTENT_REPO_URL = 'https://github.com/demisto/content.git' CACHED_MODULES_DIR = '/tmp/cached_modules' -yaml = YAML() +PRE_COMMIT_TEMPLATE_PATH = os.path.join(CONTENT_DIR_PATH, '.pre-commit-config_template.yaml') +BRANCH_MASTER = 'master' +DEFAULT_ERROR_PATTERN = { + 'regex': re.compile(r'(\/[\w\/\.-]+):(\d+):(\d+): .* : (.*)'), + 'groups': ['file', 'line', 'column', 'details'] +} + +HOOK_ID_TO_PATTERN = { + 'xsoar-lint': DEFAULT_ERROR_PATTERN, + 'debug-statements': { + 'regex': re.compile(r'File\s+"(.+)",\s+line\s+(\d+)(?:.*?\n.*?\^+\n)\s+([^\n]+)'), + 'groups': ['file', 'line', 'details'] + }, + 'check-ast': { + 'regex': re.compile(r'File\s"(Packs/.*?)",\sline\s(\d+)'), + 'groups': ['file', 'line'] + }, + 'mypy': { + 'regex': re.compile(r'(.*?\.py):(\d+): error: ([\s\S]*?)(?=\nPacks\/|\nFound \d+ error)'), + 'groups': ['file', 'line', 'details'] + }, +} +FILE_TYPE_TO_ERROR_TYPE = {'py': 'Code', 'ps1': 'Code', 'yml': 'Settings', 'json': 'Settings', 'md': 'Settings'} +ALLOWED_FILE_TYPES = ['py', 'yml', 'yaml', 'json', 'ps1', 'zip'] +SKIPPED_HOOKS = [ + 'validate-deleted-files', + 'pwsh-test-in-docker', + 'pwsh-analyze-in-docker', + 'coverage-pytest-analyze', + 'merge-pytest-reports', + 'format', + 'validate', + 'validate-content-paths', + 'validate-conf-json', + 'check-merge-conflict', + 'name-tests-test', + 'check-added-large-files', + 'check-case-conflict', + 'poetry-check', + 'autopep8', + 'pycln', + 'ruff', + 'xsoar-lint', + 'check-yaml', + 'check-json', +] + + +class FormattedResultFields: + NAME = 'Name' + ERROR = 'Error' + LINE = 'Line' + ERROR_CODE_OR_LINTER = 'Error Code/Linter' + + +@dataclass +class ValidationResult: + filePath: str = '' + fileType: str = '' + errorCode: str = '' + errorType: str = '' + message: str = '' + name: str = '' + linter: str = '' + severity: str = 'error' + entityType: str = '' + col: int = 0 + row: int = 0 + relatedField: str = '' + ui: bool = True + + def to_dict(self): + return asdict(self) + + +@contextmanager +def ConstantTemporaryDirectory(path): + """ Creates a temporary directory with a constant name. """ + + def cleanup(): + # Cleanup: Remove the directory if exists. + if os.path.exists(path): + shutil.rmtree(path) + demisto.debug(f"Temporary directory {path} cleaned up.") + + cleanup() + os.makedirs(path, exist_ok=True) + yield path + + +def log_demisto_sdk_version(): + try: + demisto.debug(f'Using demisto-sdk version {get_distribution("demisto-sdk").version}') + except Exception as e: + demisto.debug(f'Could not get demisto-sdk version. Error: {e}') + + +def setup_proxy(_args: dict): + if _args.get('use_system_proxy') == 'no': + del os.environ['HTTP_PROXY'] + del os.environ['HTTPS_PROXY'] + del os.environ['http_proxy'] + del os.environ['https_proxy'] + + +def strip_ansi_codes(text): + ansi_escape = re.compile(r''' + \x1B # ESC + \[ # [ + [0-?]* # Parameter bytes + [ -/]* # Intermediate bytes + [@-~] # Final byte + ''', re.VERBOSE) + return ansi_escape.sub('', text) + + +def extract_hook_id(output: str) -> str: + """ + Extracts the hook id from a pre-commit hook's output. + + Args: + output (str): The raw output string. + + Returns: + str: The extracted hook id or None if not found. 
+    """
+    pattern = r"- hook id:\s+([\w-]+)"
+    match = re.search(pattern, output)
+    return match.group(1) if match else ''
+
+
+def parse_pre_commit_output(output: str, pattern_obj: dict) -> list[dict]:
+    """
+    Extracts information from a pre-commit hook's output based on a given pattern.
+
+    Args:
+        output (str): Hook's output string to be processed.
+        pattern_obj (dict): The regular expression pattern object, including the match groups to match against each line.
+
+    Returns:
+        list[dict]: The extracted information, one dictionary per matching line.
+    """
+    results = []
+    demisto.debug(f'parse_pre_commit_output got {pattern_obj}')
+    regex = pattern_obj['regex']
+    group_names = pattern_obj['groups']
+
+    for match in re.finditer(regex, output):
+        match_data = match.groups()
+        # Build the result dictionary based on the group names.
+        result = {}
+        result.update({name: match_data[i] for i, name in enumerate(group_names) if i < len(match_data)})
+        if result not in results:
+            results.append(result)
+
+    return results
+
+
+def get_skipped_hooks():
+    return SKIPPED_HOOKS
+
+
+def resolve_entity_type(file_path: str):
+    """ Resolves the entity type from a file path. """
+    parts = file_path.split("/")
+    if parts[0] == "Packs" and len(parts) > 2 and (entity_type_directory_name := parts[2].lower()):
+        entity_type = entity_type_directory_name[:-1] if entity_type_directory_name.endswith('s') else entity_type_directory_name
+        return entity_type
+    entity_type = "contentpack"
+    return entity_type
+
+
+def get_pack_name(zip_filepath: str) -> str:
+    """ Returns the pack name from the zipped contribution file's metadata.json file. """
+    with zipfile.ZipFile(zip_filepath) as zipped_contrib, zipped_contrib.open('metadata.json') as metadata_file:
+        metadata = json.loads(metadata_file.read())
+    if pack_name := metadata.get('name'):
+        return pack_name
+    demisto.error('Could not find pack name in metadata.json')
+    return 'TmpPack'
 
 
 def _create_pack_base_files(self):
     """
-    Create empty 'README.md', '.secrets-ignore', and '.pack-ignore' files that are expected
+    Creates empty 'README.md', '.secrets-ignore', and '.pack-ignore' files that are expected
     to be in the base directory of a pack
     """
     fp = open(os.path.join(self.pack_dir_path, 'README.md'), 'a')
@@ -46,20 +228,11 @@ def _create_pack_base_files(self):
     fp.close()
 
 
-def get_extracted_code_filepath(extractor: YmlSplitter) -> str:
-    output_path = extractor.get_output_path()
-    base_name = os.path.basename(output_path) if not extractor.base_name else extractor.base_name
-    code_file = f'{output_path}/{base_name}'
-    script = extractor.yml_data['script']
-    lang_type: str = script['type'] if extractor.file_type == 'integration' else extractor.yml_data['type']
-    code_file = f'{code_file}{TYPE_TO_EXTENSION[lang_type]}'
-    return code_file
-
-
 def content_item_to_package_format(
     self, content_item_dir: str, del_unified: bool = True, source_mapping: dict | None = None,  # noqa: F841
-    code_fp_to_row_offset: dict = {}
 ) -> None:
+    from demisto_sdk.commands.init.contribution_converter import AUTOMATION, INTEGRATION, SCRIPT, get_child_files
+
     child_files = get_child_files(content_item_dir)
     for child_file in child_files:
         cf_name_lower = os.path.basename(child_file).lower()
@@ -69,11 +242,11 @@ def content_item_to_package_format(
         file_type = file_type.value if file_type else file_type
         try:
             extractor = YmlSplitter(
-                input=content_item_file_path, file_type=file_type, output=content_item_dir, no_logging=True,
-                no_pipenv=True, no_basic_fmt=True)
+                input=content_item_file_path,
+                output=content_item_dir,
+
file_type=file_type, + ) extractor.extract_to_package_format() - code_fp = get_extracted_code_filepath(extractor) - code_fp_to_row_offset[code_fp] = extractor.lines_inserted_at_code_start except Exception as e: err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \ f'into its component parts.\nError: "{e}"' @@ -82,150 +255,84 @@ def content_item_to_package_format( os.remove(content_item_file_path) -def convert_contribution_to_pack(contrib_converter: ContributionConverter) -> dict: - """Create or updates a pack in the content repo from the contents of a contribution zipfile - +def convert_contribution_to_pack(contrib_converter): + """ + Creates or updates a pack in the Content repo from the contents of a contributed zip file. Args: contrib_converter (ContributionConverter): Contribution contributor object """ - # only create pack_metadata.json and base pack files if creating a new pack + from demisto_sdk.commands.init.contribution_converter import INTEGRATIONS_DIR, SCRIPTS_DIR, get_child_directories + + # Only create pack_metadata.json and base pack files if creating a new pack. if contrib_converter.create_new: if contrib_converter.contribution: - # create pack metadata file + # Create pack metadata file. with (zipfile.ZipFile(contrib_converter.contribution) as zipped_contrib, zipped_contrib.open('metadata.json') as metadata_file): metadata = json.loads(metadata_file.read()) + demisto.debug(f'convert_contribution_to_pack {metadata=}') contrib_converter.create_metadata_file(metadata) - # create base files + + # Create base files. contrib_converter.create_pack_base_files = types.MethodType(_create_pack_base_files, contrib_converter) contrib_converter.create_pack_base_files() - # unpack + + # Unpack. contrib_converter.unpack_contribution_to_dst_pack_directory() - # convert + + # Convert. unpacked_contribution_dirs = get_child_directories(contrib_converter.pack_dir_path) for unpacked_contribution_dir in unpacked_contribution_dirs: contrib_converter.convert_contribution_dir_to_pack_contents(unpacked_contribution_dir) - # extract to package format - code_fp_to_row_offset: dict[str, int] = {} + + # Extract to package format. for pack_subdir in get_child_directories(contrib_converter.pack_dir_path): basename = os.path.basename(pack_subdir) if basename in {SCRIPTS_DIR, INTEGRATIONS_DIR}: - contrib_converter.content_item_to_package_format = types.MethodType(content_item_to_package_format, - contrib_converter) - contrib_converter.content_item_to_package_format( - pack_subdir, del_unified=True, source_mapping=None, code_fp_to_row_offset=code_fp_to_row_offset + contrib_converter.content_item_to_package_format = types.MethodType( + content_item_to_package_format, contrib_converter ) - return code_fp_to_row_offset - - -def get_pack_name(zip_fp: str) -> str: - """returns the pack name from the zipped contribution file's metadata.json file""" - with zipfile.ZipFile(zip_fp) as zipped_contrib, zipped_contrib.open('metadata.json') as metadata_file: - metadata = json.loads(metadata_file.read()) - return metadata.get('name', 'ServerSidePackValidationDefaultName') - -def adjust_linter_row_and_col( - error_output: dict, code_fp_to_row_offset: dict | None = None, - row_offset: int = 2, row_start: int = 1, col_offset: int = 1, col_start: int = 0 -) -> None: - """Update the linter errors row and column numbering - - Accounts for lines inserted during demisto-sdk extract, and that row numbering starts with one. 
We - take the max between the adjusted vector number and the vector start because the lowest the adjusted - vector number should be is its associated vector start number. e.g. the adjusted column number should - never be less than the column start number aka zero - so if the adjusted column number is -1, we set - it to the column start number instead, aka zero. + contrib_converter.content_item_to_package_format( + pack_subdir, del_unified=False, source_mapping=None, + ) - Args: - error_output (Dict): A single validation result dictionary (validate and lint) from the total list - code_fp_to_row_offset (Optional[Dict]): Mapping of file paths to the row offset for that code file - row_offset (int): The number of rows to adjust by - row_start (int): The lowest allowable number for rows - col_offset (int): The number of columns to adjust by - col_start (int): The lowest allowable number for columns - """ - row, col = 'row', 'col' - vector_details = [ - (row, row_offset, row_start), - (col, col_offset, col_start) - ] - try: - for vector, offset, start in vector_details: - if vector in error_output: - # grab and set the row offset from the file to row offset mapping if it exists and we are - # operating on 'row' - if code_fp_to_row_offset and vector == row: - filepath = error_output.get('filePath', '') - if filepath in code_fp_to_row_offset: - offset_for_file = code_fp_to_row_offset.get(filepath) - if isinstance(offset_for_file, int): - offset = offset_for_file - original_vector_value: Any | None = error_output.get(vector) - if original_vector_value: - error_output[vector] = str(max(int(original_vector_value) - offset, start)) - except ValueError as e: - demisto.debug(f'Failed adjusting "{vector}" on validation result {error_output}' - f'\n{e}') - - -def run_validate(file_path: str, json_output_file: str) -> None: - os.environ['DEMISTO_SDK_SKIP_VERSION_CHECK'] = '1' - tests_dir = 'Tests' - if not os.path.exists(tests_dir): - os.makedirs(tests_dir) - with open(f'{tests_dir}/id_set.json', 'w') as f: - json.dump({}, f) - v_manager = ValidateManager( - is_backward_check=False, prev_ver="origin/master", use_git=False, only_committed_files=False, - print_ignored_files=False, skip_conf_json=True, validate_id_set=False, file_path=str(file_path), - validate_all=False, is_external_repo=False, skip_pack_rn_validation=False, print_ignored_errors=False, - silence_init_prints=False, no_docker_checks=False, skip_dependencies=False, id_set_path=None, - staged=False, json_file_path=json_output_file, skip_schema_check=True, create_id_set=False, check_is_unskipped=False) - v_manager.run_validation() - - -def run_lint(file_path: str, json_output_file: str) -> None: - lint_log_dir = os.path.dirname(json_output_file) - lint_manager = LintManager( - input=str(file_path), git=False, all_packs=False, - prev_ver='origin/master', json_file_path=json_output_file - ) - lint_manager.run( - parallel=1, no_flake8=False, no_xsoar_linter=False, no_bandit=False, no_mypy=False, - no_pylint=True, no_coverage=True, coverage_report='', no_vulture=False, no_test=True, no_pwsh_analyze=True, - no_pwsh_test=True, keep_container=False, test_xml='', failure_report=lint_log_dir, docker_timeout=60, - docker_image_flag=None, docker_image_target=None - ) +def prepare_content_pack_for_validation(filename: str, data: bytes, content_dir_path: str) -> str: + from demisto_sdk.commands.init.contribution_converter import ContributionConverter -def prepare_content_pack_for_validation(filename: str, data: bytes, tmp_directory: str) -> tuple[str, dict]: 
- # write zip file data to file system - zip_path = os.path.abspath(os.path.join(tmp_directory, filename)) + # Write zip file data to file system. + zip_path = os.path.abspath(os.path.join(content_dir_path, filename)) with open(zip_path, 'wb') as fp: fp.write(data) pack_name = get_pack_name(zip_path) - contrib_converter = ContributionConverter(name=pack_name, contribution=zip_path, base_dir=tmp_directory) - code_fp_to_row_offset = convert_contribution_to_pack(contrib_converter) - # Call the standalone function and get the raw response + contrib_converter = ContributionConverter(name=pack_name, contribution=zip_path, base_dir=content_dir_path) + convert_contribution_to_pack(contrib_converter) + os.remove(zip_path) - return contrib_converter.pack_dir_path, code_fp_to_row_offset + return contrib_converter.pack_dir_path -def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> tuple[str, dict]: - content = Content(tmp_directory) +def prepare_single_content_item_for_validation(file_name: str, data: bytes, packs_path: str) -> str: + from demisto_sdk.commands.init.contribution_converter import ContributionConverter + pack_name = 'TmpPack' - pack_dir = content.path / 'Packs' / pack_name + pack_path = os.path.join(packs_path, pack_name) + demisto.debug(f'Pack name: {pack_name}') # create pack_metadata.json file in TmpPack - contrib_converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name, - contribution=pack_name) + contrib_converter = ContributionConverter( + name=pack_name, pack_dir_name=pack_name, contribution=pack_name + ) contrib_converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'}) - prefix = '-'.join(filename.split('-')[:-1]) - containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations') - containing_dir.mkdir(exist_ok=True) - is_json = filename.casefold().endswith('.json') + # Determine entity type by filename prefix. + file_name_prefix = '-'.join(file_name.split('-')[:-1]) + containing_dir = os.path.join(pack_path, ENTITY_TYPE_TO_DIR.get(file_name_prefix, 'Integrations')) + os.makedirs(containing_dir, exist_ok=True) + + is_json = file_name.casefold().endswith('.json') data_as_string = data.decode() + yaml = YAML() loaded_data = json.loads(data_as_string) if is_json else yaml.load(data_as_string) if is_json: data_as_string = json.dumps(loaded_data) @@ -233,79 +340,67 @@ def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_d buff = io.StringIO() yaml.dump(loaded_data, buff) data_as_string = buff.getvalue() - # write content item file to file system - file_path = containing_dir / filename + + # Write content item file to file system. 
+ file_path = Path(os.path.join(containing_dir, file_name)) file_path.write_text(data_as_string) file_type = find_type(str(file_path)) file_type = file_type.value if file_type else file_type if is_json or file_type in (FileType.PLAYBOOK.value, FileType.TEST_PLAYBOOK.value): - return str(file_path), {} + return str(file_path) extractor = YmlSplitter( - input=str(file_path), file_type=file_type, output=containing_dir, - no_logging=True, no_pipenv=True, no_basic_fmt=True + input=str(file_path), file_type=file_type, output=containing_dir ) - # validate the resulting package files, ergo set path_to_validate to the package directory that results + # Validate the resulting package files, ergo set path_to_validate to the package directory that results # from extracting the unified yaml to a package format extractor.extract_to_package_format() - code_fp_to_row_offset = {get_extracted_code_filepath(extractor): extractor.lines_inserted_at_code_start} - return extractor.get_output_path(), code_fp_to_row_offset + output_path = extractor.get_output_path() + demisto.debug(f'prepare_single_content_item_for_validation {output_path=}') + return output_path -def validate_content(filename: str, data: bytes, tmp_directory: str) -> list: - json_output_path = os.path.join(tmp_directory, 'validation_res.json') - lint_output_path = os.path.join(tmp_directory, 'lint_res.json') - output_capture = io.StringIO() - with redirect_stderr(output_capture), TemporaryFile(mode='w+') as tmp: - # Setup Demisto SDK's logging. - logging_setup( - calling_function='ValidateContent', - console_threshold='DEBUG' if is_debug_mode() else DEFAULT_CONSOLE_THRESHOLD, - propagate=True - ) - demisto.debug("Finished setting logger.") +def run_validate(path_to_validate: str, json_output_file: str) -> int: + """ + Runs demisto-sdk validations on a specified file path and writes the results to a JSON file. + Args: + path_to_validate (str): The path of the file or directory to be validated. + json_output_file (str): The file path where validation results will be written in JSON format. + + Returns: + int: An exit code indicating the validation status; 0 for success and non-zero for failures. + + """ + from demisto_sdk.commands.validate.config_reader import ConfigReader + from demisto_sdk.commands.validate.initializer import Initializer + from demisto_sdk.commands.validate.validation_results import ResultWriter + from demisto_sdk.commands.validate.validate_manager import ValidateManager + from demisto_sdk.commands.common.constants import ExecutionMode + + result_writer = ResultWriter(json_output_file) + config_reader = ConfigReader(category=DEFAULT_CONFIG_CATEGORY) + initializer = Initializer( + staged=False, + committed_only=False, + file_path=str(path_to_validate), + execution_mode=ExecutionMode.SPECIFIC_FILES + ) + validate_manager = ValidateManager(result_writer, config_reader, initializer, allow_autofix=False) + demisto.debug(f'run_validate validate_manager initialized. 
Running validations: {validate_manager.validators=}')
+    err_file = io.StringIO()
+    with redirect_stderr(err_file):
+        exit_code: int = validate_manager.run_validations()
+    demisto.debug(f'run_validate {exit_code=}')
+    return exit_code
 
-        if filename.endswith('.zip'):
-            path_to_validate, code_fp_to_row_offset = prepare_content_pack_for_validation(
-                filename, data, tmp_directory
-            )
-        else:
-            path_to_validate, code_fp_to_row_offset = prepare_single_content_item_for_validation(
-                filename, data, tmp_directory
-            )
-        run_validate(path_to_validate, json_output_path)
-        run_lint(path_to_validate, lint_output_path)
-
-        demisto.debug("log capture:" + tmp.read())
-
-    all_outputs = []
-    with open(json_output_path) as json_outputs:
-        outputs_as_json = json.load(json_outputs)
-        if outputs_as_json:
-            if type(outputs_as_json) is list:
-                all_outputs.extend(outputs_as_json)
-            else:
-                all_outputs.append(outputs_as_json)
-
-    with open(lint_output_path) as json_outputs:
-        outputs_as_json = json.load(json_outputs)
-        if outputs_as_json:
-            if type(outputs_as_json) is list:
-                for validation in outputs_as_json:
-                    adjust_linter_row_and_col(validation, code_fp_to_row_offset)
-                all_outputs.extend(outputs_as_json)
-            else:
-                all_outputs.append(outputs_as_json)
-    return all_outputs
-
-
-def get_content_modules(content_tmp_dir: str, verify_ssl: bool = True) -> None:
-    """Copies the required content modules for linting from the cached dir
-    The cached dir is updated once a day
+
+def get_content_modules(content_path: str, verify_ssl: bool = True) -> None:
+    """
+    Copies the required content modules for validation and pre-commit from the cached dir. The cached dir is updated
+    once a day.
 
     Args:
-        content_tmp_dir (str): The content tmp dir to copy the content modules to
-        verify_ssl (bool): Whether to verify SSL
+        content_path (str): Path to the Content directory.
+        verify_ssl (bool): Whether to verify SSL.
     """
     modules = [
         {
@@ -359,11 +454,22 @@ def get_content_modules(content_tmp_dir: str, verify_ssl: bool = True) -> None:
             'Config/approved_categories.json',
             'content_path': 'Config',
         },
+        {
+            'file': '.pre-commit-config_template.yaml',
+            'github_url': 'https://raw.githubusercontent.com/demisto/content/master/.pre-commit-config_template.yaml',
+            'content_path': '',
+        },
     ]
 
     for module in modules:
-        content_path = os.path.join(content_tmp_dir, module['content_path'])
-        os.makedirs(content_path, exist_ok=True)
+        demisto.debug(f'get_content_modules getting {module["file"]=}')
+        module_path = os.path.join(content_path, module['content_path'])
+        os.makedirs(Path(module_path), exist_ok=True)
         try:
             cached_module_path = os.path.join(CACHED_MODULES_DIR, module['file'])
             fname = Path(cached_module_path)
@@ -375,80 +481,353 @@ def get_content_modules(content_tmp_dir: str, verify_ssl: bool = True) -> None:
                 res.raise_for_status()
                 with open(cached_module_path, 'wb') as f:
                     f.write(res.content)
-            demisto.debug(f'Copying from {cached_module_path} to {content_path}')
-            copy(cached_module_path, content_path)
+            demisto.debug(f'Copying from {cached_module_path} to {module_path}')
+            copy(cached_module_path, module_path)
         except Exception as e:
             fallback_path = f'/home/demisto/{module["file"]}'
             demisto.debug(f'Failed downloading content module {module["github_url"]} - {e}. '
                           f'Copying from {fallback_path}')
-            copy(fallback_path, content_path)
+            copy(fallback_path, module_path)
+
+
+def run_pre_commit(output_path: Path) -> int:
+    """
+    Runs demisto-sdk pre-commit.
+    Args:
+        output_path (Path): The directory path where the pre-commit results will be written in JSON format.
+
+    Returns:
+        int: An exit code indicating the pre-commit status; 0 for success and non-zero for failures.
+    """
+    from demisto_sdk.commands.pre_commit.pre_commit_command import pre_commit_manager
+    os.environ['DEMISTO_SDK_DISABLE_MULTIPROCESSING'] = 'true'
+    demisto.debug(f'run_pre_commit | {get_skipped_hooks()=} | {PRE_COMMIT_TEMPLATE_PATH=} | {output_path=}')
+    exit_code = pre_commit_manager(
+        skip_hooks=get_skipped_hooks(),
+        all_files=True,
+        run_docker_hooks=False,
+        pre_commit_template_path=Path(PRE_COMMIT_TEMPLATE_PATH),
+        json_output_path=output_path
+    )
+    demisto.debug(f'run_pre_commit {exit_code=}')
+    return exit_code
+
+
+def read_json_results(json_path: Path, results: list | None = None) -> list:
+    """
+    Processes a JSON results file and appends its items to the results list.
+
+    Args:
+        json_path: JSON file path.
+        results: Existing results list.
+
+    Returns:
+        Updated results, with 'file_name' added to each result.
+    """
+    if results is None:
+        results = []
+
+    content = json.loads(json_path.read_text())
+    if not content:
+        return results
+
+    file_name = json_path.stem
+    if isinstance(content, list):
+        for item in content:
+            item['file_name'] = file_name
+        results.extend(content)
+    else:
+        content['file_name'] = file_name
+        results.append(content)
+
+    return results
+
+
+def read_validate_results(json_path: Path):
+    if not json_path.exists():
+        raise DemistoException('Validation Results file does not exist.')
+    raw_outputs = read_json_results(json_path)
+    demisto.debug(f'read_validate_results: {raw_outputs=}')
+
+    results = []
+    for output in raw_outputs:
+        for validation in output.get('validations', []):
+            file_path = validation.get('file path', '')
+            file_type = 'yml' if file_path.endswith(('.yml', '.yaml')) else ''
+            error_code = validation.get('error code', '')
+            message = validation.get('message', '')
+            results.append(
+                ValidationResult(
+                    filePath=str(Path(file_path).absolute()) if file_path else '',
+                    name=Path(file_path).stem,
+                    fileType=file_type,
+                    errorCode=error_code,
+                    errorType='Code' if file_type in {'py', 'ps1'} else 'Settings',
+                    entityType=resolve_entity_type(file_path),
+                    message=message,
+                    linter='validate',
+                )
+            )
+
+    return results
+
+
+def read_pre_commit_results(pre_commit_dir: Path):
+    results = []
+    for output_file in pre_commit_dir.iterdir():
+        raw_outputs = read_json_results(output_file)
+
+        for output in raw_outputs:
+            stdout: str = strip_ansi_codes(output.get('stdout', ''))
+            demisto.debug(f'stripped-output: {stdout}')
+
+            hook_id: str = extract_hook_id(stdout) or output.get('file_name', '')
+            pattern_obj: dict = HOOK_ID_TO_PATTERN.get(hook_id, DEFAULT_ERROR_PATTERN)
+            parsed_results: list[dict] = parse_pre_commit_output(
+                stdout, pattern_obj
+            )
+
+            demisto.debug(f'extracted_data={json.dumps(parsed_results, indent=4)}')
+            for result in parsed_results:
+                file_path = result.get('file', '')
+                # Isolating the file's extension.
+                file_type = '' if not file_path else os.path.splitext(f'{file_path}')[1].lstrip('.')
+                error_type = FILE_TYPE_TO_ERROR_TYPE.get(file_type, '')
+                # The 'check-ast' details value has to be treated individually, as the regex does not capture it properly.
+                if hook_id == 'check-ast':
+                    # Trimming the error metadata info (the first 5 lines) and joining the rest into a single message.
+                    result['details'] = '\n'.join(stdout.splitlines()[5:])
+                details = result['details'] if 'details' in result else ''
+                results.append(
+                    ValidationResult(
+                        filePath=file_path,
+                        fileType=file_type,
+                        name=Path(file_path).stem,
+                        entityType=resolve_entity_type(file_path),
+                        errorType=error_type,
+                        message=details,
+                        linter=hook_id,
+                        col=result.get('column', 0),
+                        row=int(result.get('line', 0)) - 1,  # Normalizing, as the UI adds a module registration line at row=1.
+                    )
+                )
+
+    return results
+
+
+def validate_content(path_to_validate: str) -> tuple[list, list]:
+    """
+    Validates the content items in the given `path_to_validate`, using demisto-sdk's ValidateManager and PreCommitManager.
+
+    Args:
+        path_to_validate: Path to the file/directory to validate.
+
+    Returns:
+        tuple[list, list]: Formatted validation results, and raw validation results.
+    """
+    demisto.info(f'Starting to validate content at {path_to_validate}.')
+
+    output_base_dir = Path('ValidateContentOutput') / f'run-{datetime.now().strftime("%Y%m%d-%H%M%S")}'
+    os.makedirs(output_base_dir, exist_ok=True)
+
+    validations_output_path = output_base_dir / 'validation_res.json'
+    pre_commit_dir = output_base_dir / 'pre-commit-output/'
+    os.makedirs(pre_commit_dir, exist_ok=True)
+
+    # One thread for the `validate` execution and one for `pre_commit`.
+    with ThreadPoolExecutor(max_workers=2) as executor:
+        validate_future = executor.submit(run_validate, path_to_validate, str(validations_output_path))
+        demisto.info('Submitting `run_validate` future.')
+        pre_commit_future = executor.submit(run_pre_commit, pre_commit_dir)
+        demisto.info('Submitting `pre_commit` future.')
+
+        for future in as_completed([validate_future, pre_commit_future]):
+            if future == validate_future:
+                validate_exit_code = future.result(timeout=60)  # One minute timeout.
+                demisto.info(f'Finished running `demisto-sdk validate` with exit code {validate_exit_code}.')
+            else:
+                pre_commit_exit_code = future.result(timeout=60)  # One minute timeout.
+                demisto.info(f'Finished running `demisto-sdk pre-commit` with exit code {pre_commit_exit_code}.')
+
+    # If no errors were found.
+    if not (validate_exit_code or pre_commit_exit_code):
+        return [], []
+
+    raw_validation_results: list[ValidationResult] = []
+    raw_validation_results += read_validate_results(validations_output_path)
+    raw_validation_results += read_pre_commit_results(pre_commit_dir)
+
+    demisto.debug(f'{json.dumps([output.to_dict() for output in raw_validation_results], indent=4)}')
+
+    formatted_results = []
+    for result in raw_validation_results:
+        formatted_results.append({
+            FormattedResultFields.NAME: result.name,
+            FormattedResultFields.ERROR: result.message,
+            FormattedResultFields.LINE: result.row if int(result.row) > 0 else None,
+            FormattedResultFields.ERROR_CODE_OR_LINTER: result.errorCode or result.linter
+        })
+
+    return formatted_results, [output.to_dict() for output in raw_validation_results]
+
+
+def setup_content_repo(content_path: str):
+    """ Sets up a local Content git repository to run demisto-sdk commands against. """
+    content_repo = git.Repo.init(content_path)
+    demisto.debug(f'main created content_repo {os.listdir(content_path)=}')
+
+    # Check if the repository has any commits; make an initial commit if needed.
+    if not content_repo.head.is_valid():
+        # Make an empty initial commit to create the master branch.
+        content_repo.index.commit("Initial commit")
+
+    # Set up the remote branch and fetch it.
+    content_repo.create_remote('origin', CONTENT_REPO_URL)
+    content_repo.remotes.origin.fetch('master', depth=1)
+
+    # Ensure the 'master' branch exists, and check it out.
+    if BRANCH_MASTER not in content_repo.heads:
+        content_repo.create_head(BRANCH_MASTER)
+    content_repo.heads.master.checkout()
+    return content_repo
 
 
 def get_file_name_and_contents(
-        filename: str | None = None,
-        data: str | None = None,
-        entry_id: str | None = None,
+    filename: str | None = None,
+    data: str | None = None,
+    entry_id: str | None = None,
 ):
     if filename and data:
         return filename, b64decode(data)
     elif entry_id:
         file_object = demisto.getFilePath(entry_id)
-
+        demisto.debug(f'{file_object=}')
         with open(file_object['path'], 'rb') as f:
             file_contents = f.read()
         return file_object['name'], file_contents
     return None
 
 
+def setup_content_dir(file_name: str, file_contents: bytes | str, entry_id: str, verify_ssl=False) -> str:
+    """ Sets up the content directory to validate the content items in it. """
+
+    # Set up the content directory path globally, required for demisto-sdk logic.
+    os.environ['DEMISTO_SDK_CONTENT_PATH'] = CONTENT_DIR_PATH
+
+    packs_path = os.path.join(CONTENT_DIR_PATH, PACKS_DIR_NAME)
+    Path.mkdir(Path(packs_path))
+    demisto.debug(f"created packs directory in {packs_path}")
+
+    content_repo = setup_content_repo(CONTENT_DIR_PATH)
+    file_name, file_contents = get_file_name_and_contents(file_name, str(file_contents), entry_id)
+    file_type = file_name.split('.')[-1]
+    if file_type not in ALLOWED_FILE_TYPES:
+        demisto.debug(f'resolved {file_type=}')
+        raise DemistoException(f'{file_name} does not define a content item. Files defining content items can be of '
+                               f'types: {ALLOWED_FILE_TYPES}')
+
+    if isinstance(file_contents, str):
+        size_in_bytes = len(file_contents.encode("utf-8"))
+    else:  # Assuming it's already bytes.
+        size_in_bytes = len(file_contents)
+
+    demisto.debug(
+        f'setup_content_dir preparing content_items for validation: '
+        f'{file_name=}\n|'
+        f' file_content size in bytes={size_in_bytes} \n|'
+        f' {packs_path if packs_path else CONTENT_DIR_PATH}'
+    )
+    if file_name.endswith('.zip'):
+        path_to_validate = prepare_content_pack_for_validation(
+            file_name, file_contents, CONTENT_DIR_PATH
+        )
+    else:
+        path_to_validate = prepare_single_content_item_for_validation(
+            file_name, file_contents, packs_path
+        )
+    demisto.debug(f'setup_content_dir {path_to_validate=}')
+    # Stage the Packs directory (the equivalent of "git add Packs").
+    content_repo.index.add([packs_path])
+
+    os.makedirs(CACHED_MODULES_DIR, exist_ok=True)
+    get_content_modules(CONTENT_DIR_PATH, verify_ssl=verify_ssl)
+    return path_to_validate
+
+
+def setup_envvars():
+    os.environ['DEMISTO_SDK_IGNORE_CONTENT_WARNING'] = "false"
+    os.environ['DEMISTO_SDK_OFFLINE_ENV'] = 'False'
+    os.environ['ARTIFACTS_FOLDER'] = '/tmp/artifacts'
+    os.environ['DEMISTO_SDK_LOG_NO_COLORS'] = 'true'
+    demisto.debug(f'setup_envvars: {os.environ}')
+
+
 def main():
+    setup_envvars()
+    # Save the working directory so it can be restored later, as the working directory changes during runtime.
     cwd = os.getcwd()
-    content_tmp_dir = TemporaryDirectory()
+    demisto.debug(f'{cwd=}')
+
     try:
         args = demisto.args()
-        if args.get('use_system_proxy') == 'no':
-            del os.environ['HTTP_PROXY']
-            del os.environ['HTTPS_PROXY']
-            del os.environ['http_proxy']
-            del os.environ['https_proxy']
-        verify_ssl = argToBoolean(args.get('trust_any_certificate'))
+        demisto.debug(f'Got {args=}')
 
-        content_repo = git.Repo.init(content_tmp_dir.name)
-        content_repo.create_remote('origin', 'https://github.com/demisto/content.git')
-        os.makedirs(CACHED_MODULES_DIR, exist_ok=True)
+        setup_proxy(args)
+        verify_ssl = argToBoolean(args.get('trust_any_certificate'))
 
-        get_content_modules(content_tmp_dir.name, verify_ssl)
+        # Either `filename` and `data` are provided, or an `entry_id`.
+        filename: str = args.get('filename', '')
+        data: bytes | str = args.get('data', b'')
+        entry_id: str = args.get('entry_id', '')
 
-        filename, file_contents = get_file_name_and_contents(
-            args.get('filename'),
-            args.get('data'),
-            args.get('entry_id'),
-        )
+        with ConstantTemporaryDirectory(CONTENT_DIR_PATH) as tmp_dir:
+            demisto.info('Setting up content validation environment.')
+            demisto.debug(f"created {tmp_dir=}")
 
-        os.makedirs(content_tmp_dir.name, exist_ok=True)
-        os.chdir(content_tmp_dir.name)
-
-        result = validate_content(filename, file_contents, content_tmp_dir.name)
-        outputs = []
-        for validation in result:
-            if validation.get('ui') or validation.get('fileType') in {'py', 'ps1', 'yml'}:
-                outputs.append({
-                    'Name': validation.get('name'),
-                    'Error': validation.get('message'),
-                    'Line': validation.get('row'),
-                })
-        return_results(CommandResults(
-            readable_output=tableToMarkdown('Validation Results', outputs, headers=['Name', 'Error', 'Line']),
-            outputs_prefix='ValidationResult',
-            outputs=outputs,
-            raw_response=result,
-        ))
+            # Set up Demisto SDK's logging.
+            logging_setup(
+                calling_function='ValidateContent',
+                console_threshold='DEBUG' if is_debug_mode() else DEFAULT_CONSOLE_THRESHOLD,
+                propagate=True
+            )
+            demisto.debug("Finished setting logger.")
+
+            path_to_validate: str = setup_content_dir(filename, data, entry_id, verify_ssl)
+            demisto.debug("Finished setting content dir.")
+
+            # demisto-sdk commands must run from within the content directory.
+            os.chdir(CONTENT_DIR_PATH)
+            validation_results, raw_outputs = validate_content(path_to_validate)
+            os.chdir(cwd)
+
+            if not raw_outputs:
+                readable_output = 'All validations passed.'
+            else:
+                readable_output = tableToMarkdown(
+                    name='Validation Results',
+                    t=validation_results,
+                    headers=[
+                        FormattedResultFields.NAME,
+                        FormattedResultFields.ERROR,
+                        FormattedResultFields.LINE,
+                        FormattedResultFields.ERROR_CODE_OR_LINTER
+                    ]
+                )
+            return_results(CommandResults(
+                readable_output=readable_output,
+                outputs_prefix='ValidationResult',
+                outputs=validation_results,
+                raw_response=raw_outputs,
+            ))
+
+            demisto.info('Finished validating content.')
     except Exception as e:
         demisto.error(traceback.format_exc())
         return_error(f'Failed to execute ValidateContent. Error: {str(e)}')
     finally:
-        content_tmp_dir.cleanup()
         os.chdir(cwd)
 
 
 if __name__ in ('__main__', '__builtin__', 'builtins'):
+    log_demisto_sdk_version()
     main()
diff --git a/Packs/Base/Scripts/ValidateContent/ValidateContent.yml b/Packs/Base/Scripts/ValidateContent/ValidateContent.yml
index 456c92608bc7..bcf02e434bd4 100644
--- a/Packs/Base/Scripts/ValidateContent/ValidateContent.yml
+++ b/Packs/Base/Scripts/ValidateContent/ValidateContent.yml
@@ -37,12 +37,17 @@ outputs:
   description: The validation error message.
type: String - contextPath: ValidationResult.Line - description: The code line number in which the error was found in the lint. + description: The line number of the code where the linting error was detected. type: String +- contextPath: ValidationResult.ErrorCode + description: The error code or the name of the linter that identified the issue. + type: String + scripttarget: 0 timeout: 600ns runas: DBotWeakRole fromversion: 5.5.0 -dockerimage: demisto/xsoar-tools:1.0.0.1808549 +dockerimage: demisto/xsoar-tools:1.0.0.1895346 tests: -- ValidateContent - Test +- No tests +runonce: false diff --git a/Packs/Base/Scripts/ValidateContent/ValidateContent_test.py b/Packs/Base/Scripts/ValidateContent/ValidateContent_test.py index 70dae448c959..d6f6ab050abe 100644 --- a/Packs/Base/Scripts/ValidateContent/ValidateContent_test.py +++ b/Packs/Base/Scripts/ValidateContent/ValidateContent_test.py @@ -1,98 +1,197 @@ -import os -import pytest +import json +from unittest import mock +from io import BytesIO +from ValidateContent import (ValidationResult, read_validate_results, resolve_entity_type, + HOOK_ID_TO_PATTERN, get_pack_name, strip_ansi_codes, extract_hook_id, parse_pre_commit_output) +import demistomock as demisto -from ValidateContent import get_content_modules, adjust_linter_row_and_col + +def create_mock_zip_file_with_metadata(metadata_content): + """ + Helper function to create a mock zip file with metadata.json + + Args: + metadata_content: + + Returns: + mock_zip: a mock zip file with metadata.json containing metadata_content. + """ + + mock_zip = mock.MagicMock() + mock_metadata_file = BytesIO(json.dumps(metadata_content).encode('utf-8')) + + def mock_open(name, *args, **kwargs): + if name == 'metadata.json': + return mock_metadata_file + raise KeyError(f"No such file: {name}") + + mock_zip.open = mock_open + return mock_zip + + +def test_strip_ansi_codes(): + ansi_text = "\033[31mRed text\033[0m" + assert strip_ansi_codes(ansi_text) == "Red text" + + +def test_extract_hook_id(): + output = "Running hook: check-ast\n- hook id: check-ast\nAn error occurred" + assert extract_hook_id(output) == "check-ast" + assert extract_hook_id("No hook id") == '' + + +def test_parse_pre_commit_output_check_ast(): + output = """check python ast.........................................................Failed +- hook id: check-ast +- exit code: 1 + +Packs/TmpPack/Integrations/HelloWorldTest/HelloWorldTest.py: failed parsing with CPython 3.11.10: + + Traceback (most recent call last): + File "/root/.cache/pre-commit/repopc0svvoh/py_env-python3.11/lib/python3.11/site-packages/pre_commit_hooks/check_ast.py", + line 21, in main + ast.parse(f.read(), filename=filename) + File "/usr/local/lib/python3.11/ast.py", line 50, in parse + return compile(source, filename, mode, flags, + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "Packs/TmpPack/Integrations/HelloWorldTest/HelloWorldTest.py", line 1413 + elif command == 'hello + ^ + SyntaxError: unterminated string literal (detected at line 1413)""" + + pattern_obj = HOOK_ID_TO_PATTERN['check-ast'] + result = parse_pre_commit_output(output, pattern_obj) + assert result == [{'file': 'Packs/TmpPack/Integrations/HelloWorldTest/HelloWorldTest.py', 'line': '1413'}] + + +def test_parse_pre_commit_output_mypy(): + output = """mypy-py3.11..............................................................Failed +- hook id: mypy +- exit code: 1 + +Packs/TAXIIServer/Integrations/TAXII2Server/TAXII2Server.py:791: error: Name +"greet" is not defined [name-defined] + greet(inp) + ^ 
+Packs/TAXIIServer/Integrations/TAXII2Server/TAXII2Server.py:791: error: Name +"inp" is not defined [name-defined] + greet(inp) + ^ +Packs/TAXIIServer/Integrations/TAXII2Server/TAXII2Server.py:794: error: Name +"by" is not defined [name-defined] + by({'arrrr': 'rrrrra', 'rrrraa': 'rapapapu'}) + ^ +Found 3 errors in 1 file (checked 1 source file)""" + pattern_obj = HOOK_ID_TO_PATTERN['mypy'] + result = parse_pre_commit_output(output, pattern_obj) + assert result == [ + { + 'file': 'Packs/TAXIIServer/Integrations/TAXII2Server/TAXII2Server.py', + 'line': '791', + 'details': '''Name +"greet" is not defined [name-defined] + greet(inp) + ^''' + }, + { + 'file': 'Packs/TAXIIServer/Integrations/TAXII2Server/TAXII2Server.py', + 'line': '791', + 'details': '''Name +"inp" is not defined [name-defined] + greet(inp) + ^''' + }, + { + 'file': 'Packs/TAXIIServer/Integrations/TAXII2Server/TAXII2Server.py', + 'line': '794', + 'details': '''Name +"by" is not defined [name-defined] + by({'arrrr': 'rrrrra', 'rrrraa': 'rapapapu'}) + ^''' + } + ] + + +def test_resolve_entity_type(): + assert resolve_entity_type("Packs/SomePack/Integrations/SomeIntegration") == "integration" + assert resolve_entity_type("Packs/SomePack/Scripts/SomeScript") == "script" + assert resolve_entity_type("Packs/SomePack/Playbooks/SomePlaybook") == "playbook" + assert resolve_entity_type("Packs/SomePack/TestPlaybooks/SomeTestPlaybook") == "testplaybook" + assert resolve_entity_type("Packs/SomePack/") == "contentpack" -def test_get_content_modules(tmp_path, requests_mock, monkeypatch): +def test_get_pack_name_success(mocker): """ Given: - - Content temp dir to copy the modules to + A valid zip file path with a metadata.json file containing a pack name. + When: + Calling get_pack_name with the zip file path. + Then: + The function should return the correct pack name from the metadata.json file. + """ + mock_metadata = {'name': 'TestPack'} + mock_metadata_json = json.dumps(mock_metadata) + + mock_zipfile = mocker.MagicMock() + mock_metadata_file = mocker.MagicMock() + mock_metadata_file.read.return_value = mock_metadata_json + # Simulate behaviour of nested context managers. + mock_zipfile.__enter__.return_value.open.return_value.__enter__.return_value = mock_metadata_file + + mocker.patch('zipfile.ZipFile', return_value=mock_zipfile) + + result = get_pack_name('test_pack.zip') + assert result == 'TestPack' + + +def test_get_pack_name_no_name(mocker): + """ + Given: + A valid zip file path with a metadata.json file that doesn't contain a pack name. When: - - Getting content modules + Calling get_pack_name with the zip file path. + Then: + The function should return 'TmpPack' as the default pack name. + """ + mock_metadata = {} + mock_metadata_json = json.dumps(mock_metadata) + + mock_zipfile = mocker.MagicMock() + mock_metadata_file = mocker.MagicMock() + mock_metadata_file.read.return_value = mock_metadata_json + mock_zipfile.__enter__.return_value.open.return_value.__enter__.return_value = mock_metadata_file + + mocker.patch('zipfile.ZipFile', return_value=mock_zipfile) + mock_error = mocker.patch.object(demisto, 'error') + + result = get_pack_name('test_pack.zip') + assert result == 'TmpPack' + mock_error.assert_called_with('Could not find pack name in metadata.json') + +def test_read_validate_results(tmp_path): + """ + Given: + A temporary JSON file with validation results. + When: + Calling read_validate_results with the path to this file. 
Then: - - Verify content modules exist in the temp content dir + The function should return a list of ValidationResult objects. """ - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Packs/Base/Scripts' - '/CommonServerPython/CommonServerPython.py', - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Packs/Base/Scripts' - '/CommonServerPowerShell/CommonServerPowerShell.ps1', - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Tests/demistomock/demistomock.py', - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Tests/demistomock/demistomock.ps1', - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/tox.ini', - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Tests/scripts/dev_envs/pytest/conftest.py' - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Config/approved_usecases.json' - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Config/approved_tags.json' - ) - requests_mock.get( - 'https://raw.githubusercontent.com/demisto/content/master/Config/approved_categories.json' - ) - cached_modules = tmp_path / 'cached_modules' - cached_modules.mkdir() - monkeypatch.setattr('ValidateContent.CACHED_MODULES_DIR', str(cached_modules)) - content_tmp_dir = tmp_path / 'content_tmp_dir' - content_tmp_dir.mkdir() - - get_content_modules(str(content_tmp_dir)) - - assert os.path.isfile(content_tmp_dir / 'Packs/Base/Scripts/CommonServerPython/CommonServerPython.py') - assert os.path.isfile(content_tmp_dir / 'Packs/Base/Scripts/CommonServerPowerShell/CommonServerPowerShell.ps1') - assert os.path.isfile(content_tmp_dir / 'Tests/demistomock/demistomock.py') - assert os.path.isfile(content_tmp_dir / 'Tests/demistomock/demistomock.ps1') - assert os.path.isfile(content_tmp_dir / 'tox.ini') - assert os.path.isfile(content_tmp_dir / 'Tests/scripts/dev_envs/pytest/conftest.py') - assert os.path.isfile(content_tmp_dir / 'Config/approved_usecases.json') - assert os.path.isfile(content_tmp_dir / 'Config/approved_tags.json') - assert os.path.isfile(content_tmp_dir / 'Config/approved_categories.json') - - -row_and_column_adjustment_test_data = [ - ( - {'message': 'blah'}, {'message': 'blah'} - ), - ( - {'message': 'blah', 'row': '1'}, {'message': 'blah', 'row': '1'} - ), - ( - {'message': 'blah', 'row': '2'}, {'message': 'blah', 'row': '1'} - ), - ( - {'message': 'blah', 'col': '0'}, {'message': 'blah', 'col': '0'} - ), - ( - {'message': 'blah', 'col': '1'}, {'message': 'blah', 'col': '0'} - ), - ( - {'message': 'blah', 'row': '456'}, {'message': 'blah', 'row': '454'} - ), - ( - {'message': 'blah', 'col': '50'}, {'message': 'blah', 'col': '49'} - ), - ( - {'message': 'blah', 'row': '30', 'col': '30'}, {'message': 'blah', 'row': '28', 'col': '29'} - ) -] - - -@pytest.mark.parametrize('original_validation_result,expected_output', row_and_column_adjustment_test_data) -def test_adjust_linter_row_and_col(original_validation_result, expected_output): - adjust_linter_row_and_col(original_validation_result) - # after adjustment, the original validation result should match the expected - assert original_validation_result == expected_output + json_file = tmp_path / "validation_results.json" + json_file.write_text(json.dumps([{ + "validations": [{ + "file path": "Packs/TestPack/Scripts/TestScript/TestScript.yml", + "error code": "ST001", + "message": "Test error message" 
+ }] + }])) + + results = read_validate_results(json_file) + + assert len(results) == 1 + assert isinstance(results[0], ValidationResult) + assert results[0].filePath.endswith("Packs/TestPack/Scripts/TestScript/TestScript.yml") + assert results[0].errorCode == "ST001" + assert results[0].message == "Test error message" diff --git a/Packs/Base/TestPlaybooks/ValidateContent_-_Test.yml b/Packs/Base/TestPlaybooks/ValidateContent_-_Test.yml deleted file mode 100644 index 040b19ab5b02..000000000000 --- a/Packs/Base/TestPlaybooks/ValidateContent_-_Test.yml +++ /dev/null @@ -1,328 +0,0 @@ -id: ValidateContent - Test -version: -1 -name: ValidateContent - Test -starttaskid: "0" -tasks: - "0": - id: "0" - taskid: d31a9644-80b6-4cdc-80ff-8fa7c1a5c9b2 - type: start - task: - id: d31a9644-80b6-4cdc-80ff-8fa7c1a5c9b2 - version: -1 - name: "" - iscommand: false - brand: "" - description: '' - nexttasks: - '#none#': - - "1" - - "4" - separatecontext: false - view: |- - { - "position": { - "x": 265, - "y": 50 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - continueonerrortype: "" - isoversize: false - isautoswitchedtoquietmode: false - "1": - id: "1" - taskid: 58fcef75-c421-4f6b-8e2c-e078d089588d - type: regular - task: - id: 58fcef75-c421-4f6b-8e2c-e078d089588d - version: -1 - name: Download unified integration - description: Sends http request. Returns the response as json. - scriptName: http - type: regular - iscommand: false - brand: "" - nexttasks: - '#none#': - - "2" - scriptarguments: - filename: - simple: integration.yml - method: - simple: GET - saveAsFile: - simple: "yes" - url: - simple: https://raw.githubusercontent.com/demisto/demisto-sdk/master/demisto_sdk/tests/test_files/UnifiedIntegrations/Integrations/integration-Symantec_Messaging_Gateway.yml - separatecontext: false - view: |- - { - "position": { - "x": 50, - "y": 545 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - continueonerrortype: "" - isoversize: false - isautoswitchedtoquietmode: false - "2": - id: "2" - taskid: 1243b355-7d89-405a-8e79-c9d7f614428f - type: regular - task: - id: 1243b355-7d89-405a-8e79-c9d7f614428f - version: -1 - name: ValidateContent - description: Runs validation and linting on content items. - scriptName: ValidateContent - type: regular - iscommand: false - brand: "" - scriptarguments: - entry_id: - simple: ${lastCompletedTaskEntries} - separatecontext: false - view: |- - { - "position": { - "x": 50, - "y": 720 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - nexttasks: - '#none#': - - "8" - continueonerrortype: "" - isoversize: false - isautoswitchedtoquietmode: false - "4": - id: "4" - taskid: 234937e9-6b55-4cd9-8aa7-41f28bec867d - type: regular - task: - id: 234937e9-6b55-4cd9-8aa7-41f28bec867d - version: -1 - name: Download integration with errors - description: Sends http request. Returns the response as json. 
- scriptName: http - type: regular - iscommand: false - brand: "" - nexttasks: - '#none#': - - "5" - scriptarguments: - filename: - simple: integration-errors.yml - method: - simple: GET - saveAsFile: - simple: "yes" - url: - simple: https://raw.githubusercontent.com/demisto/content/4bff829d93ffc7fd0b770be7a04a425fd05ec075/Packs/Base/Scripts/ValidateContent/test_data/automationwitherrors.yml - separatecontext: false - continueonerrortype: "" - view: |- - { - "position": { - "x": 480, - "y": 195 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false - "5": - id: "5" - taskid: 646a8c37-e436-406f-8b22-e6c0c87147fc - type: regular - task: - id: 646a8c37-e436-406f-8b22-e6c0c87147fc - version: -1 - name: ValidateContent - description: Runs validation and linting using the Demisto SDK on content items, such as integrations, automations and content packs. This automation script is used as part of the content validation that runs as part of the contribution flow. - scriptName: ValidateContent - type: regular - iscommand: false - brand: "" - nexttasks: - '#none#': - - "6" - scriptarguments: - entry_id: - simple: ${lastCompletedTaskEntries} - separatecontext: false - continueonerrortype: "" - view: |- - { - "position": { - "x": 480, - "y": 370 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false - "6": - id: "6" - taskid: 8f8b5c2b-1121-4215-8b6b-66b962dda8ff - type: condition - task: - id: 8f8b5c2b-1121-4215-8b6b-66b962dda8ff - version: -1 - name: AssertMypyValidationFound - type: condition - iscommand: false - brand: "" - nexttasks: - "yes": - - "7" - separatecontext: false - conditions: - - label: "yes" - condition: - - - operator: isExists - left: - value: - complex: - root: ValidationResult.Error - filters: - - - operator: containsGeneral - left: - value: - simple: ValidationResult.Error - iscontext: true - right: - value: - simple: unterminated string literal - iscontext: true - continueonerrortype: "" - view: |- - { - "position": { - "x": 480, - "y": 545 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false - "7": - id: "7" - taskid: 2c3cb64a-bf4d-4efc-8050-fd01fa4360fa - type: condition - task: - id: 2c3cb64a-bf4d-4efc-8050-fd01fa4360fa - version: -1 - name: ValidateDemistoValiudateErrorFound - type: condition - iscommand: false - brand: "" - nexttasks: - "yes": - - "8" - separatecontext: false - conditions: - - label: "yes" - condition: - - - operator: isExists - left: - value: - complex: - root: ValidationResult.Error - filters: - - - operator: containsGeneral - left: - value: - simple: ValidationResult.Error - iscontext: true - right: - value: - simple: There is no docker image provided - iscontext: true - continueonerrortype: "" - view: |- - { - "position": { - "x": 480, - "y": 720 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false - "8": - id: "8" - taskid: 06a4519d-7189-4c38-8d65-10742ecc4a55 - type: title - task: - id: 06a4519d-7189-4c38-8d65-10742ecc4a55 - version: -1 - name: Done - type: title - iscommand: false - brand: "" - description: '' - separatecontext: false - continueonerrortype: "" - view: |- - { - "position": { - "x": 265, - "y": 895 - } - } - note: false - 
timertriggers: [] - ignoreworker: false - skipunavailable: false - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false -view: |- - { - "linkLabelsPosition": {}, - "paper": { - "dimensions": { - "height": 910, - "width": 810, - "x": 50, - "y": 50 - } - } - } -inputs: [] -outputs: [] -fromversion: 5.5.0 -description: '' diff --git a/Packs/Base/pack_metadata.json b/Packs/Base/pack_metadata.json index 0a73cadfb345..41d9f43c117a 100644 --- a/Packs/Base/pack_metadata.json +++ b/Packs/Base/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Base", "description": "The base pack for Cortex XSOAR.", "support": "xsoar", - "currentVersion": "1.39.6", + "currentVersion": "1.39.7", "author": "Cortex XSOAR", "serverMinVersion": "6.0.0", "url": "https://www.paloaltonetworks.com/cortex",
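
---

A quick way to sanity-check the pre-commit parsing logic introduced by this patch, outside of XSOAR, is to exercise the pure helpers it is built on. The sketch below is a minimal, self-contained reproduction: the `MYPY_PATTERN` regex is copied from the patch's `HOOK_ID_TO_PATTERN`, and `extract_hook_id`/`parse_pre_commit_output` mirror the functions added above, while the `sample` hook output is invented for illustration.

```python
import re

# The 'mypy' entry from HOOK_ID_TO_PATTERN, copied from the patch.
MYPY_PATTERN = {
    'regex': re.compile(r'(.*?\.py):(\d+): error: ([\s\S]*?)(?=\nPacks\/|\nFound \d+ error)'),
    'groups': ['file', 'line', 'details'],
}


def extract_hook_id(output: str) -> str:
    """Pulls the '- hook id: <id>' value out of a pre-commit run's output."""
    match = re.search(r"- hook id:\s+([\w-]+)", output)
    return match.group(1) if match else ''


def parse_pre_commit_output(output: str, pattern_obj: dict) -> list[dict]:
    """Maps each regex match onto the pattern's named groups, skipping duplicates."""
    results = []
    for match in re.finditer(pattern_obj['regex'], output):
        result = {name: match.groups()[i] for i, name in enumerate(pattern_obj['groups'])}
        if result not in results:
            results.append(result)
    return results


# Invented sample of what a failing mypy hook prints.
sample = (
    "mypy-py3.11....Failed\n"
    "- hook id: mypy\n"
    "- exit code: 1\n"
    "\n"
    'Packs/TmpPack/Integrations/Foo/Foo.py:12: error: Name "foo" is not defined [name-defined]\n'
    "Found 1 error in 1 file (checked 1 source file)"
)

assert extract_hook_id(sample) == 'mypy'
print(parse_pre_commit_output(sample, MYPY_PATTERN))
# [{'file': 'Packs/TmpPack/Integrations/Foo/Foo.py', 'line': '12',
#   'details': 'Name "foo" is not defined [name-defined]'}]
```

The hook id selects the pattern (falling back to `DEFAULT_ERROR_PATTERN`), and each match is turned into one record, which is why `read_pre_commit_results` can treat every hook's output uniformly.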