diff --git a/unskript-ctl/unskript_ctl_run.py b/unskript-ctl/unskript_ctl_run.py
index 2002c67f8..dd20172fa 100644
--- a/unskript-ctl/unskript_ctl_run.py
+++ b/unskript-ctl/unskript_ctl_run.py
@@ -186,9 +186,33 @@ def display_check_result(self, checks_output):
         checks_output = self.output_after_merging_checks(checks_output, self.check_uuids)
         self.uglobals.create_property('CHECKS_OUTPUT')
         self.uglobals['CHECKS_OUTPUT'] = checks_output
-        print("Uploading failed objects to S3...")
-        uploader = S3Uploader()
-        uploader.rename_and_upload_failed_objects(checks_output)
+        self.logger.debug("Creating checks output JSON to upload to S3")
+        # print("Uploading failed objects to S3...")
+        # uploader = S3Uploader()
+        # uploader.rename_and_upload_failed_objects(checks_output)
+        now = datetime.now()
+        rfc3339_timestamp = now.isoformat() + 'Z'
+        if self.uglobals.get('CURRENT_EXECUTION_RUN_DIRECTORY'):
+            parent_folder = self.uglobals.get('CURRENT_EXECUTION_RUN_DIRECTORY')
+            dashboard_checks_output_file = f"dashboard_{rfc3339_timestamp}.json"
+            dashboard_checks_output_file_path = os.path.join(parent_folder, dashboard_checks_output_file)
+            try:
+                # Convert checks_output to JSON format
+                checks_output_json = json.dumps(checks_output, indent=2)
+            except json.JSONDecodeError:
+                self.logger.debug(f"Failed to decode JSON response for {self.customer_name}")
+                return
+
+            # Write checks output JSON to a separate file
+            try:
+                if checks_output_json:
+                    self.logger.debug(f"Writing JSON data to dashboard json file")
+                    with open(dashboard_checks_output_file_path, 'w') as json_file:
+                        json_file.write(checks_output_json)
+            except IOError as e:
+                self.logger.debug(f"Failed to write JSON data to {dashboard_checks_output_file_path}: {e}")
+                return
+
         for result in checks_output:
             if result.get('skip') and result.get('skip') is True:
                 idx += 1
diff --git a/unskript-ctl/unskript_upload_results_to_s3.py b/unskript-ctl/unskript_upload_results_to_s3.py
index 3075778be..731cbc684 100644
--- a/unskript-ctl/unskript_upload_results_to_s3.py
+++ b/unskript-ctl/unskript_upload_results_to_s3.py
@@ -87,39 +87,39 @@ def create_s3_folder_path(self):
         return True
 
-    def rename_and_upload_failed_objects(self, checks_output):
-        try:
-            # Convert checks_output to JSON format
-            checks_output_json = json.dumps(checks_output, indent=2)
-        except json.JSONDecodeError:
-            logger.debug(f"Failed to decode JSON response for {self.customer_name}")
-            return
+    # def rename_and_upload_failed_objects(self, checks_output):
+    #     try:
+    #         # Convert checks_output to JSON format
+    #         checks_output_json = json.dumps(checks_output, indent=2)
+    #     except json.JSONDecodeError:
+    #         logger.debug(f"Failed to decode JSON response for {self.customer_name}")
+    #         return
 
-        # Write JSON data to a local file
-        try:
-            logger.debug(f"Writing JSON data to local file: {self.local_file_name}")
-            with open(self.local_file_name, 'w') as json_file:
-                json_file.write(checks_output_json)
-        except IOError as e:
-            logger.debug(f"Failed to write JSON data to local file: {e}")
-            return
+    #     # Write JSON data to a local file
+    #     try:
+    #         logger.debug(f"Writing JSON data to local file: {self.local_file_name}")
+    #         with open(self.local_file_name, 'w') as json_file:
+    #             json_file.write(checks_output_json)
+    #     except IOError as e:
+    #         logger.debug(f"Failed to write JSON data to local file: {e}")
+    #         return
 
-        if not self.create_s3_folder_path():
-            logger.debug("Unable to create bucket")
-            return
+    #     if not self.create_s3_folder_path():
+    #         logger.debug("Unable to create bucket")
+    #         return
 
-        # Upload the JSON file
-        try:
-            logger.debug(f"Uploading file {self.file_name} to {self.bucket_name}/{self.file_path}")
-            self.s3_client.upload_file(self.local_file_name, self.bucket_name, self.file_path)
-            logger.debug(f"File {self.file_name} uploaded successfully to {self.bucket_name}/{self.folder_path}")
-        except NoCredentialsError:
-            logger.debug("Credentials not available")
-        except Exception as e:
-            logger.debug(f"Unable to upload failed objetcs file to S3 bucket: {e}")
-        # Remove the local file after upload
-        logger.debug(f"Removing local file of check outputs json from /tmp: {self.local_file_name}")
-        os.remove(self.local_file_name)
+    #     # Upload the JSON file
+    #     try:
+    #         logger.debug(f"Uploading file {self.file_name} to {self.bucket_name}/{self.file_path}")
+    #         self.s3_client.upload_file(self.local_file_name, self.bucket_name, self.file_path)
+    #         logger.debug(f"File {self.file_name} uploaded successfully to {self.bucket_name}/{self.folder_path}")
+    #     except NoCredentialsError:
+    #         logger.debug("Credentials not available")
+    #     except Exception as e:
+    #         logger.debug(f"Unable to upload failed objetcs file to S3 bucket: {e}")
+    #     # Remove the local file after upload
+    #     logger.debug(f"Removing local file of check outputs json from /tmp: {self.local_file_name}")
+    #     os.remove(self.local_file_name)
 
     def rename_and_upload_other_items(self):
         if not self.create_s3_folder_path():
@@ -137,12 +137,16 @@ def rename_and_upload_other_items(self):
                 for _file in _files:
                     file_list_to_upload.append(os.path.join(parent_dir, _file))
         except:
-            logger.debug(f"Failed to get contents of Execution Run directory")
+            logger.debug("Failed to get contents of Execution Run directory")
 
         for _file in file_list_to_upload:
             base_name, extension = os.path.splitext(os.path.basename(_file))
-            temp_fp = f"{base_name}_{self.ts}{extension}"
-            file_path = os.path.join(self.folder_path, temp_fp)
+            if base_name.startswith("dashboard"):
+                file_path = os.path.join(self.folder_path, os.path.basename(_file))
+            else:
+                temp_fp = f"{base_name}_{self.ts}{extension}"
+                file_path = os.path.join(self.folder_path, temp_fp)
+
             if not self.do_upload_(_file, file_path):
                 logger.debug(f"ERROR: Uploading error for {_file}")