From 829f97a46f19e08b2c65bbabaced16c87269ab73 Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 2023 13:37:30 -0500 Subject: [PATCH 1/7] adding option for json logger --- src/sdsstools/logger.py | 98 +++++++++++++++++++++++++++++++++++++---- test/test_logger.py | 32 ++++++++++++++ 2 files changed, 121 insertions(+), 9 deletions(-) diff --git a/src/sdsstools/logger.py b/src/sdsstools/logger.py index b4d7341..67c5336 100644 --- a/src/sdsstools/logger.py +++ b/src/sdsstools/logger.py @@ -8,8 +8,10 @@ import copy import datetime +import json import logging import os +import pathlib import re import shutil import sys @@ -22,6 +24,7 @@ from pygments import highlight from pygments.formatters import TerminalFormatter from pygments.lexers import get_lexer_by_name +from pythonjsonlogger import jsonlogger from rich.console import Console from rich.logging import RichHandler from rich.text import Text @@ -142,6 +145,22 @@ def format(self, record): return logging.Formatter.format(self, record_cp) +class CustomJsonFormatter(jsonlogger.JsonFormatter): + """ Custom `jsonlogger.JsonFormatter` for the JSON file handler """ + + def add_fields(self, log_record, record, message_dict): + """ Add custom fields to the JSON body """ + super().add_fields(log_record, record, message_dict) + if not log_record.get('timestamp'): + now = datetime.datetime.fromtimestamp(record.created).strftime('%Y-%m-%dT%H:%M:%S.%fZ') # noqa: E501 + log_record["timestamp"] = now + log_record["type"] = "log" + log_record["level"] = record.levelname + log_record.update(record.__dict__) + if record.exc_info: + log_record['error'] = {'type': record.exc_info[0].__name__, + 'trace': message_dict['exc_info']} + class CustomRichHandler(RichHandler): """A slightly custom ``RichHandler`` logging handler.""" @@ -257,10 +276,13 @@ def init( self.addHandler(self.sh) self.sh.setLevel(log_level) - # Placeholders for the file handler. + # Placeholders for the file handlers. 
self.fh: Union[logging.FileHandler, None] = None self.log_filename: Union[str, None] = None + self.jh: Union[logging.FileHandler, None] = None + self.json_log_filename: Union[str, None] = None + # A header that precedes every message. self.header = "" @@ -311,6 +333,23 @@ def asyncio_exception_handler(self, loop, context): def save_log(self, path: str): assert self.log_filename, "File logger not running." shutil.copyfile(self.log_filename, os.path.expanduser(path)) + if self.json_log_filename: + assert self.json_log_filename, "JSON file logger not running." + shutil.copyfile(self.json_log_filename, os.path.expanduser(path)) + + def _set_file_handler(self, filepath: str, suffix: str, rotating: bool = True, + mode: str = 'a', + when: str = 'midnight', + utc: bool = True, + at_time: Union[str, datetime.time] = None): + if rotating: + fh = TimedRotatingFileHandler( + str(filepath), when=when, utc=utc, atTime=at_time + ) + fh.suffix = suffix # type: ignore + else: + fh = logging.FileHandler(str(filepath), mode=mode) + return fh def start_file_logger( self, @@ -322,6 +361,8 @@ def start_file_logger( when: str = "midnight", utc: bool = True, at_time: Union[str, datetime.time] = None, + as_json: bool = False, + with_json: bool = False, ): """Start file logging. @@ -349,11 +390,22 @@ def start_file_logger( at “midnight” or “on a particular weekday”. 
""" + # suffix = pathlib.Path(path).suffix + # if as_json and suffix != '.json': + # path = path.replace(suffix, '.json') + log_file_path = os.path.realpath(os.path.expanduser(path)) logdir = os.path.dirname(log_file_path) SUFFIX = "%Y-%m-%d_%H:%M:%S" + # set the JSON file path name + suffix = pathlib.Path(log_file_path).suffix + if as_json and suffix != '.json': + log_file_path = log_file_path.replace(suffix, '.json') + elif with_json: + json_log_file_path = log_file_path.replace(suffix, '.json') + try: if not os.path.exists(logdir): os.makedirs(logdir) @@ -367,13 +419,30 @@ def start_file_logger( if at_time and isinstance(at_time, str): at_time = datetime.time.fromisoformat(at_time) - if rotating: - self.fh = TimedRotatingFileHandler( - str(log_file_path), when=when, utc=utc, atTime=at_time - ) - self.fh.suffix = SUFFIX # type: ignore - else: - self.fh = logging.FileHandler(str(log_file_path), mode=mode) + # if rotating: + # self.fh = TimedRotatingFileHandler( + # str(log_file_path), when=when, utc=utc, atTime=at_time + # ) + # self.fh.suffix = SUFFIX # type: ignore + # else: + # self.fh = logging.FileHandler(str(log_file_path), mode=mode) + self.fh = self._set_file_handler(log_file_path, SUFFIX, mode=mode, + rotating=rotating, when=when, utc=utc, + at_time=at_time) + + if with_json: + self.jh = self._set_file_handler(json_log_file_path, SUFFIX, mode=mode, + rotating=rotating, when=when, utc=utc, + at_time=at_time) + + # if with_json: + # if rotating: + # self.jh = TimedRotatingFileHandler( + # str(log_file_path), when=when, utc=utc, atTime=at_time + # ) + # self.jh.suffix = SUFFIX # type: ignore + # else: + # self.jh = logging.FileHandler(str(log_file_path), mode=mode) except (IOError, OSError, ValueError) as ee: warnings.warn( @@ -383,7 +452,9 @@ def start_file_logger( ) else: - self.fh.setFormatter(FileFormatter()) + #json_fmt = json.dumps('%(timestamp)s %(level)s %(name)s %(message)s') + formatter = CustomJsonFormatter() if as_json else FileFormatter() + 
self.fh.setFormatter(formatter) self.addHandler(self.fh) self.fh.setLevel(log_level) @@ -392,6 +463,15 @@ def start_file_logger( self.log_filename = log_file_path + if with_json and self.jh: + self.jh.setFormatter(CustomJsonFormatter()) + self.addHandler(self.jh) + self.jh.setLevel(log_level) + self.json_log_filename = json_log_file_path + if self.warnings_logger: + self.warnings_logger.addHandler(self.jh) + + def handle(self, record): """Handles a record but first stores it.""" diff --git a/test/test_logger.py b/test/test_logger.py index 29e3b9b..b12543b 100644 --- a/test/test_logger.py +++ b/test/test_logger.py @@ -7,6 +7,7 @@ # @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause) import asyncio +import json import logging import logging.handlers import os @@ -217,3 +218,34 @@ def test_catch_exception_rich_logger(tmp_path, mocker): logger.handle_exceptions(excinfo.type, excinfo.value, excinfo.tb) emit_mock.assert_called() + + +def test_as_json_logger(tmp_path): + log_file = tmp_path / "logs" / "test_log.log" + + logger1 = get_logger(str(uuid.uuid4())) + logger1.start_file_logger(log_file, as_json=True) + logger1.info("test message") + + files = list((tmp_path / "logs").glob("*")) + assert len(files) == 1 + assert files[0].name.endswith(".json") + assert not files[0].name.endswith(".log") + + with open(str(log_file).replace(".log", ".json")) as f: + data = [json.loads(i) for i in f.readlines()] + assert len(data) == 1 + assert data[0]["message"] == "test message" + + +def test_with_json_logger(tmp_path): + log_file = tmp_path / "logs" / "test_log.log" + + logger1 = get_logger(str(uuid.uuid4())) + logger1.start_file_logger(log_file, with_json=True) + logger1.info("test message") + + files = list(sorted((tmp_path / "logs").glob("*"))) + assert len(files) == 2 + assert files[0].name.endswith(".json") + assert files[1].name.endswith(".log") From d51a836f5dcbfa82bd20b137e101906aec0cac73 Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 
2023 13:37:38 -0500 Subject: [PATCH 2/7] adding packge dep --- poetry.lock | 25 +++++++++++++++++++++++-- pyproject.toml | 1 + 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index bb9915b..add0805 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "appnope" @@ -1212,6 +1212,17 @@ termcolor = ">=2.1.0" [package.extras] dev = ["black", "flake8", "pre-commit"] +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + [[package]] name = "pywin32-ctypes" version = "0.2.2" @@ -1235,6 +1246,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1242,8 +1254,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1260,6 +1279,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1267,6 +1287,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1601,4 +1622,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "a950cdb63540c55a050bd838f83f1fb94beef0535aa99484c8afbd2ec2bf5e93" +content-hash = "a3fd9720daceb327c1451162247c6909a251f8fd651aa9549737f1abcaa343f8" diff --git a/pyproject.toml b/pyproject.toml index 0fad79d..c4df7bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ packaging = ">=20.4" daemonocle = "^1.0.2" typing-extensions = "^4.6.3" rich = "^13.4.2" +python-json-logger = "^2.0.7" [tool.poetry.group.dev.dependencies] ipython = [{version = ">=8.14.0", python = ">=3.9"}, From c7317a47a88d3712b0f69096b4b576a8b7395590 Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 2023 13:45:04 -0500 Subject: [PATCH 3/7] updating readme --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e77257c..cdc2881 100644 --- a/README.md +++ b/README.md @@ -53,9 +53,12 @@ log = get_logger(NAME) You can get a logger using the `rich` `RichHandler` by passing `use_rich_handler=True`. -The file logger is disabled by default and can be started by calling `log.start_file_logger(path)`. By default a `TimedRotatingFileHandler` is created. 
If you want a normal `FileHandler` use `rotate=False`. The file mode defaults to `mode='a'` (append). +The file logger is disabled by default and can be started by calling `log.start_file_logger(path)`. By default a `TimedRotatingFileHandler` is created. If you want a normal `FileHandler` use `rotating=False`. The file mode defaults to `mode='a'` (append). The `TimedRotatingFileHandler` options `when`, `utc`, and `at_time` are available to +`log.start_file_logger` for controlling aspects of the rollover time. -The `SDSSLoger` instance also include an `asyncio_exception_handler` method that can be added to the asyncio event loop to handle exceptions; for example `loop.set_exception_handler(log.asyncio_exception_handler)`. +By default, the file logger is formatted to output a human-readable `log` file. To output a JSON log instead, set `as_json=True` when calling `log.start_file_logger`. This will create a `.json` log file which can be machine-read and more easily parsed for content. To output both a human-readable (`.log`) and JSON log (`.json`), set `with_json=True`. + +The `SDSSLogger` instance also includes an `asyncio_exception_handler` method that can be added to the asyncio event loop to handle exceptions; for example `loop.set_exception_handler(log.asyncio_exception_handler)`. Console logging uses the standard `StreamHandler`. It's possible to use the `rich` library [RichHandler](https://rich.readthedocs.io/en/stable/logging.html) instead by passing `use_rich_handler=True` to `get_logger()`. Additional keyword arguments to `RichHandler` can be passed as a `rich_handler_kwargs` dictionary. In the future the `rich` handler may become the default console logger.
From 8149fb638c379ea3d2013aea3aeca4fffcf3027f Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 2023 13:52:08 -0500 Subject: [PATCH 4/7] linting --- src/sdsstools/logger.py | 21 +++------------------ 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/src/sdsstools/logger.py b/src/sdsstools/logger.py index 67c5336..455713f 100644 --- a/src/sdsstools/logger.py +++ b/src/sdsstools/logger.py @@ -8,7 +8,6 @@ import copy import datetime -import json import logging import os import pathlib @@ -419,13 +418,7 @@ def start_file_logger( if at_time and isinstance(at_time, str): at_time = datetime.time.fromisoformat(at_time) - # if rotating: - # self.fh = TimedRotatingFileHandler( - # str(log_file_path), when=when, utc=utc, atTime=at_time - # ) - # self.fh.suffix = SUFFIX # type: ignore - # else: - # self.fh = logging.FileHandler(str(log_file_path), mode=mode) + # get the file handler self.fh = self._set_file_handler(log_file_path, SUFFIX, mode=mode, rotating=rotating, when=when, utc=utc, at_time=at_time) @@ -435,15 +428,6 @@ def start_file_logger( rotating=rotating, when=when, utc=utc, at_time=at_time) - # if with_json: - # if rotating: - # self.jh = TimedRotatingFileHandler( - # str(log_file_path), when=when, utc=utc, atTime=at_time - # ) - # self.jh.suffix = SUFFIX # type: ignore - # else: - # self.jh = logging.FileHandler(str(log_file_path), mode=mode) - except (IOError, OSError, ValueError) as ee: warnings.warn( "log file {0!r} could not be opened for " @@ -452,7 +436,7 @@ def start_file_logger( ) else: - #json_fmt = json.dumps('%(timestamp)s %(level)s %(name)s %(message)s') + # set the correct file or json formatter formatter = CustomJsonFormatter() if as_json else FileFormatter() self.fh.setFormatter(formatter) self.addHandler(self.fh) @@ -463,6 +447,7 @@ def start_file_logger( self.log_filename = log_file_path + # set json file handler and formatter if with_json and self.jh: self.jh.setFormatter(CustomJsonFormatter()) self.addHandler(self.jh) 
From c922cb57a2f50263cd56c8d3fef14be039e06b35 Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 2023 13:53:11 -0500 Subject: [PATCH 5/7] updating docstring --- src/sdsstools/logger.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/sdsstools/logger.py b/src/sdsstools/logger.py index 455713f..7d341fd 100644 --- a/src/sdsstools/logger.py +++ b/src/sdsstools/logger.py @@ -387,6 +387,10 @@ def start_file_logger( at_time The time of day when rollover occurs, when rollover is set to occur at “midnight” or “on a particular weekday”. + as_json + If `True`, outputs a JSON log instead of a human log + with_json + If `True`, outputs both a JSON log and a human log """ # suffix = pathlib.Path(path).suffix From f77176283cad0140d4ca7a42899a511df5ebdfcb Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 2023 13:53:43 -0500 Subject: [PATCH 6/7] more cleanup --- src/sdsstools/logger.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/sdsstools/logger.py b/src/sdsstools/logger.py index 7d341fd..b9eadac 100644 --- a/src/sdsstools/logger.py +++ b/src/sdsstools/logger.py @@ -393,10 +393,6 @@ def start_file_logger( If `True`, outputs both a JSON log and a human log """ - # suffix = pathlib.Path(path).suffix - # if as_json and suffix != '.json': - # path = path.replace(suffix, '.json') - log_file_path = os.path.realpath(os.path.expanduser(path)) logdir = os.path.dirname(log_file_path) From dfec62624879b4eebe497e062581b64e6f03ecaa Mon Sep 17 00:00:00 2001 From: Brian Cherinka Date: Sun, 12 Nov 2023 13:56:08 -0500 Subject: [PATCH 7/7] linting --- src/sdsstools/logger.py | 63 +++++++++++++++++++++++++++-------------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/src/sdsstools/logger.py b/src/sdsstools/logger.py index b9eadac..e694c73 100644 --- a/src/sdsstools/logger.py +++ b/src/sdsstools/logger.py @@ -144,21 +144,26 @@ def format(self, record): return logging.Formatter.format(self, record_cp) + class 
CustomJsonFormatter(jsonlogger.JsonFormatter): - """ Custom `jsonlogger.JsonFormatter` for the JSON file handler """ + """Custom `jsonlogger.JsonFormatter` for the JSON file handler""" def add_fields(self, log_record, record, message_dict): - """ Add custom fields to the JSON body """ + """Add custom fields to the JSON body""" super().add_fields(log_record, record, message_dict) - if not log_record.get('timestamp'): - now = datetime.datetime.fromtimestamp(record.created).strftime('%Y-%m-%dT%H:%M:%S.%fZ') # noqa: E501 + if not log_record.get("timestamp"): + now = datetime.datetime.fromtimestamp(record.created).strftime( + "%Y-%m-%dT%H:%M:%S.%fZ" + ) # noqa: E501 log_record["timestamp"] = now log_record["type"] = "log" log_record["level"] = record.levelname log_record.update(record.__dict__) if record.exc_info: - log_record['error'] = {'type': record.exc_info[0].__name__, - 'trace': message_dict['exc_info']} + log_record["error"] = { + "type": record.exc_info[0].__name__, + "trace": message_dict["exc_info"], + } class CustomRichHandler(RichHandler): @@ -336,11 +341,16 @@ def save_log(self, path: str): assert self.json_log_filename, "JSON file logger not running." 
shutil.copyfile(self.json_log_filename, os.path.expanduser(path)) - def _set_file_handler(self, filepath: str, suffix: str, rotating: bool = True, - mode: str = 'a', - when: str = 'midnight', - utc: bool = True, - at_time: Union[str, datetime.time] = None): + def _set_file_handler( + self, + filepath: str, + suffix: str, + rotating: bool = True, + mode: str = "a", + when: str = "midnight", + utc: bool = True, + at_time: Union[str, datetime.time] = None, + ): if rotating: fh = TimedRotatingFileHandler( str(filepath), when=when, utc=utc, atTime=at_time @@ -400,10 +410,10 @@ def start_file_logger( # set the JSON file path name suffix = pathlib.Path(log_file_path).suffix - if as_json and suffix != '.json': - log_file_path = log_file_path.replace(suffix, '.json') + if as_json and suffix != ".json": + log_file_path = log_file_path.replace(suffix, ".json") elif with_json: - json_log_file_path = log_file_path.replace(suffix, '.json') + json_log_file_path = log_file_path.replace(suffix, ".json") try: if not os.path.exists(logdir): @@ -419,14 +429,26 @@ def start_file_logger( at_time = datetime.time.fromisoformat(at_time) # get the file handler - self.fh = self._set_file_handler(log_file_path, SUFFIX, mode=mode, - rotating=rotating, when=when, utc=utc, - at_time=at_time) + self.fh = self._set_file_handler( + log_file_path, + SUFFIX, + mode=mode, + rotating=rotating, + when=when, + utc=utc, + at_time=at_time, + ) if with_json: - self.jh = self._set_file_handler(json_log_file_path, SUFFIX, mode=mode, - rotating=rotating, when=when, utc=utc, - at_time=at_time) + self.jh = self._set_file_handler( + json_log_file_path, + SUFFIX, + mode=mode, + rotating=rotating, + when=when, + utc=utc, + at_time=at_time, + ) except (IOError, OSError, ValueError) as ee: warnings.warn( @@ -456,7 +478,6 @@ def start_file_logger( if self.warnings_logger: self.warnings_logger.addHandler(self.jh) - def handle(self, record): """Handles a record but first stores it."""