Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adds option for outputting a JSON log #42

Merged
merged 7 commits into from
Nov 16, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,9 +53,12 @@ log = get_logger(NAME)

You can get a logger using the `rich` `RichHandler` by passing `use_rich_handler=True`.

The file logger is disabled by default and can be started by calling `log.start_file_logger(path)`. By default a `TimedRotatingFileHandler` is created. If you want a normal `FileHandler` use `rotate=False`. The file mode defaults to `mode='a'` (append).
The file logger is disabled by default and can be started by calling `log.start_file_logger(path)`. By default a `TimedRotatingFileHandler` is created. If you want a normal `FileHandler` use `rotating=False`. The file mode defaults to `mode='a'` (append). The `TimedRotatingFileHandler` options `when`, `utc`, and `at_time` are available to
`log.start_file_logger` for controlling aspects of the rollover time.

The `SDSSLoger` instance also include an `asyncio_exception_handler` method that can be added to the asyncio event loop to handle exceptions; for example `loop.set_exception_handler(log.asyncio_exception_handler)`.
By default, the file logger is formatted to output a human-readable `log` file. To output a JSON log instead, set `as_json=True` when calling `log.start_file_logger`. This will create a `.json` log file which can be machine-read and more easily parsed for content. To output both a human-readable (`.log`) and JSON log (`.json`), set `with_json=True`.

The `SDSSLogger` instance also include an `asyncio_exception_handler` method that can be added to the asyncio event loop to handle exceptions; for example `loop.set_exception_handler(log.asyncio_exception_handler)`.

Console logging uses the standard `StreamHandler`. It's possible to use the `rich` library [RichHandler](https://rich.readthedocs.io/en/stable/logging.html) instead by
passing `use_rich_handler=True` to `get_logger()`. Additional keyword arguments to `RichHandler` can be passed as a `rich_handler_kwargs` dictionary. In the future the `rich` handler may become the default console logger.
Expand Down
25 changes: 23 additions & 2 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ packaging = ">=20.4"
daemonocle = "^1.0.2"
typing-extensions = "^4.6.3"
rich = "^13.4.2"
python-json-logger = "^2.0.7"

[tool.poetry.group.dev.dependencies]
ipython = [{version = ">=8.14.0", python = ">=3.9"},
Expand Down
102 changes: 94 additions & 8 deletions src/sdsstools/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import datetime
import logging
import os
import pathlib
import re
import shutil
import sys
Expand All @@ -22,6 +23,7 @@
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name
from pythonjsonlogger import jsonlogger
from rich.console import Console
from rich.logging import RichHandler
from rich.text import Text
Expand Down Expand Up @@ -143,6 +145,27 @@
return logging.Formatter.format(self, record_cp)


class CustomJsonFormatter(jsonlogger.JsonFormatter):
    """Custom ``jsonlogger.JsonFormatter`` for the JSON file handler."""

    def add_fields(self, log_record, record, message_dict):
        """Add custom fields to the JSON body.

        Adds an ISO-8601 UTC ``timestamp``, a ``type`` tag, the record's
        ``level`` name, all native ``LogRecord`` attributes and, when the
        record carries exception info, an ``error`` object with the
        exception type and formatted traceback.
        """

        super().add_fields(log_record, record, message_dict)

        if not log_record.get("timestamp"):
            # record.created is a POSIX timestamp. Convert it in UTC so the
            # literal trailing "Z" (Zulu/UTC designator) in the format string
            # is truthful; previously the naive local time was labelled "Z".
            now = datetime.datetime.fromtimestamp(
                record.created, tz=datetime.timezone.utc
            ).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
            log_record["timestamp"] = now

        log_record["type"] = "log"
        log_record["level"] = record.levelname

        # Expose every native LogRecord attribute (name, module, lineno, ...)
        # as a top-level JSON key.
        log_record.update(record.__dict__)

        if record.exc_info:
            log_record["error"] = {
                "type": record.exc_info[0].__name__,
                # jsonlogger stores the formatted traceback under "exc_info".
                "trace": message_dict["exc_info"],
            }


class CustomRichHandler(RichHandler):
"""A slightly custom ``RichHandler`` logging handler."""

Expand Down Expand Up @@ -257,10 +280,13 @@
self.addHandler(self.sh)
self.sh.setLevel(log_level)

# Placeholders for the file handler.
# Placeholders for the file handlers.
self.fh: Union[logging.FileHandler, None] = None
self.log_filename: Union[str, None] = None

self.jh: Union[logging.FileHandler, None] = None
self.json_log_filename: Union[str, None] = None

# A header that precedes every message.
self.header = ""

Expand Down Expand Up @@ -311,6 +337,28 @@
def save_log(self, path: str):
    """Copy the current log file(s) to ``path``.

    The human-readable log is copied to ``path`` itself. If a JSON file
    logger is also running, its file is copied alongside, to ``path`` with
    a ``.json`` extension. (Previously both copies targeted the same
    destination, so the JSON copy clobbered the log copy.)
    """

    assert self.log_filename, "File logger not running."

    dest = os.path.expanduser(path)
    shutil.copyfile(self.log_filename, dest)

    if self.json_log_filename:
        # Write the JSON copy next to the main copy instead of on top of it.
        json_dest = os.path.splitext(dest)[0] + ".json"
        shutil.copyfile(self.json_log_filename, json_dest)

def _set_file_handler(
    self,
    filepath: str,
    suffix: str,
    rotating: bool = True,
    mode: str = "a",
    when: str = "midnight",
    utc: bool = True,
    at_time: Union[str, datetime.time, None] = None,
):
    """Create and return the file handler for the logger.

    Parameters
    ----------
    filepath
        Path to the log file.
    suffix
        Filename suffix applied to rotated files
        (``TimedRotatingFileHandler.suffix``). Ignored when ``rotating``
        is `False`.
    rotating
        If `True`, returns a ``TimedRotatingFileHandler``; otherwise a
        plain ``logging.FileHandler`` opened with ``mode``.
    mode
        File mode for the non-rotating handler.
    when, utc, at_time
        Passed to ``TimedRotatingFileHandler`` to control rollover timing.

    Returns
    -------
    logging.FileHandler
        The configured handler (``TimedRotatingFileHandler`` is a
        ``FileHandler`` subclass).
    """

    if rotating:
        handler = TimedRotatingFileHandler(
            str(filepath), when=when, utc=utc, atTime=at_time
        )
        handler.suffix = suffix  # type: ignore
    else:
        handler = logging.FileHandler(str(filepath), mode=mode)

    return handler

def start_file_logger(
self,
Expand All @@ -322,6 +370,8 @@
when: str = "midnight",
utc: bool = True,
at_time: Union[str, datetime.time] = None,
as_json: bool = False,
with_json: bool = False,
):
"""Start file logging.

Expand All @@ -347,13 +397,24 @@
at_time
The time of day when rollover occurs, when rollover is set to occur
at “midnight” or “on a particular weekday”.
as_json
If `True`, outputs a JSON log instead of a human log
with_json
If `True`, outputs both a JSON log and a human log
"""

log_file_path = os.path.realpath(os.path.expanduser(path))
logdir = os.path.dirname(log_file_path)

SUFFIX = "%Y-%m-%d_%H:%M:%S"

# set the JSON file path name
suffix = pathlib.Path(log_file_path).suffix
if as_json and suffix != ".json":
log_file_path = log_file_path.replace(suffix, ".json")
elif with_json:
json_log_file_path = log_file_path.replace(suffix, ".json")

try:
if not os.path.exists(logdir):
os.makedirs(logdir)
Expand All @@ -367,13 +428,27 @@
if at_time and isinstance(at_time, str):
at_time = datetime.time.fromisoformat(at_time)

if rotating:
self.fh = TimedRotatingFileHandler(
str(log_file_path), when=when, utc=utc, atTime=at_time
# get the file handler
self.fh = self._set_file_handler(
log_file_path,
SUFFIX,
mode=mode,
rotating=rotating,
when=when,
utc=utc,
at_time=at_time,
)

if with_json:
self.jh = self._set_file_handler(
json_log_file_path,
SUFFIX,
mode=mode,
rotating=rotating,
when=when,
utc=utc,
at_time=at_time,
)
self.fh.suffix = SUFFIX # type: ignore
else:
self.fh = logging.FileHandler(str(log_file_path), mode=mode)

except (IOError, OSError, ValueError) as ee:
warnings.warn(
Expand All @@ -383,7 +458,9 @@
)

else:
self.fh.setFormatter(FileFormatter())
# set the correct file or json formatter
formatter = CustomJsonFormatter() if as_json else FileFormatter()
self.fh.setFormatter(formatter)
self.addHandler(self.fh)
self.fh.setLevel(log_level)

Expand All @@ -392,6 +469,15 @@

self.log_filename = log_file_path

# set json file handler and formatter
if with_json and self.jh:
self.jh.setFormatter(CustomJsonFormatter())
self.addHandler(self.jh)
self.jh.setLevel(log_level)
self.json_log_filename = json_log_file_path
if self.warnings_logger:
self.warnings_logger.addHandler(self.jh)

def handle(self, record):
"""Handles a record but first stores it."""

Expand Down
32 changes: 32 additions & 0 deletions test/test_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)

import asyncio
import json
import logging
import logging.handlers
import os
Expand Down Expand Up @@ -217,3 +218,34 @@ def test_catch_exception_rich_logger(tmp_path, mocker):

logger.handle_exceptions(excinfo.type, excinfo.value, excinfo.tb)
emit_mock.assert_called()


def test_as_json_logger(tmp_path):
    """With ``as_json=True`` only a single ``.json`` log file is produced."""

    target = tmp_path / "logs" / "test_log.log"

    log = get_logger(str(uuid.uuid4()))
    log.start_file_logger(target, as_json=True)
    log.info("test message")

    produced = list((tmp_path / "logs").glob("*"))
    assert len(produced) == 1
    only_file = produced[0]
    assert only_file.name.endswith(".json")
    assert not only_file.name.endswith(".log")

    json_path = str(target).replace(".log", ".json")
    with open(json_path) as fh:
        records = [json.loads(line) for line in fh.readlines()]
    assert len(records) == 1
    assert records[0]["message"] == "test message"


def test_with_json_logger(tmp_path):
    """With ``with_json=True`` both a ``.json`` and a ``.log`` file appear."""

    target = tmp_path / "logs" / "test_log.log"

    log = get_logger(str(uuid.uuid4()))
    log.start_file_logger(target, with_json=True)
    log.info("test message")

    produced = sorted((tmp_path / "logs").glob("*"))
    assert len(produced) == 2
    json_file, human_file = produced
    assert json_file.name.endswith(".json")
    assert human_file.name.endswith(".log")