[Feature] Support using other file handlers #1188

Merged (12 commits) on Aug 30, 2023
43 changes: 35 additions & 8 deletions mmengine/logging/logger.py
@@ -6,6 +6,7 @@
 import warnings
 from getpass import getuser
 from logging import Logger, LogRecord
+from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
 from socket import gethostname
 from typing import Optional, Union

@@ -170,6 +171,19 @@ class MMLogger(Logger, ManagerMixin):
         log_level (str): The log level of the handler. Defaults to
             'INFO'. If log level is 'DEBUG', distributed logs will be saved
             during distributed training.
+        log_rotating (dict, optional): The config dict of a file handler
+            that rotates log files at a certain point. If ``log_rotating``
+            is None, a plain ``FileHandler`` that does not rotate the log
+            file is added to the logger. The ``type`` key only accepts
+            ``'time'`` or ``'size'``; the remaining key-value pairs are the
+            keyword arguments of ``logging.handlers.TimedRotatingFileHandler``
+            or ``logging.handlers.RotatingFileHandler`` respectively, e.g.
+            log_rotating = dict(
+                type='time',
+                when='MIDNIGHT',
+                interval=1,
+                backupCount=365)
+            Defaults to None.
         file_mode (str): The file mode used to open log file. Defaults to 'w'.
         distributed (bool): Whether to save distributed logs, Defaults to
             false.
@@ -180,6 +194,7 @@ def __init__(self,
                  logger_name='mmengine',
                  log_file: Optional[str] = None,
                  log_level: Union[int, str] = 'INFO',
+                 log_rotating: Optional[dict] = None,
                  file_mode: str = 'w',
                  distributed=False):
         Logger.__init__(self, logger_name)
@@ -223,14 +238,26 @@ def __init__(self,
             # Save multi-ranks logs if distributed is True. The logs of rank0
             # will always be saved.
             if global_rank == 0 or is_distributed:
-                # Here, the default behaviour of the official logger is 'a'.
-                # Thus, we provide an interface to change the file mode to
-                # the default behaviour. `FileHandler` is not supported to
-                # have colors, otherwise it will appear garbled.
-                file_handler = logging.FileHandler(log_file, file_mode)
-                # `StreamHandler` record year, month, day hour, minute,
-                # and second timestamp. file_handler will only record logs
-                # without color to avoid garbled code saved in files.
+                if log_rotating is not None:
+                    assert 'type' in log_rotating, \
+                        'You should specify the `type` of log_rotating.'
+                    log_rotating_type = log_rotating.pop('type')
+                    if log_rotating_type == 'time':
+                        file_handler = TimedRotatingFileHandler(filename=log_file, **log_rotating)
+                    elif log_rotating_type == 'size':
+                        file_handler = RotatingFileHandler(filename=log_file, **log_rotating)
+                    else:
+                        raise ValueError('The type of log_rotating should be `time` or `size`, '
+                                         f'but got {log_rotating_type}.')
+                else:
+                    # Here, the default behaviour of the official logger is 'a'.
+                    # Thus, we provide an interface to change the file mode to
+                    # the default behaviour. `FileHandler` is not supported to
+                    # have colors, otherwise it will appear garbled.
+                    file_handler = logging.FileHandler(log_file, file_mode)
+                # `StreamHandler` record year, month, day hour, minute,
+                # and second timestamp. file_handler will only record logs
+                # without color to avoid garbled code saved in files.
                 file_handler.setFormatter(
                     MMFormatter(color=False, datefmt='%Y/%m/%d %H:%M:%S'))
                 file_handler.setLevel(log_level)
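For context, here is a minimal usage sketch of the new `log_rotating` option based on the signature above. The instance names and file names are illustrative only, and it assumes `MMLogger.get_instance` forwards these keyword arguments to `__init__`, as is usual for `ManagerMixin` subclasses:

```python
from mmengine.logging import MMLogger

# Time-based rotation: roll the log over at midnight and keep 365 backups.
# All keys except `type` are passed to
# logging.handlers.TimedRotatingFileHandler.
time_logger = MMLogger.get_instance(
    'demo_time',
    log_file='run_time.log',
    log_rotating=dict(type='time', when='MIDNIGHT', interval=1,
                      backupCount=365))
time_logger.info('This log file rotates at midnight.')

# Size-based rotation: roll the log over once it exceeds ~10 MB and keep 5
# backups. All keys except `type` go to logging.handlers.RotatingFileHandler.
size_logger = MMLogger.get_instance(
    'demo_size',
    log_file='run_size.log',
    log_rotating=dict(type='size', maxBytes=10 * 1024 * 1024, backupCount=5))
size_logger.info('This log file rotates by size.')
```

Note that the new code pops `type` from `log_rotating`, so the same config dict should not be reused for a second logger.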
2 changes: 1 addition & 1 deletion mmengine/model/base_model/data_preprocessor.py
@@ -293,7 +293,7 @@ def forward(self, data: dict, training: bool = False) -> Union[dict, list]:
         else:
             raise TypeError('Output of `cast_data` should be a dict of '
                             'list/tuple with inputs and data_samples, '
-                            f'but got {type(data)} {data}')
+                            f'but got {type(data)}: {data}')
         data['inputs'] = batch_inputs
         data.setdefault('data_samples', None)
         return data