Commit
[Optimize] log console
longfengpili committed Dec 11, 2023
1 parent 46bbbe1 commit e5ce5e0
Showing 4 changed files with 141 additions and 140 deletions.
2 changes: 1 addition & 1 deletion pydbapi/__init__.py
@@ -2,7 +2,7 @@
 # @Author: chunyang.xu
 # @Date: 2023-06-02 15:27:41
 # @Last Modified by: longfengpili
-# @Last Modified time: 2023-11-02 15:01:53
+# @Last Modified time: 2023-12-11 17:24:46
 # @github: https://github.com/longfengpili
 
 
2 changes: 1 addition & 1 deletion pydbapi/api/trino.py
@@ -2,7 +2,7 @@
 # @Author: longfengpili
 # @Date: 2023-06-02 15:27:41
 # @Last Modified by: longfengpili
-# @Last Modified time: 2023-12-04 11:46:32
+# @Last Modified time: 2023-12-11 17:26:13
 # @github: https://github.com/longfengpili
 
 
275 changes: 138 additions & 137 deletions pydbapi/conf/logconf.py
@@ -1,137 +1,138 @@
-# -*- coding: utf-8 -*-
-# @Author: longfengpili
-# @Date: 2023-07-26 17:46:27
-# @Last Modified by: longfengpili
-# @Last Modified time: 2023-08-03 11:58:10
-# @github: https://github.com/longfengpili
-
-
-import re
-import os
-import sys
-import colorlog
-
-AUTO_RULES = ['test_xu']  # table names that may run automatically (substring match is enough)
-REDSHIFT_AUTO_RULES = AUTO_RULES + ['_data_aniland']  # table names Amazon Redshift may run automatically (substring match is enough)
-
-# logging settings
-LOG_BASE_PATH = os.path.join(os.path.expanduser('~'), 'pydbapilog')  # logs can be viewed under the user's home directory
-PROJECT_NAME = re.sub(':?\\\\', '_', os.getcwd())
-PROJECT_NAME = PROJECT_NAME[1:] if PROJECT_NAME.startswith('/') else PROJECT_NAME  # strip the leading '/' on linux
-
-LOGGING_CONFIG = {
-    'version': 1,  # reserved key
-    'disable_existing_loggers': False,  # whether to disable pre-existing logger instances
-    # log record formats
-    'formatters': {
-        # verbose format
-        'standard': {
-            'format': '%(asctime)s.%(msecs)03d - %(threadName)s:%(thread)d - %(name)s - %(levelname)s - %(pathname)s - %(lineno)d - %(message)s',
-            'datefmt': '%Y-%m-%d %H:%M:%S',
-        },
-        # simple format
-        'simple': {
-            'format': '%(asctime)s.%(msecs)03d - %(threadName)s - %(name)s - %(levelname)s - %(filename)s - %(lineno)d - %(message)s',
-            'datefmt': '%Y-%m-%d %H:%M:%S',
-        },
-        # a special message-only format
-        'collect': {
-            'format': '%(message)s'
-        },
-        # color
-        'color': {
-            '()': colorlog.ColoredFormatter,
-            'format': '%(asctime)s.%(msecs)03d - %(threadName)s - %(name)s - %(levelname)s - %(filename)s - %(lineno)d - %(log_color)s%(message)s',
-            'datefmt': '%Y-%m-%d %H:%M:%S',
-            'log_colors': {
-                'CRITICAL': 'bold_red',
-                'ERROR': 'red',
-                'WARNING': 'purple',
-                'INFO': 'green',
-                'DEBUG': 'yellow'
-            }
-        },
-    },
-    # filters
-    'filters': {
-    },
-    # handlers
-    'handlers': {
-        # print to the terminal
-        'console': {
-            'level': 'DEBUG',
-            'filters': [],
-            'class': 'logging.StreamHandler',
-            'formatter': 'color' if sys.stdout.isatty() or any("jupyter" in arg for arg in sys.argv) else 'simple'
-        },
-        # default handler
-        'default': {
-            'level': 'INFO',
-            'class': 'pydbapi.conf.MakeFileHandler',  # creates the log file (and its directory) if missing
-            'filename': os.path.join(LOG_BASE_PATH, f'{PROJECT_NAME}_default.log'),  # log file
-            'when': 'd',  # rotate daily
-            'interval': 1,
-            'backupCount': 30,  # keep at most 30 backups
-            'formatter': 'standard',
-            'encoding': 'utf-8',
-        },
-        'db': {
-            'level': 'INFO',
-            'class': 'pydbapi.conf.MakeFileHandler',  # write to a file, rotate automatically
-            'filename': os.path.join(LOG_BASE_PATH, f'{PROJECT_NAME}_db.log'),  # log file
-            'when': 'd',  # rotate daily
-            'interval': 1,
-            'backupCount': 30,
-            'formatter': 'simple',
-            'encoding': "utf-8"
-        },
-        'sql': {
-            'level': 'INFO',
-            'class': 'logging.handlers.TimedRotatingFileHandler',  # write to a file, rotate automatically
-            'filename': os.path.join(LOG_BASE_PATH, f'{PROJECT_NAME}_sql.log'),  # log file
-            'when': 'd',  # rotate daily
-            'interval': 1,
-            'backupCount': 30,
-            'formatter': 'simple',
-            'encoding': "utf-8"
-        },
-    },
-    'loggers': {
-        # the root logger uses this configuration
-        '': {
-            'handlers': ['console', 'default'],
-            'level': 'INFO',
-            'propagate': True,  # whether to propagate to ancestor loggers
-        },
-        'db': {
-            'handlers': ['console', 'db'],
-            'level': 'INFO',
-            'propagate': False,  # whether to propagate to ancestor loggers
-        },
-        'sql': {
-            'handlers': ['console', 'sql'],
-            'level': 'INFO',
-            'propagate': False,  # whether to propagate to ancestor loggers
-        },
-        'redshift': {
-            'handlers': ['console', 'db'],
-            'level': 'INFO',
-            'propagate': False,  # whether to propagate to ancestor loggers
-        },
-        'sqlite': {
-            'handlers': ['console', 'db'],
-            'level': 'INFO',
-            'propagate': False,  # whether to propagate to ancestor loggers
-        },
-        'mysql': {
-            'handlers': ['console', 'db'],
-            'level': 'INFO',
-            'propagate': False,  # whether to propagate to ancestor loggers
-        },
-        'snowflake': {
-            'handlers': ['console', 'db'],
-            'level': 'INFO',
-            'propagate': False,  # whether to propagate to ancestor loggers
-        },
-    },
-}
+# -*- coding: utf-8 -*-
+# @Author: longfengpili
+# @Date: 2023-07-26 17:46:27
+# @Last Modified by: longfengpili
+# @Last Modified time: 2023-12-11 17:30:43
+# @github: https://github.com/longfengpili
+
+
+import re
+import os
+import sys
+import colorlog
+
+
+AUTO_RULES = ['test_xu']  # table names that may run automatically (substring match is enough)
+REDSHIFT_AUTO_RULES = AUTO_RULES + ['_data_aniland']  # table names Amazon Redshift may run automatically (substring match is enough)
+
+# logging settings
+LOG_BASE_PATH = os.path.join(os.path.expanduser('~'), 'pydbapilog')  # logs can be viewed under the user's home directory
+PROJECT_NAME = re.sub(':?\\\\', '_', os.getcwd())
+PROJECT_NAME = PROJECT_NAME[1:] if PROJECT_NAME.startswith('/') else PROJECT_NAME  # strip the leading '/' on linux
+
+LOGGING_CONFIG = {
+    'version': 1,  # reserved key
+    'disable_existing_loggers': False,  # whether to disable pre-existing logger instances
+    # log record formats
+    'formatters': {
+        # verbose format
+        'standard': {
+            'format': '%(asctime)s.%(msecs)03d - %(threadName)s:%(thread)d - %(name)s - %(levelname)s - %(pathname)s - %(lineno)d - %(message)s',
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+        },
+        # simple format
+        'simple': {
+            'format': '%(asctime)s.%(msecs)03d - %(threadName)s - %(name)s - %(levelname)s - %(filename)s - %(lineno)d - %(message)s',
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+        },
+        # a special message-only format
+        'collect': {
+            'format': '%(message)s'
+        },
+        # color
+        'color': {
+            '()': colorlog.ColoredFormatter,
+            'format': '%(asctime)s.%(msecs)03d - %(threadName)s - %(name)s - %(levelname)s - %(filename)s - %(lineno)d - %(log_color)s%(message)s',
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+            'log_colors': {
+                'CRITICAL': 'bold_red',
+                'ERROR': 'red',
+                'WARNING': 'purple',
+                'INFO': 'green',
+                'DEBUG': 'yellow'
+            }
+        },
+    },
+    # filters
+    'filters': {
+    },
+    # handlers
+    'handlers': {
+        # print to the terminal
+        'console': {
+            'level': 'DEBUG',
+            'filters': [],
+            'class': 'logging.StreamHandler',
+            'formatter': 'color' if sys.stdout.isatty() or any("ipython" in arg for arg in sys.argv) else 'simple'
+        },
+        # default handler
+        'default': {
+            'level': 'INFO',
+            'class': 'pydbapi.conf.MakeFileHandler',  # creates the log file (and its directory) if missing
+            'filename': os.path.join(LOG_BASE_PATH, f'{PROJECT_NAME}_default.log'),  # log file
+            'when': 'd',  # rotate daily
+            'interval': 1,
+            'backupCount': 30,  # keep at most 30 backups
+            'formatter': 'standard',
+            'encoding': 'utf-8',
+        },
+        'db': {
+            'level': 'INFO',
+            'class': 'pydbapi.conf.MakeFileHandler',  # write to a file, rotate automatically
+            'filename': os.path.join(LOG_BASE_PATH, f'{PROJECT_NAME}_db.log'),  # log file
+            'when': 'd',  # rotate daily
+            'interval': 1,
+            'backupCount': 30,
+            'formatter': 'simple',
+            'encoding': "utf-8"
+        },
+        'sql': {
+            'level': 'INFO',
+            'class': 'logging.handlers.TimedRotatingFileHandler',  # write to a file, rotate automatically
+            'filename': os.path.join(LOG_BASE_PATH, f'{PROJECT_NAME}_sql.log'),  # log file
+            'when': 'd',  # rotate daily
+            'interval': 1,
+            'backupCount': 30,
+            'formatter': 'simple',
+            'encoding': "utf-8"
+        },
+    },
+    'loggers': {
+        # the root logger uses this configuration
+        '': {
+            'handlers': ['console', 'default'],
+            'level': 'INFO',
+            'propagate': True,  # whether to propagate to ancestor loggers
+        },
+        'db': {
+            'handlers': ['console', 'db'],
+            'level': 'INFO',
+            'propagate': False,  # whether to propagate to ancestor loggers
+        },
+        'sql': {
+            'handlers': ['console', 'sql'],
+            'level': 'INFO',
+            'propagate': False,  # whether to propagate to ancestor loggers
+        },
+        'redshift': {
+            'handlers': ['console', 'db'],
+            'level': 'INFO',
+            'propagate': False,  # whether to propagate to ancestor loggers
+        },
+        'sqlite': {
+            'handlers': ['console', 'db'],
+            'level': 'INFO',
+            'propagate': False,  # whether to propagate to ancestor loggers
+        },
+        'mysql': {
+            'handlers': ['console', 'db'],
+            'level': 'INFO',
+            'propagate': False,  # whether to propagate to ancestor loggers
+        },
+        'snowflake': {
+            'handlers': ['console', 'db'],
+            'level': 'INFO',
+            'propagate': False,  # whether to propagate to ancestor loggers
+        },
+    },
+}
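Note: the functional change in this commit is the console formatter heuristic, which switches the sys.argv probe from "jupyter" to "ipython". Below is a minimal standalone sketch of that selection logic; pick_console_formatter is a hypothetical name used here for illustration only, not part of pydbapi.

    import sys

    def pick_console_formatter() -> str:
        # Colored output is safe when stdout is a real TTY, or when an
        # IPython-based interpreter appears in sys.argv (such shells can
        # render ANSI colors even though isatty() may return False).
        interactive = sys.stdout.isatty() or any('ipython' in arg for arg in sys.argv)
        return 'color' if interactive else 'simple'

    print(pick_console_formatter())  # 'color' in a terminal, 'simple' when piped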
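Note: pydbapi.conf.MakeFileHandler is referenced by the 'default' and 'db' handlers above but is not shown in this commit. Judging from its comment ("creates the log file if missing") and the TimedRotatingFileHandler-style keywords it receives (when/interval/backupCount), it plausibly resembles the sketch below; this is an assumption, not the project's actual code.

    import os
    from logging.handlers import TimedRotatingFileHandler

    class MakeFileHandler(TimedRotatingFileHandler):
        """Hypothetical reconstruction: ensure the log directory exists."""
        def __init__(self, filename, **kwargs):
            dirname = os.path.dirname(filename)
            if dirname:
                os.makedirs(dirname, exist_ok=True)  # create ~/pydbapilog on first use
            super().__init__(filename, **kwargs)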
2 changes: 1 addition & 1 deletion pydbapi/db/base.py
@@ -2,7 +2,7 @@
 # @Author: longfengpili
 # @Date: 2023-06-02 15:27:41
 # @Last Modified by: longfengpili
-# @Last Modified time: 2023-12-11 17:00:46
+# @Last Modified time: 2023-12-11 17:07:57
 # @github: https://github.com/longfengpili
 
 
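Note: for context, a minimal usage sketch of how LOGGING_CONFIG is consumed (assuming pydbapi is installed and does not already apply the config on import):

    import logging
    import logging.config

    from pydbapi.conf.logconf import LOGGING_CONFIG

    logging.config.dictConfig(LOGGING_CONFIG)  # wire up formatters/handlers/loggers
    sqllogger = logging.getLogger('sql')       # console + <PROJECT_NAME>_sql.log
    sqllogger.info('select 1;')                # INFO and above reach both handlers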
