From 4e6e865177909933625a947f5dbc052b71357f26 Mon Sep 17 00:00:00 2001
From: Raiden Sakura
Date: Sun, 21 Apr 2024 15:11:50 +0800
Subject: [PATCH] Implement #3305

---
 core/config.py        |   2 +
 core/config_help.json |  16 ++++++
 core/models.py        | 111 +++++++++++++++++++++++++++++++++++++++---
 3 files changed, 122 insertions(+), 7 deletions(-)

diff --git a/core/config.py b/core/config.py
index e70210cddf..626011bf03 100644
--- a/core/config.py
+++ b/core/config.py
@@ -178,6 +178,8 @@ class ConfigManager:
         "disable_updates": False,
         # Logging
         "log_level": "INFO",
+        "stream_log_format": "plain",
+        "file_log_format": "plain",
         "discord_log_level": "INFO",
         # data collection
         "data_collection": True,
diff --git a/core/config_help.json b/core/config_help.json
index 9e6adab51e..40da905951 100644
--- a/core/config_help.json
+++ b/core/config_help.json
@@ -1190,6 +1190,22 @@
         ],
         "notes": []
     },
+    "stream_log_format": {
+        "default": "plain",
+        "description": "The logging format when logging to a stream; can be 'plain' or 'json'.",
+        "examples": [],
+        "notes": [
+            "This configuration can only be set through the `.env` file or environment (config) variables."
+        ]
+    },
+    "file_log_format": {
+        "default": "plain",
+        "description": "The logging format when logging to a file; can be 'plain' or 'json'.",
+        "examples": [],
+        "notes": [
+            "This configuration can only be set through the `.env` file or environment (config) variables."
+        ]
+    },
     "discord_log_level": {
         "default": "INFO",
         "description": "The `discord.py` library logging level for logging to stdout.",
diff --git a/core/models.py b/core/models.py
index 976abbbf81..2bcd0f859d 100644
--- a/core/models.py
+++ b/core/models.py
@@ -1,3 +1,4 @@
+import json
 import logging
 import os
 import re
@@ -7,9 +8,9 @@
 from logging import FileHandler, Handler, StreamHandler
 from logging.handlers import RotatingFileHandler
 from string import Formatter
-from typing import Optional
+from typing import Dict, Optional
 
-import _string
+import _string  # type: ignore
 import discord
 from discord.ext import commands
 
@@ -72,6 +73,70 @@ def line(self, level="info"):
         )
 
 
+class JsonFormatter(logging.Formatter):
+    """
+    Formatter that outputs JSON strings after parsing the LogRecord.
+    Parameters
+    ----------
+    fmt_dict : Optional[Dict[str, str]]
+        {key: logging format attribute} pairs. Defaults to {"message": "message"}.
+    time_format : str
+        time.strftime() format string. Default: "%Y-%m-%dT%H:%M:%S"
+    msec_format : str
+        Microsecond formatting. Appended at the end. Default: "%s.%03dZ"
+    """
+
+    def __init__(
+        self,
+        fmt_dict: Optional[Dict[str, str]] = None,
+        time_format: str = "%Y-%m-%dT%H:%M:%S",
+        msec_format: str = "%s.%03dZ",
+    ):
+        self.fmt_dict: Dict[str, str] = fmt_dict if fmt_dict is not None else {"message": "message"}
+        self.default_time_format: str = time_format
+        self.default_msec_format: str = msec_format
+        self.datefmt: Optional[str] = None
+
+    def usesTime(self) -> bool:
+        """
+        Overridden to look for the attribute in the format dict values instead of the fmt string.
+        """
+        return "asctime" in self.fmt_dict.values()
+
+    def formatMessage(self, record) -> Dict[str, str]:
+        """
+        Overridden to return a dictionary of the relevant LogRecord attributes instead of a string.
+        A KeyError is raised if an unknown attribute is provided in the fmt_dict.
+        """
+        return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()}
+
+    def format(self, record) -> str:
+        """
+        Mostly the same as the parent class's method, the difference being that a dict is built and dumped as JSON
+        instead of a string.
+        """
+        record.message = record.getMessage()
+
+        if self.usesTime():
+            record.asctime = self.formatTime(record, self.datefmt)
+
+        message_dict = self.formatMessage(record)
+
+        if record.exc_info:
+            # Cache the traceback text to avoid converting it multiple times
+            # (it's constant anyway)
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+
+        if record.exc_text:
+            message_dict["exc_info"] = record.exc_text
+
+        if record.stack_info:
+            message_dict["stack_info"] = self.formatStack(record.stack_info)
+
+        return json.dumps(message_dict, default=str)
+
+
 class FileFormatter(logging.Formatter):
     ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
 
@@ -88,6 +153,19 @@ def format(self, record):
     datefmt="%Y-%m-%d %H:%M:%S",
 )
 
+json_formatter = JsonFormatter(
+    {
+        "level": "levelname",
+        "message": "message",
+        "loggerName": "name",
+        "processName": "processName",
+        "processID": "process",
+        "threadName": "threadName",
+        "threadID": "thread",
+        "timestamp": "asctime",
+    }
+)
+
 
 def create_log_handler(
     filename: Optional[str] = None,
@@ -96,6 +174,7 @@
     level: int = logging.DEBUG,
     mode: str = "a+",
     encoding: str = "utf-8",
+    format: str = "plain",
     maxBytes: int = 28000000,
     backupCount: int = 1,
     **kwargs,
@@ -120,6 +199,9 @@
     encoding : str
         If this keyword argument is specified along with filename, its value is used when the `FileHandler` is created,
        and thus used when opening the output file. Defaults to 'utf-8'.
+    format : str
+        The format to output with, either 'plain' or 'json'. Applies to whichever handler is created
+        (stream, file, or rotating file).
     maxBytes : int
         The max file size before the rollover occurs. Defaults to 28000000 (28MB).
         Rollover occurs whenever the current log file is nearly `maxBytes` in length; but if either of `maxBytes` or `backupCount` is zero,
@@ -136,17 +218,21 @@
     """
     if filename is None:
         handler = StreamHandler(stream=sys.stdout, **kwargs)
-        handler.setFormatter(log_stream_formatter)
+        formatter = log_stream_formatter
     elif not rotating:
         handler = FileHandler(filename, mode=mode, encoding=encoding, **kwargs)
-        handler.setFormatter(log_file_formatter)
+        formatter = log_file_formatter
     else:
         handler = RotatingFileHandler(
             filename, mode=mode, encoding=encoding, maxBytes=maxBytes, backupCount=backupCount, **kwargs
         )
-        handler.setFormatter(log_file_formatter)
+        formatter = log_file_formatter
+
+    if format == "json":
+        formatter = json_formatter
 
     handler.setLevel(level)
+    handler.setFormatter(formatter)
     return handler
 
 
@@ -168,7 +254,11 @@ def getLogger(name=None) -> ModmailLogger:
 
 
 def configure_logging(bot) -> None:
-    global ch_debug, log_level
+    global ch_debug, log_level, ch
+
+    stream_log_format, file_log_format = bot.config["stream_log_format"], bot.config["file_log_format"]
+    if stream_log_format == "json":
+        ch.setFormatter(json_formatter)
     logger = getLogger(__name__)
     level_text = bot.config["log_level"].upper()
     logging_levels = {
@@ -192,8 +282,15 @@
     logger.info("Log file: %s", bot.log_file_path)
 
     ch_debug = create_log_handler(bot.log_file_path, rotating=True)
+
+    if file_log_format == "json":
+        ch_debug.setFormatter(json_formatter)
+
     ch.setLevel(log_level)
 
+    logger.info("Stream log format: %s", stream_log_format)
+    logger.info("File log format: %s", file_log_format)
+
     for log in loggers:
         log.setLevel(log_level)
         log.addHandler(ch_debug)
@@ -210,7 +307,7 @@
     d_logger.setLevel(d_level)
 
     non_verbose_log_level = max(d_level, logging.INFO)
-    stream_handler = create_log_handler(level=non_verbose_log_level)
+    stream_handler = create_log_handler(level=non_verbose_log_level, format=stream_log_format)
     if non_verbose_log_level != d_level:
         logger.info("Discord logging level (stdout): %s.", logging.getLevelName(non_verbose_log_level))
         logger.info("Discord logging level (logfile): %s.", logging.getLevelName(d_level))
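A quick way to sanity-check the new formatter outside the bot is to attach JsonFormatter to a throwaway logger. This is only a reviewer's sketch, not part of the patch: the script name and the trimmed-down fmt_dict are illustrative, and it assumes the bot's dependencies are installed so that core.models imports cleanly.

    # demo_json_logging.py - minimal, standalone exercise of the JsonFormatter added in core/models.py
    import logging
    import sys

    from core.models import JsonFormatter

    handler = logging.StreamHandler(sys.stdout)
    # Same idea as the module-level `json_formatter`, with fewer keys for readability.
    handler.setFormatter(JsonFormatter({"level": "levelname", "message": "message", "timestamp": "asctime"}))

    log = logging.getLogger("demo")
    log.addHandler(handler)
    log.setLevel(logging.INFO)

    log.info("plugin loaded")
    # Expected shape of the emitted line:
    # {"level": "INFO", "message": "plugin loaded", "timestamp": "2024-04-21T15:11:50.000Z"}

Since the new stream_log_format/file_log_format keys can only be set through `.env` or environment (config) variables, enabling JSON output for the bot itself would presumably look like STREAM_LOG_FORMAT=json and FILE_LOG_FORMAT=json in `.env`, assuming the usual uppercase mapping of config keys to environment variables.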