🐛 新增日志配置, 修复日志相关的BUG (#131)

This commit is contained in:
KimigaiiWuyi 2025-06-16 18:43:03 +08:00
parent a47a30c724
commit e52307f5b5
9 changed files with 52 additions and 33 deletions

View File

@ -137,7 +137,7 @@ class _Bot:
msg_id = sp_msg_id
for mr in message_result:
logger.trace(f'[GsCore][即将发送消息] {mr}')
logger.trace('[GsCore][即将发送消息]', messages=mr)
if at_sender and sender_id:
if at_sender_pos == '消息最后':
mr.append(MessageSegment.at(sender_id))

View File

@ -18,6 +18,7 @@ CONFIG_DEFAULT = {
'log': {
'level': 'INFO',
'output': ['stdout', 'stderr', 'file'],
'module': False,
# ...
},
'enable_empty_start': True,

View File

@ -16,9 +16,11 @@ from gsuid_core.models import Event, Message, MessageReceive
from gsuid_core.utils.database.models import CoreUser, CoreGroup, Subscribe
from gsuid_core.utils.plugins_config.gs_config import (
sp_config,
log_config,
core_plugins_config,
)
show_receive: bool = log_config.get_config('ShowReceive').data
command_start = core_config.get_config('command_start')
enable_empty = core_config.get_config('enable_empty_start')
config_masters = core_config.get_config('masters')
@ -38,7 +40,8 @@ async def handle_event(ws: _Bot, msg: MessageReceive, is_http: bool = False):
# 获取用户权限,越小越高
msg.user_pm = user_pm = await get_user_pml(msg)
event = await msg_process(msg)
logger.info('[收到事件]', event_payload=event)
if show_receive:
logger.info('[收到事件]', event_payload=event)
if event.user_pm == 0:
if not await Subscribe.data_exist(

View File

@ -16,13 +16,13 @@ from colorama import Fore, Style, init
from structlog.types import EventDict, Processor, WrappedLogger
from structlog.processors import CallsiteParameter, CallsiteParameterAdder
from gsuid_core.models import Event
from gsuid_core.config import core_config
from gsuid_core.models import Event, Message
from gsuid_core.data_store import get_res_path
log_history: List[EventDict] = []
LOG_PATH = get_res_path() / 'logs'
IS_DEBUG_LOG = False
IS_DEBUG_LOG: bool = False
class DailyNamedFileHandler(TimedRotatingFileHandler):
@ -30,7 +30,7 @@ class DailyNamedFileHandler(TimedRotatingFileHandler):
一个会自动使用 YYYY-MM-DD.log 作为文件名的日志处理器
"""
def __init__(self, log_dir, backupCount=7, encoding='utf-8'):
def __init__(self, log_dir, backupCount=0, encoding='utf-8'):
self.log_dir = Path(log_dir)
self.log_dir.mkdir(parents=True, exist_ok=True)
@ -60,28 +60,11 @@ class DailyNamedFileHandler(TimedRotatingFileHandler):
self.baseFilename = self._get_dated_filename()
self._cleanup_logs()
# self._cleanup_logs()
if not self.delay:
self.stream = self._open()
def _cleanup_logs(self):
    """Remove old ``*.log`` files in ``log_dir``, keeping only the newest
    ``backupCount`` files (a ``backupCount`` of 0 disables cleanup and
    keeps everything)."""
    if self.backupCount > 0:
        # backupCount + 1 because the currently active log file is
        # counted as well.
        log_files = sorted(
            self.log_dir.glob("*.log"),
            key=lambda f: f.stat().st_mtime,
            reverse=True,
        )
        # Newest-first sort above, so everything past index
        # ``backupCount`` is stale and gets deleted.
        for f in log_files[self.backupCount :]:  # noqa: E203
            try:
                f.unlink()
            except OSError:
                # Under concurrency the file may already have been
                # removed by another process -- ignore.
                pass
class TraceCapableLogger(Protocol):
def trace(self, event: Any, *args: Any, **kwargs: Any) -> None: ...
@ -127,6 +110,13 @@ def format_callsite_processor(
return event_dict
def reduce_message(messages: List[Message]):
    """Return a log-friendly view of *messages* with oversized payloads
    truncated.

    Any message whose ``data`` is 500 characters or longer (e.g. a
    base64-encoded image) is truncated to its first 100 characters so it
    does not flood the log output.

    The input objects are NOT mutated: callers pass the very ``Message``
    objects that are still queued for sending (``event.content`` and the
    outgoing ``messages`` kwarg), so truncating them in place would
    corrupt the actual outgoing message. Oversized messages are
    shallow-copied before truncation instead.

    Args:
        messages: the messages to render for logging.

    Returns:
        A new list; entries are either the original ``Message`` objects
        (when small enough) or truncated shallow copies.
    """
    import copy

    reduced = []
    for message in messages:
        if message.data and len(message.data) >= 500:
            # Copy first -- the caller's object must stay intact.
            message = copy.copy(message)
            message.data = message.data[:100]
        reduced.append(message)
    return reduced
def format_event_for_console(
logger: WrappedLogger, method_name: str, event_dict: EventDict
) -> EventDict:
@ -142,10 +132,14 @@ def format_event_for_console(
f'group_id={event.group_id}, '
f'user_id={event.user_id}, '
f'user_pm={event.user_pm}, '
f'content={event.content}, '
f'content={reduce_message(event.content)}, '
)
event_dict.pop("event_payload")
messages: Optional[List[Message]] = event_dict.get("messages")
if isinstance(messages, List):
event_dict['messages'] = reduce_message(messages)
return event_dict
@ -322,7 +316,7 @@ def setup_logging():
# 关键:使用 TimedRotatingFileHandler 实现每日轮转
file_handler = DailyNamedFileHandler(
log_dir=LOG_PATH,
backupCount=7,
backupCount=0,
encoding='utf-8',
)
file_handler.setLevel(LEVEL)

View File

@ -4,9 +4,9 @@ from pathlib import Path
from shutil import copyfile
from gsuid_core.logger import LOG_PATH, logger
from gsuid_core.utils.plugins_config.gs_config import core_plugins_config
from gsuid_core.utils.plugins_config.gs_config import log_config
CLEAN_DAY: str = core_plugins_config.get_config('ScheduledCleanLogDay').data
CLEAN_DAY: str = log_config.get_config('ScheduledCleanLogDay').data
def clean_log():

View File

@ -178,11 +178,6 @@ CONIFG_DEFAULT: Dict[str, GSC] = {
['38890', '28541', '28542'],
['38890', '28541', '28542'],
),
'ScheduledCleanLogDay': GsStrConfig(
'定时清理几天外的日志',
'定时清理几天外的日志',
'8',
),
'EnableForwardMessage': GsStrConfig(
'是否允许发送合并转发',
'可选循环发送、合并消息、合并转发、禁止',

View File

@ -10,6 +10,7 @@ from gsuid_core.logger import logger
from gsuid_core.data_store import get_res_path
from .sp_config import SP_CONIFG
from .log_config import LOG_CONFIG
from .status_config import STATUS_CONIFG
from .config_default import CONIFG_DEFAULT
from .pic_gen_config import PIC_GEN_CONIFG
@ -225,6 +226,12 @@ send_pic_config = StringConfig(
SEND_PIC_CONIFG,
)
# Standalone config store for log-related options; backed by a JSON file
# under RES and seeded with the LOG_CONFIG defaults (cleanup days,
# whether plain user messages are logged).
log_config = StringConfig(
    'GsCore日志配置',
    RES / 'log_config.json',
    LOG_CONFIG,
)
pic_gen_config = StringConfig(
'GsCore图片生成',
RES / 'pic_gen_config.json',

View File

@ -0,0 +1,16 @@
from typing import Dict
from .models import GSC, GsStrConfig, GsBoolConfig
# Default schema for GsCore's standalone log configuration file.
LOG_CONFIG: Dict[str, GSC] = {
    # How many days of log files to keep before scheduled cleanup removes
    # them; stored as a string, defaults to '8'.
    'ScheduledCleanLogDay': GsStrConfig(
        '定时清理几天外的日志',
        '定时清理几天外的日志',
        '8',
    ),
    # When disabled, only command-triggered events are written to the
    # log; ordinary (non-command) user messages are not recorded.
    'ShowReceive': GsBoolConfig(
        '显示用户普通消息',
        '关闭该选项将导致log只记录命令触发',
        True,
    ),
}

View File

@ -77,6 +77,7 @@ HTML = """
.log-level.WARN { background-color: #FFC107; color: #333; }
.log-level.WARNING { background-color: #FFC107; color: #333; }
.log-level.ERROR { background-color: #F44336; }
.log-level.TRACE { background-color: #673AB7; }
.log-level.CRITICAL { background-color: #F44336; }
.log-level.EXCEPTION { background-color: #F44336; }
@ -188,15 +189,17 @@ ON_MOUNT_SSE = """
}
// ======================= 修改结束 =======================
logEntry.innerHTML = `
<span class="log-time">${time}</span>
<span class="log-level ${level}">${level}</span>
<span class="log-content">${messageHtml}</span>
`;
const isScrolledToBottom = logContainer.scrollHeight - logContainer.clientHeight <= logContainer.scrollTop + 5;
logContainer.appendChild(logEntry);
const isScrolledToBottom = logContainer.scrollHeight - logContainer.clientHeight <= logContainer.scrollTop + 5;
if (isScrolledToBottom) {
logContainer.scrollTop = logContainer.scrollHeight;
}