import logging
import logging.handlers
import queue
import sys
from pathlib import Path
from typing import Optional


def setup_logger(
    name: str = "RAG-Service",
    level: str = "INFO",
    log_dir: str = "./logs",
    log_to_file: bool = True,
    log_to_console: bool = True,
    use_async: bool = True,  # new: whether to use asynchronous logging
    max_file_size: int = 10 * 1024 * 1024,  # 10 MB
    backup_count: int = 5
) -> logging.Logger:
    """
    Configure global logging for the application.

    Args:
        name: application name (used as the log file base name)
        level: log level
        log_dir: directory for log files
        log_to_file: whether to write logs to files
        log_to_console: whether to write logs to the console
        use_async: whether to use asynchronous logging (recommended)
        max_file_size: maximum size of a log file before rotation
        backup_count: number of rotated backup files to keep
    """

    # Create the log directory
    if log_to_file:
        log_path = Path(log_dir)
        log_path.mkdir(parents=True, exist_ok=True)

    # Configure the log format
    formatter = logging.Formatter(
        fmt='%(asctime)s [%(levelname)s] %(name)s [PID:%(process)d] [%(filename)s:%(lineno)d] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # Get the root logger
    root_logger = logging.getLogger()
    root_logger.setLevel(getattr(logging, level.upper()))

    # Remove any existing handlers
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)

    # Console handler
    if log_to_console:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(getattr(logging, level.upper()))
        console_handler.setFormatter(formatter)
        root_logger.addHandler(console_handler)

    # File handlers
    if log_to_file:
        if use_async:
            # Asynchronous logging: records are put on an in-memory queue by a
            # QueueHandler and written to disk by a QueueListener thread, so the
            # calling thread never blocks on file I/O.

            # Regular log file
            file_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            file_handler.setLevel(getattr(logging, level.upper()))
            file_handler.setFormatter(formatter)

            # Wrap it in an asynchronous queue handler
            async_file_handler = logging.handlers.QueueHandler(queue.Queue(-1))
            async_file_handler.setLevel(getattr(logging, level.upper()))

            # Queue listener drains the queue on a background thread
            queue_listener = logging.handlers.QueueListener(
                async_file_handler.queue, file_handler, respect_handler_level=True
            )
            queue_listener.start()

            root_logger.addHandler(async_file_handler)

            # Error log file
            error_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}-error.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(formatter)

            # Wrap it in an asynchronous queue handler
            async_error_handler = logging.handlers.QueueHandler(queue.Queue(-1))
            async_error_handler.setLevel(logging.ERROR)

            # Queue listener for the error log
            error_queue_listener = logging.handlers.QueueListener(
                async_error_handler.queue, error_handler, respect_handler_level=True
            )
            error_queue_listener.start()

            root_logger.addHandler(async_error_handler)

            # Keep references to the listeners so they are not garbage-collected
            # and can be stopped in cleanup_logger()
            root_logger._queue_listeners = [queue_listener, error_queue_listener]

        else:
            # Traditional synchronous file handler
            file_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            file_handler.setLevel(getattr(logging, level.upper()))
            file_handler.setFormatter(formatter)
            root_logger.addHandler(file_handler)

            # Error log file
            error_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}-error.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(formatter)
            root_logger.addHandler(error_handler)

    # Quiet noisy third-party library loggers
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("chromadb").setLevel(logging.WARNING)
    logging.getLogger("openai").setLevel(logging.WARNING)
    logging.getLogger("langchain").setLevel(logging.WARNING)
    logging.getLogger("uvicorn.access").setLevel(logging.WARNING)

    return root_logger


def cleanup_logger():
    """Stop the queue listeners and release logging resources."""
    root_logger = logging.getLogger()
    if hasattr(root_logger, '_queue_listeners'):
        for listener in root_logger._queue_listeners:
            listener.stop()
        delattr(root_logger, '_queue_listeners')


def get_logger(name: Optional[str] = None) -> logging.Logger:
    """Return a named logger, or the root logger if no name is given."""
    if name:
        return logging.getLogger(name)
    return logging.getLogger()


# Module-level logger instance
logger = get_logger(__name__)
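

# --- Usage sketch (illustrative only, not part of the module API) ---
# A minimal example of wiring these helpers into an application lifecycle:
# configure logging once at startup, log through get_logger(), and make sure
# cleanup_logger() runs on shutdown so the QueueListener threads flush and stop.
# The logger name "rag.service" below is a hypothetical example.
if __name__ == "__main__":
    import atexit

    setup_logger(name="RAG-Service", level="DEBUG", use_async=True)
    atexit.register(cleanup_logger)  # stop listeners and flush on interpreter exit

    demo_logger = get_logger("rag.service")
    demo_logger.info("service starting")
    try:
        demo_logger.debug("processing a request...")
        raise ValueError("simulated failure")
    except ValueError:
        # exc_info=True records the traceback in both rag-service.log
        # and rag-service-error.log
        demo_logger.error("request failed", exc_info=True)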