feat: logging module

李如威 2025-07-08 23:55:10 +08:00
parent 11a74ea763
commit 72c05def57
6 changed files with 303 additions and 41 deletions


@@ -16,4 +16,9 @@ PORT=8000

# Upload settings
UPLOAD_DIR=./uploads
# 10MB
MAX_FILE_SIZE=10485760

# Logging settings
LOG_LEVEL=INFO
LOG_DIR=./logs


@@ -28,12 +28,20 @@ class Config:
    UPLOAD_DIR = os.getenv("UPLOAD_DIR", "./uploads")
    MAX_FILE_SIZE = int(os.getenv("MAX_FILE_SIZE", 10485760))  # 10MB

    # Logging settings
    LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
    LOG_DIR = os.getenv("LOG_DIR", "./logs")

    @classmethod
    def validate(cls):
        """Validate the configuration."""
        if not cls.OPENAI_API_KEY:
            raise ValueError("OPENAI_API_KEY environment variable is not set")

        # Create the required directories
        os.makedirs(cls.UPLOAD_DIR, exist_ok=True)
        os.makedirs(cls.LOG_DIR, exist_ok=True)
        os.makedirs(cls.CHROMA_PERSIST_DIRECTORY, exist_ok=True)


# Create the config instance
config = Config()

main.py

@@ -7,6 +7,7 @@ from typing import List
import shutil
from io import BytesIO
import json
import time
from config import config
from models import (
@@ -25,6 +26,16 @@ from utils import (
    ensure_directory_exists,
    is_supported_file_type,
)
from utils.logger import setup_logger, get_logger, cleanup_logger

# Initialize the logging system
setup_logger(
    name=config.APP_NAME,
    level=config.LOG_LEVEL,
    log_dir=config.LOG_DIR,
    use_async=True,  # use async logging to avoid blocking
)
logger = get_logger(__name__)

# Create the FastAPI application
app = FastAPI(
@@ -48,7 +59,9 @@ app.add_middleware(
ensure_directory_exists(config.UPLOAD_DIR)

# Create the RAG service instance
logger.info(f"Starting {config.APP_NAME} v{config.APP_VERSION}")
rag_service = AsyncRAGService()
logger.info("RAG service instance created")
def get_rag_service() -> AsyncRAGService:
@@ -59,6 +72,7 @@ def get_rag_service() -> AsyncRAGService:

@app.get("/", response_model=dict)
async def root():
    """Root path - service health check."""
    logger.info("Root path accessed")
    return {
        "message": f"Welcome to {config.APP_NAME}",
        "version": config.APP_VERSION,
@@ -69,6 +83,7 @@ async def root():

@app.get("/health")
async def health_check():
    """Health check endpoint."""
    logger.debug("Health check request")
    return {"status": "healthy", "service": config.APP_NAME}
@@ -77,9 +92,14 @@ async def upload_document(
    file: UploadFile = File(...), service: AsyncRAGService = Depends(get_rag_service)
):
    """Document upload endpoint."""
    start_time = time.time()
    try:
        logger.info(f"Uploading document: {file.filename}")

        # Validate the file type
        if not is_supported_file_type(file.filename):
            logger.warning(f"Unsupported file type: {file.filename}")
            raise HTTPException(
                status_code=400, detail="Unsupported file type. Currently PDF and TXT are supported."
            )
@@ -87,6 +107,7 @@ async def upload_document(
        # Validate the file size
        content = await file.read()
        if not validate_file_size(len(content), config.MAX_FILE_SIZE):
            logger.warning(f"File too large: {file.filename}, size: {len(content)} bytes")
            raise HTTPException(
                status_code=400,
                detail=f"File too large. The maximum supported size is {config.MAX_FILE_SIZE // 1024 // 1024}MB",
@@ -99,6 +120,7 @@ async def upload_document(
        text_content = content.decode("utf-8")

        if not text_content.strip():
            logger.warning(f"File content is empty: {file.filename}")
            raise HTTPException(status_code=400, detail="The file is empty or no text could be extracted")

        # Add to the vector store
@@ -109,6 +131,9 @@ async def upload_document(
        with open(file_path, "wb") as f:
            f.write(content)

        duration = time.time() - start_time
        logger.info(f"Document uploaded: {file.filename}, doc ID: {doc_id}, took {duration:.2f}s")

        return SuccessResponse(
            message="Document uploaded successfully",
            data={
@@ -121,6 +146,8 @@ async def upload_document(
    except HTTPException:
        raise
    except Exception as e:
        duration = time.time() - start_time
        logger.error(f"Document upload failed: {file.filename}, error: {str(e)}, took {duration:.2f}s", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Document processing failed: {str(e)}")
@@ -129,13 +156,20 @@ async def chat(
    request: ChatRequest, service: AsyncRAGService = Depends(get_rag_service)
):
    """Chat Q&A endpoint."""
    start_time = time.time()
    try:
        logger.info(f"Processing question: {request.question[:50]}...")

        result = await service.chat_async(
            question=request.question,
            top_k=request.top_k,
            temperature=request.temperature,
        )

        duration = time.time() - start_time
        logger.info(f"Question answered, took {duration:.2f}s")

        return ChatResponse(
            answer=result["answer"],
            sources=result["sources"],
@@ -143,6 +177,8 @@ async def chat(
        )
    except Exception as e:
        duration = time.time() - start_time
        logger.error(f"Chat failed: {str(e)}, took {duration:.2f}s", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Chat processing failed: {str(e)}")
@@ -151,6 +187,7 @@ async def chat_stream(
    request: StreamChatRequest, service: AsyncRAGService = Depends(get_rag_service)
):
    """Streaming chat Q&A endpoint."""
    logger.info(f"Processing streaming question: {request.question[:50]}...")

    async def generate_stream():
        try:
@@ -164,6 +201,7 @@ async def chat_stream(
                yield f"data: {chunk.model_dump_json()}\n\n"
        except Exception as e:
            logger.error(f"Streaming chat failed: {str(e)}", exc_info=True)
            # Send an error message to the client when something goes wrong
            error_chunk = StreamChatChunk(
                content=f"Error while generating the answer: {str(e)}",
@@ -189,7 +227,9 @@ async def chat_stream(
async def get_documents(service: AsyncRAGService = Depends(get_rag_service)):
    """Document list endpoint."""
    try:
        logger.info("Fetching the document list")
        docs = await service.get_documents_async()
        logger.info(f"Fetched {len(docs)} documents")
        return [
            DocumentInfo(
                id=doc["id"],
@@ -201,6 +241,7 @@ async def get_documents(service: AsyncRAGService = Depends(get_rag_service)):
            for doc in docs
        ]
    except Exception as e:
        logger.error(f"Failed to fetch the document list: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to fetch the document list: {str(e)}")
@@ -210,21 +251,26 @@ async def delete_document(
):
    """Document deletion endpoint."""
    try:
        logger.info(f"Deleting document: {doc_id}")
        success = await service.delete_document_async(doc_id)

        if not success:
            logger.warning(f"Document not found: {doc_id}")
            raise HTTPException(status_code=404, detail="Document not found")

        logger.info(f"Document deleted: {doc_id}")
        return SuccessResponse(message="Document deleted successfully")
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to delete document: {doc_id}, error: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to delete document: {str(e)}")


@app.exception_handler(Exception)
async def global_exception_handler(request, exc):
    """Global exception handler."""
    logger.error(f"Unhandled exception: {str(exc)}", exc_info=True)
    return JSONResponse(
        status_code=500,
        content=ErrorResponse(
@@ -237,15 +283,22 @@ if __name__ == "__main__":
    # Validate the configuration
    try:
        config.validate()
        logger.info("Configuration validated")
    except ValueError as e:
        logger.error(f"Configuration error: {e}")
        print(f"Configuration error: {e}")
        exit(1)

    # Start the service
    logger.info(f"Starting the server on {config.HOST}:{config.PORT}")
    try:
        uvicorn.run(
            "main:app",
            host=config.HOST,
            port=config.PORT,
            reload=config.DEBUG,
            log_level="info",
        )
    finally:
        # Clean up logging resources on shutdown
        cleanup_logger()


@@ -3,6 +3,7 @@ import asyncio
from langchain_openai import ChatOpenAI
from langchain.prompts import PromptTemplate
from services.vector_store import AsyncVectorStore
from utils.logger import get_logger
import os
import time
@@ -11,6 +12,7 @@ class AsyncRAGService:
    """Async RAG service main class."""

    def __init__(self):
        self.logger = get_logger(__name__)
        self.vector_store = AsyncVectorStore()
        self.llm = ChatOpenAI(
            model="deepseek-r1:8b",
@@ -31,10 +33,22 @@
            Answer""",
        )
        self.logger.info("RAG service initialized")
    async def add_document_async(self, content: str, filename: str) -> str:
        """Asynchronously add a document."""
        start_time = time.time()
        try:
            self.logger.info(f"Adding document: {filename}")
            result = await self.vector_store.add_document_async(content, filename)
            duration = time.time() - start_time
            self.logger.info(f"Document added: {filename}, took {duration:.2f}s")
            return result
        except Exception as e:
            duration = time.time() - start_time
            self.logger.error(f"Failed to add document: {filename}, error: {str(e)}, took {duration:.2f}s")
            raise
    async def chat_async(
        self, question: str, top_k: int = 3, temperature: float = 0.7
@@ -42,37 +56,50 @@
        """Asynchronous chat Q&A."""
        start_time = time.time()

        try:
            self.logger.info(f"Processing question: {question[:50]}...")

            # Retrieve relevant documents asynchronously
            search_results = await self.vector_store.search_async(question, top_k)
            self.logger.debug(f"Retrieved {len(search_results)} relevant documents")

            if not search_results:
                self.logger.warning("No relevant documents found")
                return {
                    "answer": "Sorry, I could not find relevant information in the existing documents to answer your question.",
                    "sources": [],
                    "processing_time": time.time() - start_time,
                }

            # Build the context and format the sources in parallel
            context_task = asyncio.create_task(self._build_context_async(search_results))
            sources_task = asyncio.create_task(self._format_sources_async(search_results))

            # Wait for the context to be built
            context = await context_task

            # Generate the answer asynchronously
            self.llm.temperature = temperature
            prompt = self.prompt_template.format(context=context, question=question)
            response = await asyncio.to_thread(self.llm.invoke, prompt)

            # Wait for the formatted sources
            sources = await sources_task

            duration = time.time() - start_time
            self.logger.info(f"Question answered, took {duration:.2f}s")

            return {
                "answer": response.content,
                "sources": sources,
                "processing_time": duration,
            }
        except Exception as e:
            duration = time.time() - start_time
            self.logger.error(f"Chat failed: {str(e)}, took {duration:.2f}s")
            raise
    async def chat_stream_async(
        self, question: str, top_k: int = 3, temperature: float = 0.7
@@ -129,19 +156,20 @@
            # Get the streaming response
            stream = await asyncio.to_thread(self.llm.stream, prompt)

            async for chunk in self._async_stream_wrapper(stream):
                if hasattr(chunk, "content") and chunk.content:
                    yield chunk.content
        except Exception as e:
            yield f"Error while generating the answer: {str(e)}"
    async def _async_stream_wrapper(self, stream):
        """Wrap a synchronous stream as an async generator."""

        def get_next_chunk(stream_iter):
            try:
                return next(stream_iter)
            except StopIteration:
                return None

        stream_iter = iter(stream)
        while True:
            chunk = await asyncio.to_thread(get_next_chunk, stream_iter)
@@ -189,4 +217,4 @@
            for result in search_results
        ]

        return await asyncio.to_thread(_format_sources)
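A note on `_async_stream_wrapper` above: it bridges a blocking iterator (the synchronous LLM stream) into an async generator by pulling each chunk on a worker thread via `asyncio.to_thread`, so the event loop stays responsive between chunks. A minimal self-contained sketch of the same technique, where `slow_numbers` and `aiter_blocking` are illustrative names standing in for the LLM stream and the wrapper:

import asyncio
import time


def slow_numbers():
    """A blocking generator, standing in for a synchronous LLM stream."""
    for i in range(3):
        time.sleep(0.5)  # simulates blocking I/O
        yield i


async def aiter_blocking(stream):
    """Drain a blocking iterator without stalling the event loop."""

    def next_or_none(it):
        try:
            return next(it)
        except StopIteration:
            return None  # sentinel: the stream is exhausted

    it = iter(stream)
    while True:
        chunk = await asyncio.to_thread(next_or_none, it)
        if chunk is None:
            break
        yield chunk


async def main():
    async for n in aiter_blocking(slow_numbers()):
        print(n)  # the event loop stays free between chunks


asyncio.run(main())

The `None` sentinel carries the same caveat as the original code: it assumes the underlying stream never yields `None` itself.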


@@ -1,7 +1,13 @@
import os

# Set the telemetry environment variables before importing chromadb
os.environ["CHROMA_TELEMETRY"] = "false"
os.environ["ANONYMIZED_TELEMETRY"] = "false"

import chromadb
from typing import List, Dict, Any
import asyncio
from chromadb.config import Settings
from sentence_transformers import SentenceTransformer
from langchain.text_splitter import RecursiveCharacterTextSplitter

utils/logger.py (new file)

@@ -0,0 +1,162 @@
import logging
import logging.handlers
import os
import sys
from datetime import datetime
from pathlib import Path
import queue
import threading
def setup_logger(
    name: str = "RAG-Service",
    level: str = "INFO",
    log_dir: str = "./logs",
    log_to_file: bool = True,
    log_to_console: bool = True,
    use_async: bool = True,  # new: whether to use async (queue-based) logging
    max_file_size: int = 10 * 1024 * 1024,  # 10MB
    backup_count: int = 5,
):
    """
    Configure global logging.

    Args:
        name: Application name
        level: Log level
        log_dir: Log directory
        log_to_file: Whether to log to a file
        log_to_console: Whether to log to the console
        use_async: Whether to use async logging (recommended)
        max_file_size: Maximum size of a single log file
        backup_count: Number of rotated backup files to keep
    """
    # Create the log directory
    if log_to_file:
        log_path = Path(log_dir)
        log_path.mkdir(exist_ok=True)

    # Log format
    formatter = logging.Formatter(
        fmt='%(asctime)s [%(levelname)s] %(name)s [PID:%(process)d] [%(filename)s:%(lineno)d] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    # Get the root logger
    root_logger = logging.getLogger()
    root_logger.setLevel(getattr(logging, level.upper()))

    # Remove any existing handlers
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)

    # Console handler
    if log_to_console:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(getattr(logging, level.upper()))
        console_handler.setFormatter(formatter)
        root_logger.addHandler(console_handler)

    # File handlers
    if log_to_file:
        if use_async:
            # Queue-based handlers, so logging calls never block on disk I/O
            # Main log file
            file_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            file_handler.setLevel(getattr(logging, level.upper()))
            file_handler.setFormatter(formatter)

            # Put a queue in front of the file handler
            async_file_handler = logging.handlers.QueueHandler(queue.Queue(-1))
            async_file_handler.setLevel(getattr(logging, level.upper()))

            # Listener thread that drains the queue into the file handler
            queue_listener = logging.handlers.QueueListener(
                async_file_handler.queue, file_handler, respect_handler_level=True
            )
            queue_listener.start()
            root_logger.addHandler(async_file_handler)

            # Error log file
            error_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}-error.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(formatter)

            # Put a queue in front of the error handler
            async_error_handler = logging.handlers.QueueHandler(queue.Queue(-1))
            async_error_handler.setLevel(logging.ERROR)

            # Listener thread for the error queue
            error_queue_listener = logging.handlers.QueueListener(
                async_error_handler.queue, error_handler, respect_handler_level=True
            )
            error_queue_listener.start()
            root_logger.addHandler(async_error_handler)

            # Keep references so the listeners are not garbage collected
            root_logger._queue_listeners = [queue_listener, error_queue_listener]
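            # (cleanup_logger() below relies on this attribute to stop both
            # listener threads; QueueListener.stop() processes any records
            # still in the queue before the thread exits)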
        else:
            # Traditional synchronous file handlers
            file_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            file_handler.setLevel(getattr(logging, level.upper()))
            file_handler.setFormatter(formatter)
            root_logger.addHandler(file_handler)

            # Error log file
            error_handler = logging.handlers.RotatingFileHandler(
                log_path / f"{name.lower()}-error.log",
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(formatter)
            root_logger.addHandler(error_handler)

    # Quiet noisy third-party loggers
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("chromadb").setLevel(logging.WARNING)
    logging.getLogger("openai").setLevel(logging.WARNING)
    logging.getLogger("langchain").setLevel(logging.WARNING)
    logging.getLogger("uvicorn.access").setLevel(logging.WARNING)

    return root_logger
def cleanup_logger():
    """Release logging resources."""
    root_logger = logging.getLogger()
    if hasattr(root_logger, '_queue_listeners'):
        for listener in root_logger._queue_listeners:
            listener.stop()
        delattr(root_logger, '_queue_listeners')


def get_logger(name: str = None) -> logging.Logger:
    """Get a logger by name, or the root logger."""
    if name:
        return logging.getLogger(name)
    return logging.getLogger()


# Module-level logger instance
logger = get_logger(__name__)
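For reference, a minimal usage sketch of this module outside the FastAPI app; the "demo" name and DEBUG level are arbitrary choices for illustration:

from utils.logger import setup_logger, get_logger, cleanup_logger

# Writes to ./logs/demo.log and ./logs/demo-error.log via queue listeners
setup_logger(name="demo", level="DEBUG", log_dir="./logs", use_async=True)
log = get_logger(__name__)

try:
    log.info("service starting")
    log.error("something went wrong")  # also routed to demo-error.log
finally:
    cleanup_logger()  # stops the listener threads and flushes queued records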