feat: 日志模块 (logging module)
parent 9686f24b60
commit 1e18cd2e76
@@ -8,4 +8,5 @@ pymupdf
 python-docx
 scikit-learn
 aiofiles
 pillow
+loguru
@ -1,3 +1,4 @@
|
|||
#!/usr/bin/env bash
|
||||
export $(cat .env | xargs)
|
||||
pytest -s -W ignore::DeprecationWarning src/tests/test_nodes.py
|
||||
|
||||
|
|
|
|||
|
|
@@ -15,10 +15,12 @@ class Config(TypedDict):
     embedding_api_key: str
     embedding_api_host: str
     embedding_model: str
+    logger_level: str


 def _read_config() -> Config:
     return {
+        "logger_level": os.getenv("LOGGER_LEVEL", "DEBUG"),
        "host": os.getenv("HOST"),
        "port": int(os.getenv("PORT")),
        "version": os.getenv("VERSION"),
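The `logger_level` key flows straight from the environment (loaded from `.env` by the test script above), falling back to "DEBUG". A quick sanity check, assuming `config` is built by `_read_config()` at module import time, as the `from src.pipeline.config import config` imports elsewhere in this commit suggest; all values here are made up:

# Hypothetical smoke test for the new config key (values are made up).
import os

os.environ["LOGGER_LEVEL"] = "INFO"   # normally supplied via .env
os.environ["HOST"] = "127.0.0.1"
os.environ["PORT"] = "8000"           # _read_config() casts this to int
os.environ["VERSION"] = "0.1.0"

from src.pipeline.config import config  # must run after the env is populated

assert config["logger_level"] == "INFO"
# With LOGGER_LEVEL unset, config["logger_level"] falls back to "DEBUG".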
@@ -1,10 +1,9 @@
 from src.pipeline.config import config
 import httpx
-from urllib.parse import urljoin

 async def get_embedding(text, timeout: int = 30):
     try:
-        async with httpx.AsyncClient(timeout=timeout) as client:
+        async with httpx.AsyncClient(timeout=timeout, http2=False, trust_env=False) as client:
             url = config["embedding_api_host"]
             body = {
                 "model": config["embedding_model"],
@@ -14,8 +13,6 @@ async def get_embedding(text, timeout: int = 30):
                 "Content-Type": "application/json",
                 "Authorization": f"Bearer {config['embedding_api_key']}",
             }
-            print(url)
-            print(headers)
             res = await client.post(url, headers=headers, json=body)
             res.raise_for_status()
             data = res.json()
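Dropping the `print` calls in favor of the loguru setup below, and pinning the client to `http2=False, trust_env=False`, keeps the request path deterministic (proxy environment variables are ignored). For a standalone check, a minimal sketch; it assumes `get_embedding` returns the embedding vector itself, which is how `EmbeddingDocumentsNode` consumes it in this commit, and it needs the embedding host, key, and model configured in the environment:

# Hypothetical standalone call (requires a reachable embedding endpoint).
import asyncio
from src.pipeline.core.llm import get_embedding

async def main():
    vec = await get_embedding("hello world")
    print(len(vec))  # dimensionality of the returned vector

asyncio.run(main())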
@@ -0,0 +1,78 @@
+import uuid
+from src.pipeline.core.pocket_flow import AsyncBatchNode
+from src.pipeline.core.utils import fixed_size_chunk, load_document
+from src.pipeline.core.llm import get_embedding
+import re
+
+
+class ChunkDocumentsNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["documents"]
+
+    async def exec_async(self, document):
+        """
+        :param document: {text, file_name}
+        """
+        # print(f"document: {document}")
+        text = document["text"]
+        # first replace runs of tabs and spaces with a single space
+        text = re.sub(r"[ \t]+", " ", text)
+        # then collapse any remaining repeated spaces into one
+        text = re.sub(r" +", " ", text)
+        # strip leading/trailing whitespace
+        text = text.strip()
+        return [{"text": x, "file_name": document["file_name"], "uuid": uuid.uuid4().hex} for x in fixed_size_chunk(text, chunk_size=10000)]
+
+    async def post_async(self, shared, prep_res, exec_res_list):
+        all_chunks = []
+        for chunks in exec_res_list:
+            all_chunks.extend(chunks)
+        shared["documents"] = all_chunks
+        return "default"
+
+
+class EmbeddingDocumentsNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["documents"]
+
+    async def exec_async(self, document):
+        """
+        :param document: {text, file_name}
+        """
+        return {**document, "embedding": await get_embedding(document["text"])}
+
+    async def post_async(self, shared, prep_res, exec_res_list):
+
+        shared["documents"] = exec_res_list
+
+        return "default"
+
+
+class ReadDocumentNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["files"]
+
+    async def exec_async(self, file_path):
+        try:
+            document_text = await load_document(file_path)
+            return {
+                "file_path": file_path,
+                "file_name": file_path.split("/")[-1],
+                "text": document_text,
+                "text_length": len(document_text),
+                "status": "done",
+                "message": "",
+            }
+        except Exception as e:
+            return {
+                "file_path": file_path,
+                "file_name": file_path.split("/")[-1],
+                "text": "",
+                "text_length": 0,
+                "status": "error",
+                "message": str(e),
+            }
+
+    async def post_async(self, shared, prep_res, exec_res):
+        shared["documents"] = exec_res
+        return "default"
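The three nodes above are meant to run as one pipeline: `ReadDocumentNode` fills `shared["documents"]`, `ChunkDocumentsNode` fans each document out into uuid-tagged chunks, and `EmbeddingDocumentsNode` attaches a vector to each chunk. A wiring sketch, assuming PocketFlow's `>>` chaining and `AsyncFlow(start=...)` constructor (the test file builds its flow from the same classes, but its exact wiring lines are outside this diff):

# Sketch only: PocketFlow-style chaining is an assumption here.
import asyncio
from src.pipeline.core.nodes import ReadDocumentNode, ChunkDocumentsNode, EmbeddingDocumentsNode
from src.pipeline.core.pocket_flow import AsyncFlow

read, chunk, embed = ReadDocumentNode(), ChunkDocumentsNode(), EmbeddingDocumentsNode()
read >> chunk >> embed          # every post_async returns "default", so the nodes run in order
flow = AsyncFlow(start=read)

shared = {"files": ["./files/example.pdf"]}  # hypothetical input
asyncio.run(flow.run_async(shared))
# shared["documents"] now holds {"text", "file_name", "uuid", "embedding"} chunks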
@@ -3,8 +3,30 @@ import docx
 import fitz  # PyMuPDF
 import aiofiles
 import io
 import os
+import sys
 from pathlib import Path
 from PIL import Image
+from loguru import logger
+from src.pipeline.config import config
+
+# -----------------------------
+# Logging
+# -----------------------------
+
+logger.remove()  # clear existing handlers (including duplicates loaded via pytest)
+logger.add(sys.stdout, level=config["logger_level"], colorize=True)
+logger.add(
+    "logs/pipeline.log",
+    level=config["logger_level"],
+    rotation="10 MB",    # rotate automatically
+    retention="7 days",  # retention period
+    compression="zip",   # compress rotated files
+    enqueue=True,        # thread-safe
+    colorize=False,      # no colors needed in the file
+    backtrace=True,
+    diagnose=True,
+)
+
 # -----------------------------
 # Text loading
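Because loguru exposes a single global `logger`, configuring it once here at import of the utils module is enough; every other module just imports it, as the updated test below does. A brief usage sketch:

# Sketch: using the logger configured above from any other module.
from src.pipeline.core.utils import logger

logger.debug("chunked {} into {} pieces", "demo.pdf", 12)  # stdout + logs/pipeline.log
try:
    1 / 0
except ZeroDivisionError:
    logger.exception("demo failure")  # backtrace/diagnose yield an annotated traceback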
@@ -1,4 +0,0 @@
-from src.pipeline.nodes.chunk_document_node import ChunkDocumentsNode
-from src.pipeline.nodes.read_document_node import ReadDocumentNode
-
-__all__ = ["ChunkDocumentsNode", "ReadDocumentNode"]
@@ -1,32 +0,0 @@
-import uuid
-from src.pipeline.core.pocket_flow import AsyncBatchNode
-from src.pipeline.core.utils import fixed_size_chunk
-import re
-
-
-class ChunkDocumentsNode(AsyncBatchNode):
-    async def prep_async(self, shared):
-        return shared["documents"]
-
-    async def exec_async(self, document):
-        """
-        :param document: {text, file_name}
-        """
-        # print(f"document: {document}")
-        text = document["text"]
-        # first replace runs of tabs and spaces with a single space
-        text = re.sub(r"[ \t]+", " ", text)
-        # then collapse any remaining repeated spaces into one
-        text = re.sub(r" +", " ", text)
-        # strip leading/trailing whitespace
-        text = text.strip()
-        return [{"text": x, "file_name": document["file_name"], "uuid": uuid.uuid4().hex} for x in fixed_size_chunk(text)]
-
-    async def post_async(self, shared, prep_res, exec_res_list):
-        all_chunks = []
-        for chunks in exec_res_list:
-            all_chunks.extend(chunks)
-
-        shared["documents"] = all_chunks
-
-        return "default"
@@ -1,19 +0,0 @@
-from src.pipeline.core.pocket_flow import AsyncBatchNode
-from src.pipeline.llm import get_embedding
-
-
-class EmbeddingDocumentsNode(AsyncBatchNode):
-    async def prep_async(self, shared):
-        return shared["documents"]
-
-    async def exec_async(self, document):
-        """
-        :param document: {text, file_name}
-        """
-        return {**document, "embedding": await get_embedding(document["text"])}
-
-    async def post_async(self, shared, prep_res, exec_res_list):
-
-        shared["documents"] = exec_res_list
-
-        return "default"
@@ -1,32 +0,0 @@
-from src.pipeline.core.pocket_flow import AsyncBatchNode
-from src.pipeline.core.utils import load_document
-
-
-class ReadDocumentNode(AsyncBatchNode):
-    async def prep_async(self, shared):
-        return shared["files"]
-
-    async def exec_async(self, file_path):
-        try:
-            document_text = await load_document(file_path)
-            return {
-                "file_path": file_path,
-                "file_name": file_path.split("/")[-1],
-                "text": document_text,
-                "text_length": len(document_text),
-                "status": "done",
-                "message": "",
-            }
-        except Exception as e:
-            return {
-                "file_path": file_path,
-                "file_name": file_path.split("/")[-1],
-                "text": "",
-                "text_length": 0,
-                "status": "error",
-                "message": str(e),
-            }
-
-    async def post_async(self, shared, prep_res, exec_res):
-        shared["documents"] = exec_res
-        return "default"
@@ -1,10 +1,8 @@
 import pytest
 import json
-from src.pipeline.nodes.read_document_node import ReadDocumentNode
-from src.pipeline.nodes.chunk_document_node import ChunkDocumentsNode
-from src.pipeline.nodes.embedding_document_node import EmbeddingDocumentsNode
-# from src.pipeline.nodes import ReadDocumentNode, ChunkDocumentsNode
+from src.pipeline.core.nodes import ReadDocumentNode, ChunkDocumentsNode, EmbeddingDocumentsNode
 from src.pipeline.core.pocket_flow import AsyncFlow
+from src.pipeline.core.utils import logger


 @pytest.mark.asyncio
@@ -15,9 +13,9 @@ async def test_embedding():
     shared = {
         "files": [
             "./files/带图片的.pdf",
-            # "./files/大白智问-API接入文档-V1.2.2.pdf",
-            # "./files/我来帮您创建一个美观简洁的微信小程序订单详情页面。首先让我了解一下当前的项目结构.md",
-            # "./files/deepsearch状态.txt",
+            "./files/大白智问-API接入文档-V1.2.2.pdf",
+            "./files/我来帮您创建一个美观简洁的微信小程序订单详情页面。首先让我了解一下当前的项目结构.md",
+            "./files/deepsearch状态.txt",
         ]
     }
@@ -29,4 +27,4 @@ async def test_embedding():

     await flow.run_async(shared)

-    print(json.dumps(shared["documents"], indent=4, ensure_ascii=False))
+    logger.debug(json.dumps([{**x, "embedding": x["embedding"][:4]} for x in shared["documents"]], indent=4, ensure_ascii=False))