From 1e18cd2e76255bf2fc7c3427d80e012b1e663b5f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=9D=8E=E5=A6=82=E5=A8=81?=
Date: Thu, 11 Dec 2025 10:28:50 +0800
Subject: [PATCH] feat: logging module
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 requirements.txt                              |  3 +-
 scripts/run_test.sh                           |  1 +
 src/pipeline/config.py                        |  2 +
 src/pipeline/{llm/__init__.py => core/llm.py} |  5 +-
 src/pipeline/core/nodes.py                    | 78 +++++++++++++++++++
 src/pipeline/core/utils.py                    | 22 ++++++
 src/pipeline/nodes/__int__.py                 |  4 -
 src/pipeline/nodes/chunk_document_node.py     | 32 --------
 src/pipeline/nodes/embedding_document_node.py | 19 -----
 src/pipeline/nodes/read_document_node.py      | 32 --------
 src/tests/test_nodes.py                       | 14 ++--
 11 files changed, 112 insertions(+), 100 deletions(-)
 rename src/pipeline/{llm/__init__.py => core/llm.py} (82%)
 create mode 100644 src/pipeline/core/nodes.py
 delete mode 100644 src/pipeline/nodes/__int__.py
 delete mode 100644 src/pipeline/nodes/chunk_document_node.py
 delete mode 100644 src/pipeline/nodes/embedding_document_node.py
 delete mode 100644 src/pipeline/nodes/read_document_node.py

diff --git a/requirements.txt b/requirements.txt
index 14e5dc7..2499b92 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,4 +8,5 @@ pymupdf
 python-docx
 scikit-learn
 aiofiles
-pillow
\ No newline at end of file
+pillow
+loguru
\ No newline at end of file
diff --git a/scripts/run_test.sh b/scripts/run_test.sh
index 904a5f4..f3acf62 100644
--- a/scripts/run_test.sh
+++ b/scripts/run_test.sh
@@ -1,3 +1,4 @@
 #!/usr/bin/env bash
 export $(cat .env | xargs)
 pytest -s -W ignore::DeprecationWarning src/tests/test_nodes.py
+
diff --git a/src/pipeline/config.py b/src/pipeline/config.py
index 5853591..9f0685a 100644
--- a/src/pipeline/config.py
+++ b/src/pipeline/config.py
@@ -15,10 +15,12 @@ class Config(TypedDict):
     embedding_api_key: str
     embedding_api_host: str
     embedding_model: str
+    logger_level: str


 def _read_config() -> Config:
     return {
+        "logger_level": os.getenv("LOGGER_LEVEL", "DEBUG"),
         "host": os.getenv("HOST"),
         "port": int(os.getenv("PORT")),
         "version": os.getenv("VERSION"),
diff --git a/src/pipeline/llm/__init__.py b/src/pipeline/core/llm.py
similarity index 82%
rename from src/pipeline/llm/__init__.py
rename to src/pipeline/core/llm.py
index 1c1965e..71c1aba 100644
--- a/src/pipeline/llm/__init__.py
+++ b/src/pipeline/core/llm.py
@@ -1,10 +1,9 @@
 from src.pipeline.config import config
 import httpx
-from urllib.parse import urljoin

 async def get_embedding(text, timeout: int = 30):
     try:
-        async with httpx.AsyncClient(timeout=timeout) as client:
+        async with httpx.AsyncClient(timeout=timeout, http2=False, trust_env=False) as client:
             url = config["embedding_api_host"]
             body = {
                 "model": config["embedding_model"],
@@ -14,8 +13,6 @@
                 "Content-Type": "application/json",
                 "Authorization": f"Bearer {config['embedding_api_key']}",
             }
-            print(url)
-            print(headers)
             res = await client.post(url, headers=headers, json=body)
             res.raise_for_status()
             data = res.json()
diff --git a/src/pipeline/core/nodes.py b/src/pipeline/core/nodes.py
new file mode 100644
index 0000000..ada12d4
--- /dev/null
+++ b/src/pipeline/core/nodes.py
@@ -0,0 +1,78 @@
+import uuid
+from src.pipeline.core.pocket_flow import AsyncBatchNode
+from src.pipeline.core.utils import fixed_size_chunk, load_document
+from src.pipeline.core.llm import get_embedding
+import re
+
+
+class ChunkDocumentsNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["documents"]
+
+    async def exec_async(self, document):
+        """
+        :param document: {text, file_name}
+        """
+        # print(f"document: {document}")
+        text = document["text"]
+        # First replace tabs and similar whitespace with a single space
+        text = re.sub(r"[ \t]+", " ", text)
+        # Then collapse runs of spaces into a single space
+        text = re.sub(r" +", " ", text)
+        # Strip leading and trailing whitespace
+        text = text.strip()
+        return [{"text": x, "file_name": document["file_name"], "uuid": uuid.uuid4().hex} for x in fixed_size_chunk(text, chunk_size=10000)]
+
+    async def post_async(self, shared, prep_res, exec_res_list):
+        all_chunks = []
+        for chunks in exec_res_list:
+            all_chunks.extend(chunks)
+        shared["documents"] = all_chunks
+        return "default"
+
+
+class EmbeddingDocumentsNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["documents"]
+
+    async def exec_async(self, document):
+        """
+        :param document: {text, file_name}
+        """
+        return {**document, "embedding": await get_embedding(document["text"])}
+
+    async def post_async(self, shared, prep_res, exec_res_list):
+
+        shared["documents"] = exec_res_list
+
+        return "default"
+
+
+class ReadDocumentNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["files"]
+
+    async def exec_async(self, file_path):
+        try:
+            document_text = await load_document(file_path)
+            return {
+                "file_path": file_path,
+                "file_name": file_path.split("/")[-1],
+                "text": document_text,
+                "text_length": len(document_text),
+                "status": "done",
+                "message": "",
+            }
+        except Exception as e:
+            return {
+                "file_path": file_path,
+                "file_name": file_path.split("/")[-1],
+                "text": "",
+                "text_length": 0,
+                "status": "error",
+                "message": str(e),
+            }
+
+    async def post_async(self, shared, prep_res, exec_res):
+        shared["documents"] = exec_res
+        return "default"
diff --git a/src/pipeline/core/utils.py b/src/pipeline/core/utils.py
index f54e79a..0cfce81 100644
--- a/src/pipeline/core/utils.py
+++ b/src/pipeline/core/utils.py
@@ -3,8 +3,30 @@ import docx
 import fitz  # PyMuPDF
 import aiofiles
 import io
+import os
+import sys
 from pathlib import Path
 from PIL import Image
+from loguru import logger
+from src.pipeline.config import config
+
+# -----------------------------
+# Logging
+# -----------------------------
+
+logger.remove()  # clear existing handlers (avoids duplicates when pytest re-imports this module)
+logger.add(sys.stdout, level=config["logger_level"], colorize=True)
+logger.add(
+    "logs/pipeline.log",
+    level=config["logger_level"],
+    rotation="10 MB",  # rotate automatically
+    retention="7 days",  # retention period
+    compression="zip",  # compress rotated logs
+    enqueue=True,  # thread-safe
+    colorize=False,  # no colors in the file sink
+    backtrace=True,
+    diagnose=True,
+)

 # -----------------------------
 # Text loading
diff --git a/src/pipeline/nodes/__int__.py b/src/pipeline/nodes/__int__.py
deleted file mode 100644
index d1921c8..0000000
--- a/src/pipeline/nodes/__int__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from src.pipeline.nodes.chunk_document_node import ChunkDocumentsNode
-from src.pipeline.nodes.read_document_node import ReadDocumentNode
-
-__all__ = ["ChunkDocumentsNode", "ReadDocumentNode"]
diff --git a/src/pipeline/nodes/chunk_document_node.py b/src/pipeline/nodes/chunk_document_node.py
deleted file mode 100644
index 774feb5..0000000
--- a/src/pipeline/nodes/chunk_document_node.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import uuid
-from src.pipeline.core.pocket_flow import AsyncBatchNode
-from src.pipeline.core.utils import fixed_size_chunk
-import re
-
-
-class ChunkDocumentsNode(AsyncBatchNode):
-    async def prep_async(self, shared):
-        return shared["documents"]
-
-    async def exec_async(self, document):
-        """
-        :param document: {text, file_name}
-        """
-        # print(f"document: {document}")
-        text = document["text"]
-        # First replace tabs and similar whitespace with a single space
-        text = re.sub(r"[ \t]+", " ", text)
-        # Then collapse runs of spaces into a single space
-        text = re.sub(r" +", " ", text)
-        # Strip leading and trailing whitespace
-        text = text.strip()
-        return [{"text": x, "file_name": document["file_name"], "uuid": uuid.uuid4().hex} for x in fixed_size_chunk(text)]
-
-    async def post_async(self, shared, prep_res, exec_res_list):
-        all_chunks = []
-        for chunks in exec_res_list:
-            all_chunks.extend(chunks)
-
-        shared["documents"] = all_chunks
-
-        return "default"
diff --git a/src/pipeline/nodes/embedding_document_node.py b/src/pipeline/nodes/embedding_document_node.py
deleted file mode 100644
index 472c2cf..0000000
--- a/src/pipeline/nodes/embedding_document_node.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from src.pipeline.core.pocket_flow import AsyncBatchNode
-from src.pipeline.llm import get_embedding
-
-
-class EmbeddingDocumentsNode(AsyncBatchNode):
-    async def prep_async(self, shared):
-        return shared["documents"]
-
-    async def exec_async(self, document):
-        """
-        :param document: {text, file_name}
-        """
-        return {**document, "embedding": await get_embedding(document["text"])}
-
-    async def post_async(self, shared, prep_res, exec_res_list):
-
-        shared["documents"] = exec_res_list
-
-        return "default"
diff --git a/src/pipeline/nodes/read_document_node.py b/src/pipeline/nodes/read_document_node.py
deleted file mode 100644
index 50df9a5..0000000
--- a/src/pipeline/nodes/read_document_node.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from src.pipeline.core.pocket_flow import AsyncBatchNode
-from src.pipeline.core.utils import load_document
-
-
-class ReadDocumentNode(AsyncBatchNode):
-    async def prep_async(self, shared):
-        return shared["files"]
-
-    async def exec_async(self, file_path):
-        try:
-            document_text = await load_document(file_path)
-            return {
-                "file_path": file_path,
-                "file_name": file_path.split("/")[-1],
-                "text": document_text,
-                "text_length": len(document_text),
-                "status": "done",
-                "message": "",
-            }
-        except Exception as e:
-            return {
-                "file_path": file_path,
-                "file_name": file_path.split("/")[-1],
-                "text": "",
-                "text_length": 0,
-                "status": "error",
-                "message": str(e),
-            }
-
-    async def post_async(self, shared, prep_res, exec_res):
-        shared["documents"] = exec_res
-        return "default"
diff --git a/src/tests/test_nodes.py b/src/tests/test_nodes.py
index 59707d9..b2af5be 100644
--- a/src/tests/test_nodes.py
+++ b/src/tests/test_nodes.py
@@ -1,10 +1,8 @@
 import pytest
 import json
-from src.pipeline.nodes.read_document_node import ReadDocumentNode
-from src.pipeline.nodes.chunk_document_node import ChunkDocumentsNode
-from src.pipeline.nodes.embedding_document_node import EmbeddingDocumentsNode
-# from src.pipeline.nodes import ReadDocumentNode, ChunkDocumentsNode
+from src.pipeline.core.nodes import ReadDocumentNode, ChunkDocumentsNode, EmbeddingDocumentsNode
 from src.pipeline.core.pocket_flow import AsyncFlow
+from src.pipeline.core.utils import logger


 @pytest.mark.asyncio
@@ -15,9 +13,9 @@ async def test_embedding():
     shared = {
         "files": [
             "./files/带图片的.pdf",
-            "./files/大白智问-API接入文档-V1.2.2.pdf",
-            "./files/我来帮您创建一个美观简洁的微信小程序订单详情页面。首先让我了解一下当前的项目结构.md",
-            "./files/deepsearch状态.txt",
+            # "./files/大白智问-API接入文档-V1.2.2.pdf",
+            # "./files/我来帮您创建一个美观简洁的微信小程序订单详情页面。首先让我了解一下当前的项目结构.md",
+            # "./files/deepsearch状态.txt",
         ]
     }

@@ -29,4 +27,4 @@

     await flow.run_async(shared)

-    print(json.dumps(shared["documents"], indent=4, ensure_ascii=False))
+    logger.debug(json.dumps([{**x, "embedding": x["embedding"][:4]} for x in shared["documents"]], indent=4, ensure_ascii=False))