feat: embedding

李如威 2025-12-10 18:34:11 +08:00
parent 8b1db6dd33
commit 9686f24b60
5 changed files with 59 additions and 15 deletions

View File

@@ -12,6 +12,9 @@ class Config(TypedDict):
llm_api_key: str
llm_api_host: str
llm_model: str
embedding_api_key: str
embedding_api_host: str
embedding_model: str
def _read_config() -> Config:
@@ -22,6 +25,9 @@ def _read_config() -> Config:
"llm_api_host": os.getenv("LLM_API_HOST"),
"llm_api_key": os.getenv("LLM_API_KEY"),
"llm_model": os.getenv("LLM_MODEL"),
"embedding_api_host": os.getenv("EMBEDDING_API_HOST"),
"embedding_api_key": os.getenv("EMBEDDING_API_KEY"),
"embedding_model": os.getenv("EMBEDDING_MODEL"),
}
config = _read_config()
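
The new settings are read from the environment when src.pipeline.config is first imported, alongside the existing LLM variables. A minimal sketch of supplying them (the variable names come from the diff; the values are placeholders, and note that the host is currently posted to directly by get_embedding, so it should be the full embeddings endpoint URL):

import os

# Placeholder values only, set before the config module is imported.
os.environ.setdefault("EMBEDDING_API_HOST", "https://api.example.com/v1/embeddings")
os.environ.setdefault("EMBEDDING_API_KEY", "replace-with-a-real-key")
os.environ.setdefault("EMBEDDING_MODEL", "your-embedding-model")

from src.pipeline.config import config  # _read_config() runs at import time
print(config["embedding_model"])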

View File

@@ -1,8 +1,25 @@
from src.pipeline.config import config
import httpx
from urllib.parse import urljoin
async def chat_completion(messages, model=None):
model = model or settings.LLM_MODEL
async with httpx.AsyncClient(timeout=60) as client:
r = await client.post(settings.VLLM_CHAT_URL, json={"model": model, "messages": messages}, headers=HEADERS)
r.raise_for_status()
return r.json()
async def get_embedding(text, timeout: int = 30):
    """Fetch an embedding vector for `text` from the configured endpoint; return [] on failure."""
try:
async with httpx.AsyncClient(timeout=timeout) as client:
url = config["embedding_api_host"]
body = {
"model": config["embedding_model"],
"input": text,
}
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {config['embedding_api_key']}",
}
            # Debug output only; the headers are not printed because they carry the API key.
            print(url)
res = await client.post(url, headers=headers, json=body)
res.raise_for_status()
data = res.json()
return data["data"][0]["embedding"]
except Exception as e:
print(f"get_embedding[ERROR]: {e}")
return []
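
A quick way to exercise the helper on its own (a sketch: it assumes the EMBEDDING_* variables are set and that the endpoint returns the OpenAI-style {"data": [{"embedding": [...]}]} payload the parser expects):

import asyncio
from src.pipeline.llm import get_embedding

async def main():
    vector = await get_embedding("hello world")
    # Errors are swallowed and [] is returned, so an empty vector means the call failed.
    print(f"dimension: {len(vector)}")

asyncio.run(main())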

View File

@@ -1,6 +1,6 @@
import uuid
from src.pipeline.core.pocket_flow import AsyncBatchNode
from src.pipeline.core.utils import fixed_size_chunk
import json
import re
@@ -9,8 +9,7 @@ class ChunkDocumentsNode(AsyncBatchNode):
return shared["documents"]
async def exec_async(self, document):
""" 简单切片
:param self
"""
:param document: {text, file_name}
"""
# print(f"document: {document}")
@@ -21,16 +20,13 @@ class ChunkDocumentsNode(AsyncBatchNode):
text = re.sub(r" +", " ", text)
        # Strip leading and trailing whitespace
text = text.strip()
return [{"text": x, "file_name": document["file_name"]} for x in fixed_size_chunk(text)]
return [{"text": x, "file_name": document["file_name"], "uuid": uuid.uuid4().hex} for x in fixed_size_chunk(text)]
async def post_async(self, shared, prep_res, exec_res_list):
all_chunks = []
for chunks in exec_res_list:
all_chunks.extend(chunks)
print(f"all_chunks: {json.dumps(all_chunks, indent=2, ensure_ascii=False)}")
shared["texts"] = all_chunks
shared["documents"] = all_chunks
return "default"

View File

@@ -0,0 +1,19 @@
from src.pipeline.core.pocket_flow import AsyncBatchNode
from src.pipeline.llm import get_embedding
class EmbeddingDocumentsNode(AsyncBatchNode):
async def prep_async(self, shared):
return shared["documents"]
async def exec_async(self, document):
"""
        :param document: {text, file_name, uuid}
"""
return {**document, "embedding": await get_embedding(document["text"])}
async def post_async(self, shared, prep_res, exec_res_list):
shared["documents"] = exec_res_list
return "default"

View File

@@ -1,9 +1,12 @@
import pytest
import json
from src.pipeline.nodes.read_document_node import ReadDocumentNode
from src.pipeline.nodes.chunk_document_node import ChunkDocumentsNode
from src.pipeline.nodes.embedding_document_node import EmbeddingDocumentsNode
# from src.pipeline.nodes import ReadDocumentNode, ChunkDocumentsNode
from src.pipeline.core.pocket_flow import AsyncFlow
@pytest.mark.asyncio
async def test_embedding():
@@ -20,7 +23,10 @@ async def test_embedding():
readNode = ReadDocumentNode()
chunkNode = ChunkDocumentsNode()
readNode >> chunkNode
embeddingNode = EmbeddingDocumentsNode()
readNode >> chunkNode >> embeddingNode
flow = AsyncFlow(readNode)
await flow.run_async(shared)
print(json.dumps(shared["documents"], indent=4, ensure_ascii=False))
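
A follow-up assertion the test could add after run_async (a sketch; it assumes the embedding endpoint was reachable, since get_embedding returns [] instead of raising on failure):

# Every chunk should carry a non-empty vector, and all vectors should share one dimension.
dims = {len(doc["embedding"]) for doc in shared["documents"]}
assert dims and all(d > 0 for d in dims)
assert len(dims) == 1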