feat: embedding
parent 8b1db6dd33
commit 9686f24b60
@@ -12,6 +12,9 @@ class Config(TypedDict):
     llm_api_key: str
     llm_api_host: str
     llm_model: str
+    embedding_api_key: str
+    embedding_api_host: str
+    embedding_model: str


 def _read_config() -> Config:
@@ -22,6 +25,9 @@ def _read_config() -> Config:
         "llm_api_host": os.getenv("LLM_API_HOST"),
         "llm_api_key": os.getenv("LLM_API_KEY"),
         "llm_model": os.getenv("LLM_MODEL"),
+        "embedding_api_host": os.getenv("EMBEDDING_API_HOST"),
+        "embedding_api_key": os.getenv("EMBEDDING_API_KEY"),
+        "embedding_model": os.getenv("EMBEDDING_MODEL"),
     }


 config = _read_config()
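Note: _read_config() now also reads EMBEDDING_API_HOST, EMBEDDING_API_KEY and EMBEDDING_MODEL from the environment, and config is built once at import time. A minimal local setup could look like the sketch below; the variable names come from this commit, the values are placeholders.

# Hypothetical local setup -- variable names are from _read_config(); the
# values are placeholders, not part of the repository. Because config is
# created at import time, these must be set before src.pipeline.config is
# imported (e.g. in the shell environment or a conftest.py).
import os

os.environ["EMBEDDING_API_HOST"] = "http://localhost:8000/v1/embeddings"  # assumed endpoint URL
os.environ["EMBEDDING_API_KEY"] = "sk-placeholder"
os.environ["EMBEDDING_MODEL"] = "placeholder-embedding-model"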
@@ -1,8 +1,25 @@
 from src.pipeline.config import config
+import httpx
+from urllib.parse import urljoin

-async def chat_completion(messages, model=None):
-    model = model or settings.LLM_MODEL
-    async with httpx.AsyncClient(timeout=60) as client:
-        r = await client.post(settings.VLLM_CHAT_URL, json={"model": model, "messages": messages}, headers=HEADERS)
-        r.raise_for_status()
-        return r.json()
+async def get_embedding(text, timeout: int = 30):
+    try:
+        async with httpx.AsyncClient(timeout=timeout) as client:
+            url = config["embedding_api_host"]
+            body = {
+                "model": config["embedding_model"],
+                "input": text,
+            }
+            headers = {
+                "Content-Type": "application/json",
+                "Authorization": f"Bearer {config['embedding_api_key']}",
+            }
+            print(url)
+            print(headers)
+            res = await client.post(url, headers=headers, json=body)
+            res.raise_for_status()
+            data = res.json()
+            return data["data"][0]["embedding"]
+    except Exception as e:
+        print(f"get_embedding[ERROR]: {e}")
+        return []
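For reference, the sketch below shows one way get_embedding could be exercised on its own; it is not part of the commit. It assumes the embedding_* variables above are set before import, and that embedding_api_host holds the full URL of an OpenAI-compatible embeddings endpoint, since the function posts to that value directly and reads data["data"][0]["embedding"] from the response.

# Minimal usage sketch, not part of the commit. Assumes the embedding_*
# environment variables are set before src.pipeline.config is imported and
# that embedding_api_host is a full OpenAI-compatible /embeddings URL.
import asyncio

from src.pipeline.llm import get_embedding

async def main():
    vector = await get_embedding("hello world", timeout=10)
    # On success this is a list of floats; on any error get_embedding prints
    # the exception and returns [] instead of raising.
    print(len(vector))

asyncio.run(main())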
@@ -1,6 +1,6 @@
+import uuid
 from src.pipeline.core.pocket_flow import AsyncBatchNode
 from src.pipeline.core.utils import fixed_size_chunk
-import json
 import re


@@ -9,8 +9,7 @@ class ChunkDocumentsNode(AsyncBatchNode):
         return shared["documents"]

     async def exec_async(self, document):
-        """ Simple chunking
-        :param self
+        """
         :param document: {text, file_name}
         """
         # print(f"document: {document}")
@@ -21,16 +20,13 @@ class ChunkDocumentsNode(AsyncBatchNode):
         text = re.sub(r" +", " ", text)
         # strip leading/trailing whitespace
         text = text.strip()
-        return [{"text": x, "file_name": document["file_name"]} for x in fixed_size_chunk(text)]
+        return [{"text": x, "file_name": document["file_name"], "uuid": uuid.uuid4().hex} for x in fixed_size_chunk(text)]

     async def post_async(self, shared, prep_res, exec_res_list):
         all_chunks = []
         for chunks in exec_res_list:
             all_chunks.extend(chunks)

-        print(f"all_chunks: {json.dumps(all_chunks, indent=2, ensure_ascii=False)}")
-        shared["texts"] = all_chunks
+        shared["documents"] = all_chunks

         return "default"
@@ -0,0 +1,19 @@
+from src.pipeline.core.pocket_flow import AsyncBatchNode
+from src.pipeline.llm import get_embedding
+
+
+class EmbeddingDocumentsNode(AsyncBatchNode):
+    async def prep_async(self, shared):
+        return shared["documents"]
+
+    async def exec_async(self, document):
+        """
+        :param document: {text, file_name}
+        """
+        return {**document, "embedding": await get_embedding(document["text"])}
+
+    async def post_async(self, shared, prep_res, exec_res_list):
+
+        shared["documents"] = exec_res_list
+
+        return "default"
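After ChunkDocumentsNode and EmbeddingDocumentsNode have run, each element of shared["documents"] is the chunk dict with an embedding field merged in. A rough shape sketch follows (field names come from the two nodes; the values are invented for illustration):

# Illustrative shape of one element of shared["documents"] after the
# chunk -> embedding steps; all values below are made up.
chunk = {
    "text": "one fixed-size chunk of the source text",
    "file_name": "example.txt",       # carried over from the source document
    "uuid": "3f2a9c...",              # uuid.uuid4().hex added by ChunkDocumentsNode
    "embedding": [0.0123, -0.0456],   # (truncated) vector returned by get_embedding
}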
@@ -1,9 +1,12 @@
 import pytest
+import json
 from src.pipeline.nodes.read_document_node import ReadDocumentNode
 from src.pipeline.nodes.chunk_document_node import ChunkDocumentsNode
+from src.pipeline.nodes.embedding_document_node import EmbeddingDocumentsNode
 # from src.pipeline.nodes import ReadDocumentNode, ChunkDocumentsNode
 from src.pipeline.core.pocket_flow import AsyncFlow


 @pytest.mark.asyncio
 async def test_embedding():
@@ -20,7 +23,10 @@ async def test_embedding():

     readNode = ReadDocumentNode()
     chunkNode = ChunkDocumentsNode()
-    readNode >> chunkNode
+    embeddingNode = EmbeddingDocumentsNode()
+    readNode >> chunkNode >> embeddingNode
     flow = AsyncFlow(readNode)

     await flow.run_async(shared)

+    print(json.dumps(shared["documents"], indent=4, ensure_ascii=False))