feat: handle issues

李如威 2025-07-30 20:45:34 +08:00
parent 5f3fc6ff07
commit af9f064eaf
2 changed files with 51 additions and 39 deletions

View File

@@ -37,7 +37,6 @@ def main():
}
rag = SimpleRAG(
embedding_config=embedding_config,
rerank_config=rerank_config
)
print("RAG系统含重排功能初始化完成!")
@ -53,7 +52,6 @@ def main():
"iPhone是苹果公司生产的智能手机具有先进的技术和优秀的用户体验。",
"机器学习是人工智能的一个分支Python是机器学习领域最流行的编程语言之一。"
]
print("正在添加文档...")
rag.ingest(documents)
print(f"文档添加完成! 共添加了 {len(documents)} 个文档")

View File

@@ -38,7 +38,7 @@ class BaseRAG(ABC):
:param rerank_config: rerank configuration
embedding_config examples:
Local model name: {"type": "local", "model_name": "sentence-transformers/all-MiniLM-L6-v2"}
Local model name: {"type": "local", "model_name": "BAAI/bge-small-zh-v1.5"}
Local model path: {"type": "local", "model_path": "/path/to/your/model"}
Locally deployed API endpoint: {"type": "api", "api_url": "http://localhost:8000/embeddings", "model": "your-model"}
@@ -50,7 +50,7 @@ class BaseRAG(ABC):
self.vector_store_name = vector_store_name
self.embedding_config = embedding_config or {
"type": "local",
"model_name": "sentence-transformers/all-MiniLM-L6-v2",
"model_name": "BAAI/bge-small-zh-v1.5",
}
self.retriever_top_k = retriever_top_k
self.llm = llm
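
The default embedding model switches from sentence-transformers/all-MiniLM-L6-v2 to BAAI/bge-small-zh-v1.5, a small Chinese-oriented model that matches the Chinese demo documents. A short sketch of relying on the new default versus pointing at a local copy, assuming SimpleRAG forwards these keyword arguments to BaseRAG as shown in the first file:

```python
# Rely on the new default config ({"type": "local", "model_name": "BAAI/bge-small-zh-v1.5"}).
rag_default = SimpleRAG()

# Or use an already-downloaded copy of the model, per the docstring's model_path variant.
rag_local = SimpleRAG(
    embedding_config={"type": "local", "model_path": "/path/to/bge-small-zh-v1.5"}
)
```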
@@ -166,12 +166,16 @@ class BaseRAG(ABC):
except ImportError:
print("警告: langchain_openai未安装无法使用API接口")
# 回退到本地模型
model_name = config.get("model", "sentence-transformers/all-MiniLM-L6-v2")
model_name = config.get(
"model", "sentence-transformers/all-MiniLM-L6-v2"
)
print(f"回退到本地模型: {model_name}")
return HuggingFaceEmbeddings(
model_name=model_name,
model_kwargs=config.get("model_kwargs", {"device": "cpu"}),
encode_kwargs=config.get("encode_kwargs", {"normalize_embeddings": True}),
encode_kwargs=config.get(
"encode_kwargs", {"normalize_embeddings": True}
),
)
else:
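
When langchain_openai is missing, the API branch falls back to a local HuggingFaceEmbeddings instance, with `model_kwargs` and `encode_kwargs` now read from the config. A standalone sketch of that construction; the import here uses the langchain_huggingface package, while the original may import it from langchain_community instead:

```python
from langchain_huggingface import HuggingFaceEmbeddings

embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2",
    model_kwargs={"device": "cpu"},                # where to run the encoder
    encode_kwargs={"normalize_embeddings": True},  # unit-length vectors suit cosine search
)
vector = embeddings.embed_query("Which company makes the iPhone?")
print(len(vector))  # 384-dimensional for all-MiniLM-L6-v2
```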
@@ -191,7 +195,10 @@ class BaseRAG(ABC):
if method == "cross_encoder":
try:
from sentence_transformers import CrossEncoder
model_name = self.rerank_config.get("model", "cross-encoder/ms-marco-MiniLM-L-6-v2")
model_name = self.rerank_config.get(
"model", "cross-encoder/ms-marco-MiniLM-L-6-v2"
)
print(f"正在加载CrossEncoder重排模型: {model_name}")
return CrossEncoder(model_name)
except ImportError:
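
For reference, the CrossEncoder loaded here scores (query, document) pairs directly; a minimal sketch with sentence-transformers showing how such scores can order candidates (the actual pairing logic lives in _rerank_documents, which this hunk does not show):

```python
from sentence_transformers import CrossEncoder

reranker = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")
query = "Which company makes the iPhone?"
candidates = ["The iPhone is made by Apple.", "Python is popular for machine learning."]

scores = reranker.predict([(query, doc) for doc in candidates])  # higher = more relevant
ranked = [doc for _, doc in sorted(zip(scores, candidates), reverse=True)]
print(ranked[0])
```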
@@ -201,6 +208,7 @@ class BaseRAG(ABC):
elif method == "bge":
try:
from FlagEmbedding import FlagReranker
model_name = self.rerank_config.get("model", "BAAI/bge-reranker-base")
print(f"正在加载BGE重排模型: {model_name}")
return FlagReranker(model_name, use_fp16=True)
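
The BGE branch uses FlagEmbedding's FlagReranker, whose scoring call differs slightly from CrossEncoder. A sketch of the usual pair-scoring pattern:

```python
from FlagEmbedding import FlagReranker

reranker = FlagReranker("BAAI/bge-reranker-base", use_fp16=True)  # fp16 saves memory on GPU
scores = reranker.compute_score([
    ["Which company makes the iPhone?", "The iPhone is made by Apple."],
    ["Which company makes the iPhone?", "Python is popular for machine learning."],
])
print(scores)  # raw relevance scores, higher means more relevant
```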
@@ -212,7 +220,9 @@ class BaseRAG(ABC):
print(f"Warning: unsupported rerank method: {method}; falling back to similarity rerank")
return "similarity"
def _rerank_documents(self, query: str, documents: List[Document]) -> List[Document]:
def _rerank_documents(
self, query: str, documents: List[Document]
) -> List[Document]:
"""对检索到的文档进行重排"""
if not documents:
return documents
@@ -264,7 +274,9 @@ class BaseRAG(ABC):
return self._similarity_rerank(query, documents)
def _similarity_rerank(self, query: str, documents: List[Document]) -> List[Document]:
def _similarity_rerank(
self, query: str, documents: List[Document]
) -> List[Document]:
"""基于余弦相似度的简单重排(备选方案)"""
if not documents:
return documents
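
The cosine-similarity fallback is only partly visible in this hunk; the sketch below is an assumption about what _similarity_rerank does beyond its signature: embed the query and documents, score by cosine similarity, and sort descending.

```python
import numpy as np
from langchain_core.documents import Document

def cosine_rerank(embeddings, query: str, documents: list[Document]) -> list[Document]:
    """Order documents by cosine similarity between query and document embeddings."""
    q = np.asarray(embeddings.embed_query(query))
    d = np.asarray(embeddings.embed_documents([doc.page_content for doc in documents]))
    sims = d @ q / (np.linalg.norm(d, axis=1) * np.linalg.norm(q) + 1e-10)
    return [documents[i] for i in np.argsort(-sims)]
```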
@@ -303,7 +315,7 @@ class BaseRAG(ABC):
loader = TextLoader(file_path, encoding="utf-8")
documents = loader.load()
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
splitter = RecursiveCharacterTextSplitter(chunk_size=200, chunk_overlap=20)
return splitter.split_documents(documents)
def add_documents_to_vector_store(self, documents: List[Document]):
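
Chunking shrinks from chunk_size=1000 / chunk_overlap=200 to 200 / 20, a better fit for the short sentence-length demo documents added in the first file. A quick sketch of the splitter at this size; the import path assumes the langchain_text_splitters package (older installs expose it as langchain.text_splitter):

```python
from langchain_text_splitters import RecursiveCharacterTextSplitter

splitter = RecursiveCharacterTextSplitter(chunk_size=200, chunk_overlap=20)
text = "The iPhone is a smartphone made by Apple. " * 20  # roughly 860 characters
chunks = splitter.split_text(text)
print(len(chunks), [len(c) for c in chunks])  # several chunks, each at most 200 characters
```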
@@ -338,7 +350,9 @@ class BaseRAG(ABC):
k = k or self.retriever_top_k
return self.vector_store.similarity_search(query, k=k)
def similarity_search_with_rerank(self, query: str, k: int = None) -> List[Document]:
def similarity_search_with_rerank(
self, query: str, k: int = None
) -> List[Document]:
"""
Similarity search with reranking
"""