"""
|
||
BaseRAG 本地API接口使用示例
|
||
|
||
这个示例展示了如何配置BaseRAG使用本地部署的嵌入API接口,
|
||
以及当API不可用时如何自动回退到本地模型。
|
||
"""

import sys
import os

sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))

from base_rag import BaseRAG


class LocalAPIRAG(BaseRAG):
    def ingest(self, documents):
        """Add a batch of documents to the vector store."""
        if documents:
            self.vector_store.add_texts(documents)
            print(f"Added {len(documents)} documents to the vector store")

    def query(self, question, k=3):
        """Query the documents with reranked similarity search."""
        return self.similarity_search_with_rerank(question, k=k)


def demo_local_api():
    """Demonstrate the local API endpoint configuration."""
    print("=== Local API configuration example ===\n")

    # Local API configuration (assumes a local embedding service is running)
    api_embedding_config = {
        "type": "api",
        "api_url": "http://localhost:8080",  # hypothetical local API address
        "model": "text-embedding-model",
        "api_key": "optional-key"  # optional
    }

    print("Trying to connect to the local API...")
    try:
        rag_api = LocalAPIRAG(
            vector_store_name="api_test",
            embedding_config=api_embedding_config,
            rerank_config={"enabled": True, "method": "similarity", "top_k": 3}
        )
        print("Local API connected successfully!")
    except Exception as e:
        print(f"Local API connection failed: {e}")
        print("The system will automatically fall back to the local model")


def demo_local_model():
    """Demonstrate the local model configuration."""
    print("\n=== Local model configuration example ===\n")

    # Local model configuration
    local_embedding_config = {
        "type": "local",
        "model_name": "sentence-transformers/all-MiniLM-L6-v2"
    }

    rag_local = LocalAPIRAG(
        vector_store_name="local_test",
        embedding_config=local_embedding_config,
        rerank_config={"enabled": True, "method": "similarity", "top_k": 3}
    )

    # Test documents
    test_documents = [
        "Python is a high-level programming language with clean, concise syntax.",
        "Machine learning is an important branch of artificial intelligence.",
        "Deep learning uses neural networks to simulate how the human brain learns.",
        "Natural language processing helps computers understand and generate human language."
    ]

    print("Adding test documents...")
    rag_local.ingest(test_documents)

    # Test query
    query = "What is machine learning?"
    print(f"\nQuery: {query}")

    results = rag_local.query(query, k=2)
    print("Query results:")
    for i, doc in enumerate(results, 1):
        print(f"  {i}. {doc.page_content}")


def demo_local_path():
    """Demonstrate configuring a model from a local filesystem path."""
    print("\n=== Local model path configuration example ===\n")

    # Assumes you have a locally downloaded model
    local_path_config = {
        "type": "local",
        "model_path": "/path/to/your/local/model",  # replace with the actual path
        "model_kwargs": {"device": "cpu"}
    }

    print("Local model path configuration:")
    print(f"  Path: {local_path_config['model_path']}")
    print("  Note: make sure the path exists and contains a valid sentence-transformers model")
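    # A minimal sketch of how this configuration could be used, assuming the
    # path above points to a real sentence-transformers model; it simply
    # mirrors the constructor calls from the other demos (the store name
    # "local_path_test" is illustrative) and is left commented out so the
    # demo still runs without a downloaded model:
    #
    # rag_path = LocalAPIRAG(
    #     vector_store_name="local_path_test",
    #     embedding_config=local_path_config,
    #     rerank_config={"enabled": True, "method": "similarity", "top_k": 3}
    # )

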
def main():
    """Run all configuration demos."""
    print("BaseRAG local API and model configuration examples\n")

    # Walk through the different configuration styles
    demo_local_api()
    demo_local_model()
    demo_local_path()

    print("\n=== Configuration recommendations ===")
    print("1. Development and testing: use a local model for a quick start")
    print("2. Production: use a local API endpoint for easier scaling and management")
    print("3. Offline deployment: use a local model path, no network access required")
    print("4. Hybrid deployment: API as primary, local model as fallback")


if __name__ == "__main__":
    main()