# rag_server.py
from pathlib import Path
from pprint import pprint
import pandas as pd
import graphrag.api as api
from graphrag.config.load_config import load_config
from graphrag.index.typing.pipeline_run_result import PipelineRunResult
from typing import Any
from mcp.server.fastmcp import FastMCP
# MCP server instance; "rag_ML" is the server name advertised to MCP clients.
mcp = FastMCP("rag_ML")
# HTTP User-Agent string.  NOTE(review): appears unused in this file —
# confirm whether anything else imports it before removing.
USER_AGENT = "rag_ML-app/1.0"
# Project directory holding the GraphRAG settings and its indexed output.
_PROJECT_DIRECTORY = "graphrag"

# Lazily-populated cache of (config, entities, communities, community_reports)
# so the config and parquet artifacts are read from disk only once per process
# instead of on every single query.
_search_inputs = None


def _load_search_inputs():
    """Load the GraphRAG config and index artifacts, caching after first use.

    :return: tuple of (config, entities_df, communities_df, community_reports_df)
    """
    global _search_inputs
    if _search_inputs is None:
        project = Path(_PROJECT_DIRECTORY)
        output_dir = project / "output"
        _search_inputs = (
            load_config(project),
            pd.read_parquet(output_dir / "entities.parquet"),
            pd.read_parquet(output_dir / "communities.parquet"),
            pd.read_parquet(output_dir / "community_reports.parquet"),
        )
    return _search_inputs


@mcp.tool()
async def rag_ML(query: str) -> str:
    """Query information comparing Tesla and Anker Innovations.

    (Original description: 用于查询特斯拉与安克创新的对比的相关信息)

    :param query: the user's concrete question
    :return: the final answer produced by a GraphRAG global search
    """
    config, entities, communities, community_reports = _load_search_inputs()
    # The second return value (search context) is intentionally discarded;
    # only the synthesized answer is returned to the MCP client.
    response, _context = await api.global_search(
        config=config,
        entities=entities,
        communities=communities,
        community_reports=community_reports,
        community_level=2,
        dynamic_community_selection=False,
        response_type="Multiple Paragraphs",
        query=query,
    )
    # global_search's response is not guaranteed to be a plain str in every
    # configuration — coerce so the declared return type holds for callers.
    return response if isinstance(response, str) else str(response)
if __name__ == "__main__":
    # Serve over stdio so the server can be spawned as a subprocess by an
    # MCP client (e.g. an IDE or agent runtime).
    mcp.run(transport='stdio')