Skip to main content
Glama

Model Context Protocol (MCP) Server

by Mark850409
gradio_app.py10.3 kB
"""Gradio web front-end for an MCP (Model Context Protocol) chat client.

Connects to an MCP server over stdio, lists its tools, and relays chat
turns through an LLM provider, executing any tool calls the model emits.
"""

import gradio as gr
import logging
import os
import json
from typing import Optional
from mcpcli.llm_client import LLMClient
from mcpcli.messages.send_tools_list import send_tools_list
from mcpcli.messages.send_call_tool import send_call_tool
from mcpcli.tools_handler import convert_to_openai_tools
from mcpcli.chat_handler import generate_system_prompt
from mcpcli.transport.stdio.stdio_client import stdio_client
from mcpcli.config import load_config
import argparse

# Configure logging
logging.basicConfig(level=logging.INFO)


class GradioState:
    """Mutable application state shared across Gradio callbacks."""

    def __init__(self):
        # (read_stream, write_stream) pairs for connected servers
        self.server_streams = []
        # Open stdio_client async context managers (for later cleanup)
        self.context_managers = []
        # LLM-format message dicts (user / assistant / function roles)
        self.chat_history = []
        self.current_provider = os.getenv("LLM_PROVIDER", "openai")
        self.current_model = os.getenv("LLM_MODEL", "gpt-4o-mini")


state = GradioState()


async def connect_server(config_file: str, server_name: str, port: Optional[int] = None):
    """Connect to an MCP server described in *config_file*.

    Any previously open connections are closed first.  Returns a status
    string for display in the UI (never raises).
    """
    try:
        # Tear down old connections before opening a new one
        for cm in state.context_managers:
            await cm.__aexit__(None, None, None)
        state.context_managers.clear()
        state.server_streams.clear()

        # gr.Number delivers a float; the transport expects an integer port
        if port is not None:
            port = int(port)

        # Load server configuration
        server_params = await load_config(config_file, server_name, port)

        # Establish stdio transport
        cm = stdio_client(server_params)
        read_stream, write_stream = await cm.__aenter__()
        state.context_managers.append(cm)
        state.server_streams.append((read_stream, write_stream))

        return f"已成功連接到伺服器: {server_name}"
    except Exception as e:
        return f"連接失敗: {str(e)}"


async def chat(
    message: str,
    provider: str,
    model: str,
    ollama_host: Optional[str] = None,
    history=None
):
    """Handle one chat turn: query the LLM, run any tool calls, update history.

    Returns the updated Gradio chatbot history (list of (user, bot) tuples).
    Errors are rendered into the history rather than raised.
    """
    # Normalize BEFORE any use: Gradio passes None on the first turn, and
    # the early-return / except paths below all concatenate onto history.
    if history is None:
        history = []
    try:
        if not state.server_streams:
            return history + [("錯誤", "尚未連接到伺服器")]

        # Fetch the server's tool list
        read_stream, write_stream = state.server_streams[0]
        tools_response = await send_tools_list(read_stream, write_stream)
        tools = tools_response.get("tools", [])

        # Build the LLM client
        client = LLMClient(
            provider=provider,
            model=model,
            ollama_host=ollama_host
        )

        # Generate the system prompt from the available tools
        system_prompt = generate_system_prompt(tools)

        # Full message list: system prompt + prior turns + this user message
        messages = [
            {"role": "system", "content": system_prompt}
        ] + state.chat_history + [
            {"role": "user", "content": message}
        ]

        # Record the user turn now so any function-role messages appended
        # during tool execution come AFTER it (correct conversation order).
        state.chat_history.append({"role": "user", "content": message})

        # Convert tools to the OpenAI function-calling schema
        openai_tools = convert_to_openai_tools(tools)

        # Get the model's response
        response = client.create_completion(messages, tools=openai_tools)
        ai_message = response.get("response", "")
        tool_calls = response.get("tool_calls", [])

        # Execute any tool calls the model requested
        tool_results = []
        if tool_calls:
            for tool_call in tool_calls:
                # Default so the except clause below never hits an unbound name
                tool_name = "unknown"
                try:
                    # Tool calls may be objects or plain dicts
                    if hasattr(tool_call, 'function'):
                        tool_name = tool_call.function.name
                        tool_args = tool_call.function.arguments
                    else:
                        tool_name = tool_call["function"]["name"]
                        tool_args = tool_call["function"]["arguments"]
                    if isinstance(tool_args, str):
                        tool_args = json.loads(tool_args)

                    # Dispatch the tool call to the MCP server
                    tool_result = await send_call_tool(
                        tool_name,
                        tool_args,
                        read_stream,
                        write_stream
                    )
                    tool_results.append(
                        f"工具 {tool_name} 執行結果: "
                        f"{json.dumps(tool_result, indent=2, ensure_ascii=False)}"
                    )

                    # Record the tool result for subsequent completions
                    state.chat_history.append({
                        "role": "function",
                        "name": tool_name,
                        "content": json.dumps(tool_result)
                    })
                except Exception as e:
                    tool_results.append(f"工具 {tool_name} 執行錯誤: {str(e)}")

        # Combine the assistant text with any tool output
        full_response = ai_message
        if tool_results:
            full_response += "\n\n" + "\n".join(tool_results)

        # Record the assistant turn
        if ai_message:
            state.chat_history.append({"role": "assistant", "content": ai_message})

        history.append((message, full_response))
        return history
    except Exception as e:
        return history + [(message, f"錯誤: {str(e)}")]


def clear_history():
    """Clear the stored LLM chat history and reset the chatbot widget."""
    state.chat_history.clear()
    return None


def create_gradio_interface():
    """Build and return the Gradio Blocks interface."""
    with gr.Blocks(title="MCP Chat Interface") as interface:
        gr.Markdown("# MCP 聊天介面")

        with gr.Row():
            with gr.Column(scale=1):
                server_config = gr.Textbox(
                    label="伺服器配置檔案",
                    value="server_config.json",
                    placeholder="輸入配置檔案路徑"
                )
                server_name = gr.Textbox(
                    label="伺服器名稱",
                    placeholder="輸入伺服器名稱"
                )
                server_port = gr.Number(
                    label="伺服器端口",
                    value=8088,
                    minimum=1,
                    maximum=65535
                )
            with gr.Column(scale=1):
                openai_key = gr.Textbox(
                    label="OpenAI API Key",
                    type="password",
                    placeholder="sk-..."
                )
                anthropic_key = gr.Textbox(
                    label="Anthropic API Key",
                    type="password",
                    placeholder="sk-ant-..."
                )
                google_key = gr.Textbox(
                    label="Google API Key",
                    type="password",
                    placeholder="..."
                )

        with gr.Row():
            with gr.Column(scale=1):
                provider = gr.Dropdown(
                    choices=["openai", "anthropic", "gemini", "ollama"],
                    value="openai",
                    label="LLM 供應商"
                )
                model = gr.Textbox(
                    label="模型名稱",
                    value="gpt-4",
                    placeholder="輸入模型名稱"
                )
                ollama_host = gr.Textbox(
                    label="Ollama 主機",
                    value="http://localhost:11434",
                    placeholder="輸入 Ollama 主機位址"
                )

        connect_btn = gr.Button("連接伺服器")
        connection_status = gr.Textbox(
            label="連接狀態",
            interactive=False,
            value="尚未連接"
        )

        chatbot = gr.Chatbot(
            height=400,
            label="聊天記錄"
        )

        with gr.Row():
            msg = gr.Textbox(
                label="輸入訊息",
                placeholder="在此輸入訊息...",
                scale=4
            )
            send_btn = gr.Button("發送", scale=1)
            clear_btn = gr.Button("清除歷史", scale=1)

        def update_api_keys(openai_key, anthropic_key, google_key):
            """Export any non-empty keys into the process environment."""
            if openai_key:
                os.environ['OPENAI_API_KEY'] = openai_key
            if anthropic_key:
                os.environ['ANTHROPIC_API_KEY'] = anthropic_key
            if google_key:
                os.environ['GOOGLE_API_KEY'] = google_key

        # Re-export keys whenever any key field changes
        for key_box in (openai_key, anthropic_key, google_key):
            key_box.change(
                update_api_keys,
                inputs=[openai_key, anthropic_key, google_key]
            )

        # Wire up events
        connect_btn.click(
            connect_server,
            inputs=[server_config, server_name, server_port],
            outputs=connection_status
        )
        msg.submit(
            chat,
            inputs=[msg, provider, model, ollama_host, chatbot],
            outputs=chatbot
        )
        send_btn.click(
            chat,
            inputs=[msg, provider, model, ollama_host, chatbot],
            outputs=chatbot
        )
        clear_btn.click(
            clear_history,
            outputs=chatbot
        )

    return interface


def main():
    """CLI entry point: parse args, export API keys, launch the UI."""
    parser = argparse.ArgumentParser(description='MCP Web Client')
    parser.add_argument('--openai-key', help='OpenAI API key')
    parser.add_argument('--anthropic-key', help='Anthropic API key')
    parser.add_argument('--google-key', help='Google API key')
    parser.add_argument('--port', type=int, default=7861, help='Server port')
    args = parser.parse_args()

    # Export API keys supplied on the command line
    if args.openai_key:
        os.environ['OPENAI_API_KEY'] = args.openai_key
        logging.info("已設置 OpenAI API key")
    if args.anthropic_key:
        os.environ['ANTHROPIC_API_KEY'] = args.anthropic_key
        logging.info("已設置 Anthropic API key")
    if args.google_key:
        os.environ['GOOGLE_API_KEY'] = args.google_key
        logging.info("已設置 Google API key")

    # Launch the Gradio interface (local only, no public share link)
    interface = create_gradio_interface()
    interface.launch(
        server_name="127.0.0.1",
        server_port=args.port,
        share=False
    )


if __name__ == "__main__":
    main()

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Mark850409/20250223_mcp-client'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.