mcp_client_chainlit.py (2.62 kB)
import os

import chainlit as cl
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

# Read the OpenAI API key from the environment (do not print the key itself)
my_openai_api_key = os.getenv("openai_api_key")
if not my_openai_api_key:
    print("Warning: the 'openai_api_key' environment variable is not set.")

# MCP server configuration: a single SQLite server reachable over streamable HTTP
MCP_SERVER_CONFIG = {
    "sqlite": {
        "url": "http://localhost:8080/mcp",
        "transport": "streamable_http",
    }
}

# System prompt for the LangGraph agent
SYSTEM_PROMPT = """
You are a helpful assistant that can query a SQLite database using the provided MCP tools.
Available tools:
- list_tables: Lists all tables in the database.
- get_table_schema: Retrieves column names and data types for a specific table.
- count_rows: Counts rows in a specified table.
- execute_query: Runs read-only SQL SELECT queries and returns results.
Use these tools to answer user questions about the database.
For complex queries, use multiple tools as needed (e.g., check a table's schema before querying it).
Provide clear, concise answers based on tool outputs.
If an error occurs, explain it to the user in a friendly manner.
"""

# Initialize the LLM (e.g., OpenAI GPT-4o)
llm = ChatOpenAI(model="gpt-4o", openai_api_key=my_openai_api_key)


async def create_agent():
    """Create a LangGraph ReAct agent wired to the MCP server's tools."""
    client = MultiServerMCPClient(MCP_SERVER_CONFIG)
    tools = await client.get_tools()
    return create_react_agent(llm, tools)


@cl.on_message
async def handle_message(message: cl.Message):
    """Handle incoming user messages in Chainlit."""
    # Convert the Chainlit message to a LangChain HumanMessage
    human_message = HumanMessage(content=message.content)
    # Create the agent and invoke it with the system prompt plus the user's message
    agent = await create_agent()
    response = await agent.ainvoke(
        {"messages": [SystemMessage(content=SYSTEM_PROMPT), human_message]}
    )
    # The agent's final answer is the last message in the returned state
    final_message = response["messages"][-1].content
    # Send the response back to the user
    await cl.Message(content=final_message).send()


@cl.on_chat_start
async def on_start():
    """Greet the user when a new Chainlit chat session starts."""
    await cl.Message(
        content=(
            "Welcome to the SQLite MCP Client! Ask about the database, e.g., "
            "list tables, check schemas, count rows, or run SELECT queries."
        )
    ).send()


if __name__ == "__main__":
    print("Run this script using: chainlit run mcp_client_chainlit.py")
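
The client above assumes a SQLite MCP server is already listening at http://localhost:8080/mcp and exposes list_tables, get_table_schema, count_rows, and execute_query as MCP tools. The actual fastmcp-sql-server implementation is not shown on this page; the following is only a minimal sketch of what such a server might look like, assuming the FastMCP 2.x library and a hypothetical example.db database file.

sqlite_mcp_server.py (hypothetical sketch)

import sqlite3

from fastmcp import FastMCP

# Hypothetical database path; point this at the SQLite file you actually serve.
DB_PATH = "example.db"

mcp = FastMCP("sqlite")


def _connect():
    """Open a connection to the SQLite database."""
    return sqlite3.connect(DB_PATH)


@mcp.tool()
def list_tables() -> list[str]:
    """List all tables in the database."""
    with _connect() as conn:
        rows = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table'"
        ).fetchall()
    return [r[0] for r in rows]


@mcp.tool()
def get_table_schema(table_name: str) -> list[dict]:
    """Return column names and data types for a table."""
    # Note: table_name is interpolated directly for brevity; a real server
    # should validate it against list_tables() first.
    with _connect() as conn:
        rows = conn.execute(f"PRAGMA table_info({table_name})").fetchall()
    return [{"name": r[1], "type": r[2]} for r in rows]


@mcp.tool()
def count_rows(table_name: str) -> int:
    """Count the rows in a table."""
    with _connect() as conn:
        return conn.execute(f"SELECT COUNT(*) FROM {table_name}").fetchone()[0]


@mcp.tool()
def execute_query(query: str) -> list[tuple]:
    """Run a read-only SELECT query and return its rows."""
    if not query.strip().lower().startswith("select"):
        raise ValueError("Only SELECT queries are allowed.")
    with _connect() as conn:
        return conn.execute(query).fetchall()


if __name__ == "__main__":
    # Serve over streamable HTTP so the Chainlit client can connect at /mcp.
    mcp.run(transport="http", host="127.0.0.1", port=8080, path="/mcp")

With a server like this running, starting the client with `chainlit run mcp_client_chainlit.py` lets you ask questions such as "list the tables" or "how many rows are in the orders table", and the agent will call the matching MCP tools to answer.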
