
Financial Data MCP Server

client_mcp.py (1.43 kB)
import asyncio

from dotenv import load_dotenv

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
load_dotenv()

# from langchain_groq.chat_models import ChatGroq
from langchain_openai.chat_models import ChatOpenAI
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.checkpoint.memory import MemorySaver
from langgraph.prebuilt import create_react_agent
from langchain_core.messages import HumanMessage

# Connect to the finance MCP server over streamable HTTP. The commented-out
# alternative launches the same server as a local subprocess over stdio instead.
client = MultiServerMCPClient(
    {
        "mcp_financas": {
            "url": "http://localhost:4200/mcp",
            "transport": "streamable_http",
        },
        # "mcp_financas": {
        #     "command": "python",
        #     "args": ["D:/AulasYoutube/tutorial_mcp_langgraph/server_mcp.py"],
        #     "transport": "stdio",
        # },
    }
)


async def create_graph():
    llm = ChatOpenAI(model="gpt-4.1-mini", temperature=0)
    # llm = ChatGroq(model="escolher_modelo_aqui", temperature=0)  # "escolher_modelo_aqui" = pick a Groq model here
    # Fetch the tools exposed by the MCP server and build a ReAct agent with
    # in-memory checkpointing so the conversation keeps its history.
    financas_tools = await client.get_tools()
    graph = create_react_agent(model=llm, tools=financas_tools, checkpointer=MemorySaver())
    return graph


async def main():
    # The thread_id ties every turn of this chat to one checkpointed conversation
    config = {"configurable": {"thread_id": "conversa_1"}}
    agent = await create_graph()
    while True:
        message = input("User: ")
        response = await agent.ainvoke({"messages": [HumanMessage(content=message)]}, config=config)
        print("AI: " + response["messages"][-1].content)


if __name__ == "__main__":
    asyncio.run(main())
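The client above expects an MCP server listening at http://localhost:4200/mcp. The repository's server_mcp.py is not reproduced here; what follows is a minimal sketch of a compatible server, assuming the official MCP Python SDK (FastMCP). The get_stock_price tool and its stub data are hypothetical illustrations, not the project's actual tools.

# Hypothetical server_mcp.py sketch (not the project's actual file),
# using FastMCP from the official MCP Python SDK.
from mcp.server.fastmcp import FastMCP

# port=4200 matches the client's URL; FastMCP serves streamable HTTP
# at the /mcp path by default.
mcp = FastMCP("mcp_financas", port=4200)

@mcp.tool()
def get_stock_price(ticker: str) -> str:
    """Return the latest price for a ticker (hard-coded stub for illustration)."""
    prices = {"PETR4": "38.12", "VALE3": "61.45"}
    return prices.get(ticker.upper(), "unknown ticker")

if __name__ == "__main__":
    mcp.run(transport="streamable-http")

With a server like this running (python server_mcp.py), the client's streamable_http entry connects as configured; the stdio variant in the comments would instead launch the script itself as a subprocess.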

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/gustavo-sacchi/tutorial_mcp_langgraph'
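The same lookup can be scripted. A sketch using Python's requests library, issuing the documented GET above (the shape of the returned metadata is defined by the Glama API and not shown here):

import requests

resp = requests.get("https://glama.ai/api/mcp/v1/servers/gustavo-sacchi/tutorial_mcp_langgraph")
resp.raise_for_status()
print(resp.json())  # server metadata as returned by the directory API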

If you have feedback or need assistance with the MCP directory API, please join our Discord server.