Skip to main content
Glama

MCP Search Server

by Nghiauet
main.py (2.59 kB)
"""Example MCP agent script: stream stock-analysis answers from a Google LLM.

MCPApp loads its configuration automatically from:
  - mcp_agent.config.yaml   (non-sensitive configuration)
  - mcp_agent.secrets.yaml  (API keys and sensitive data)
"""

import asyncio
import time

from pydantic import BaseModel

from prompt import SYSTEM_INSTRUCTION_FOR_AUTOMATION
from mcp_agent.app import MCPApp
from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm_google import GoogleAugmentedLLM


class Essay(BaseModel):
    """Structured essay output.

    NOTE(review): not referenced anywhere in this script — presumably kept for
    structured-output calls elsewhere; confirm before removing.
    """

    title: str
    body: str
    conclusion: str


# No programmatic settings: MCPApp picks up the YAML config files listed above.
app = MCPApp(name="mcp_basic_agent")


async def _stream_answer(llm, message: str) -> None:
    """Stream the LLM's answer for *message* to stdout, chunk by chunk.

    Args:
        llm: An attached augmented LLM exposing ``generate_str_stream``.
        message: The user prompt to send.
    """
    print("LLM Response (streaming text):")
    async for text_chunk in llm.generate_str_stream(message=message):
        # Each chunk is already a plain string; flush so output appears live.
        print(text_chunk, end="", flush=True)
    print("\n")


async def example_usage():
    """Connect the stock-information agent and stream two example queries."""
    async with app.run() as agent_app:
        logger = agent_app.logger
        context = agent_app.context
        logger.info("Current config:", data=context.config.model_dump())

        stock_agent = Agent(
            name="stock_information_agent",
            instruction=SYSTEM_INSTRUCTION_FOR_AUTOMATION,
            server_names=["stock_information_mcp_server"],
        )

        async with stock_agent:
            logger.info(
                "stock_information_agent: Connected to server, calling list_tools..."
            )
            result = await stock_agent.list_tools()
            logger.info("Tools available:", data=result.model_dump())

            llm = await stock_agent.attach_llm(GoogleAugmentedLLM)

            # First example query: balance-sheet analysis.
            logger.info("Starting streaming text generation...")
            await _stream_answer(
                llm,
                "Given the balance sheet of Vinamilk, please provide a brief "
                "analysis of their financial position.",
            )

            # Second example query: key metrics. The original printed a
            # leading blank line before this header; keep that output intact.
            logger.info("Starting streaming text generation...")
            print()
            await _stream_answer(
                llm,
                "What are the key financial metrics I should look at when "
                "analyzing Vinamilk?",
            )


if __name__ == "__main__":
    # perf_counter is monotonic — the correct clock for elapsed-time timing
    # (time.time can jump with wall-clock adjustments).
    start = time.perf_counter()
    asyncio.run(example_usage())
    elapsed = time.perf_counter() - start
    print(f"Total run time: {elapsed:.2f}s")

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Nghiauet/mcp-agent'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.