SEC Filing MCP Server

client_runner.py (1.55 kB)
import os
import sys
import asyncio

from pydantic_ai import Agent
from openai import AsyncOpenAI
from pydantic_ai.mcp import MCPServerStreamableHTTP
from pydantic_ai.models.fallback import FallbackModel
from pydantic_ai.providers.openai import OpenAIProvider
from pydantic_ai.models.openai import OpenAIResponsesModel
from dotenv import load_dotenv

load_dotenv(override=True)

# Make the project root importable so client.prompt resolves when run directly
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from client.prompt import QUERY_SYSTEM_PROMPT


class Client:
    def __init__(self):
        self.openai_client = AsyncOpenAI(api_key=os.environ.get('OPENAI_API_KEY'))
        # Connect to the local SEC filing MCP server over streamable HTTP
        self.server = MCPServerStreamableHTTP('http://127.0.0.1:8000/mcp')
        # Try gpt-5 first, fall back to gpt-4.1 if the primary model fails
        self.model = FallbackModel(
            OpenAIResponsesModel(
                model_name='gpt-5',
                provider=OpenAIProvider(openai_client=self.openai_client)
            ),
            OpenAIResponsesModel(
                model_name='gpt-4.1',
                provider=OpenAIProvider(openai_client=self.openai_client)
            )
        )

    async def query(self, query: str) -> str:
        agent = Agent(
            self.model,
            name='query_agent',
            system_prompt=QUERY_SYSTEM_PROMPT,
            toolsets=[self.server]
        )
        result = await agent.run(query)
        return result.output


async def main(query: str):
    client = Client()
    result = await client.query(query)
    print(result)


if __name__ == '__main__':
    asyncio.run(main('What is the latest 10-K for Apple Inc.?'))
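The server module this client talks to is not shown on this page. For orientation only, below is a minimal sketch of a streamable-HTTP MCP server that the client above could connect to, written with the official MCP Python SDK's FastMCP. The module name, the host/port settings, and the get_latest_filing tool are assumptions for illustration, not the project's real API.

# server_sketch.py - hypothetical stand-in for the project's MCP server
from mcp.server.fastmcp import FastMCP

# Bind to the address the client expects: http://127.0.0.1:8000/mcp
mcp = FastMCP('SEC Filing', host='127.0.0.1', port=8000)

@mcp.tool()
def get_latest_filing(ticker: str, form_type: str = '10-K') -> str:
    """Hypothetical tool: return a summary of the latest filing of the given type."""
    # A real implementation would look the filing up on SEC EDGAR here.
    return f'Latest {form_type} for {ticker}: <placeholder>'

if __name__ == '__main__':
    # Streamable HTTP exposes the MCP endpoint at /mcp
    mcp.run(transport='streamable-http')

With a server like this running, the client's agent can discover and call the exposed tool when answering a query such as the 10-K question in main().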

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/SharhadBashar/SEC-filing-mcp'
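The same lookup from Python, as a minimal sketch; the structure of the returned JSON is not documented on this page, so the example simply prints the raw response.

import requests

# Fetch this server's record from the Glama MCP directory API
url = 'https://glama.ai/api/mcp/v1/servers/SharhadBashar/SEC-filing-mcp'
resp = requests.get(url, timeout=10)
resp.raise_for_status()
print(resp.json())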

If you have feedback or need assistance with the MCP directory API, please join our Discord server.