
MCP Agile Flow

by smian0
fastmcp-examples.txt (21.2 kB)
This file is a merged representation of the entire codebase, combined into a single document by Repomix.

================================================================
File Summary
================================================================

Purpose:
--------
This file contains a packed representation of the entire repository's contents.
It is designed to be easily consumable by AI systems for analysis, code review,
or other automated processes.

File Format:
------------
The content is organized as follows:
1. This summary section
2. Repository information
3. Directory structure
4. Multiple file entries, each consisting of:
  a. A separator line (================)
  b. The file path (File: path/to/file)
  c. Another separator line
  d. The full contents of the file
  e. A blank line

Usage Guidelines:
-----------------
- This file should be treated as read-only. Any changes should be made to the original repository files, not this packed version.
- When processing this file, use the file path to distinguish between different files in the repository.
- Be aware that this file may contain sensitive information. Handle it with the same level of security as you would the original repository.

Notes:
------
- Some files may have been excluded based on .gitignore rules and Repomix's configuration
- Binary files are not included in this packed representation. Please refer to the Repository Structure section for a complete list of file paths, including binary files
- Files matching patterns in .gitignore are excluded
- Files matching default ignore patterns are excluded
- Files are sorted by Git change count (files with more changes are at the bottom)

Additional Info:
----------------

================================================================
Directory Structure
================================================================
complex_inputs.py
desktop.py
echo.py
memory.py
parameter_descriptions.py
readme-quickstart.py
screenshot.py
simple_echo.py
text_me.py
unicode_example.py

================================================================
Files
================================================================

================
File: complex_inputs.py
================
"""
FastMCP Complex inputs Example

Demonstrates validation via pydantic with complex models.
"""

from typing import Annotated

from pydantic import BaseModel, Field

from mcp.server.fastmcp import FastMCP

mcp = FastMCP("Shrimp Tank")


class ShrimpTank(BaseModel):
    class Shrimp(BaseModel):
        name: Annotated[str, Field(max_length=10)]

    shrimp: list[Shrimp]


@mcp.tool()
def name_shrimp(
    tank: ShrimpTank,
    # You can use pydantic Field in function signatures for validation.
    extra_names: Annotated[list[str], Field(max_length=10)],
) -> list[str]:
    """List all shrimp names in the tank"""
    return [shrimp.name for shrimp in tank.shrimp] + extra_names

================
File: desktop.py
================
"""
FastMCP Desktop Example

A simple example that exposes the desktop directory as a resource.
"""

from pathlib import Path

from mcp.server.fastmcp import FastMCP

# Create server
mcp = FastMCP("Demo")


@mcp.resource("dir://desktop")
def desktop() -> list[str]:
    """List the files in the user's desktop"""
    desktop = Path.home() / "Desktop"
    return [str(f) for f in desktop.iterdir()]


@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    return a + b

================
File: echo.py
================
"""
FastMCP Echo Server
"""

from mcp.server.fastmcp import FastMCP

# Create server
mcp = FastMCP("Echo Server")


@mcp.tool()
def echo_tool(text: str) -> str:
    """Echo the input text"""
    return text


@mcp.resource("echo://static")
def echo_resource() -> str:
    return "Echo!"


@mcp.resource("echo://{text}")
def echo_template(text: str) -> str:
    """Echo the input text"""
    return f"Echo: {text}"


@mcp.prompt("echo")
def echo_prompt(text: str) -> str:
    return text

================
File: memory.py
================
# /// script
# dependencies = ["pydantic-ai-slim[openai]", "asyncpg", "numpy", "pgvector"]
# ///

# uv pip install 'pydantic-ai-slim[openai]' asyncpg numpy pgvector

"""
Recursive memory system inspired by the human brain's clustering of memories.
Uses OpenAI's 'text-embedding-3-small' model and pgvector for efficient
similarity search.
"""

import asyncio
import math
import os
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Annotated, Self

import asyncpg
import numpy as np
from openai import AsyncOpenAI
from pgvector.asyncpg import register_vector  # Import register_vector
from pydantic import BaseModel, Field
from pydantic_ai import Agent

from mcp.server.fastmcp import FastMCP

MAX_DEPTH = 5
SIMILARITY_THRESHOLD = 0.7
DECAY_FACTOR = 0.99
REINFORCEMENT_FACTOR = 1.1

DEFAULT_LLM_MODEL = "openai:gpt-4o"
DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small"

mcp = FastMCP(
    "memory",
    dependencies=[
        "pydantic-ai-slim[openai]",
        "asyncpg",
        "numpy",
        "pgvector",
    ],
)

DB_DSN = "postgresql://postgres:postgres@localhost:54320/memory_db"
# reset memory with rm ~/.fastmcp/{USER}/memory/*
PROFILE_DIR = (
    Path.home() / ".fastmcp" / os.environ.get("USER", "anon") / "memory"
).resolve()
PROFILE_DIR.mkdir(parents=True, exist_ok=True)


def cosine_similarity(a: list[float], b: list[float]) -> float:
    a_array = np.array(a, dtype=np.float64)
    b_array = np.array(b, dtype=np.float64)
    return np.dot(a_array, b_array) / (
        np.linalg.norm(a_array) * np.linalg.norm(b_array)
    )


async def do_ai[T](
    user_prompt: str,
    system_prompt: str,
    result_type: type[T] | Annotated,
    deps=None,
) -> T:
    agent = Agent(
        DEFAULT_LLM_MODEL,
        system_prompt=system_prompt,
        result_type=result_type,
    )
    result = await agent.run(user_prompt, deps=deps)
    return result.data


@dataclass
class Deps:
    openai: AsyncOpenAI
    pool: asyncpg.Pool


async def get_db_pool() -> asyncpg.Pool:
    async def init(conn):
        await conn.execute("CREATE EXTENSION IF NOT EXISTS vector;")
        await register_vector(conn)

    pool = await asyncpg.create_pool(DB_DSN, init=init)
    return pool


class MemoryNode(BaseModel):
    id: int | None = None
    content: str
    summary: str = ""
    importance: float = 1.0
    access_count: int = 0
    timestamp: float = Field(
        default_factory=lambda: datetime.now(timezone.utc).timestamp()
    )
    embedding: list[float]

    @classmethod
    async def from_content(cls, content: str, deps: Deps):
        embedding = await get_embedding(content, deps)
        return cls(content=content, embedding=embedding)

    async def save(self, deps: Deps):
        async with deps.pool.acquire() as conn:
            if self.id is None:
                result = await conn.fetchrow(
                    """
                    INSERT INTO memories (content, summary, importance, access_count,
                        timestamp, embedding)
                    VALUES ($1, $2, $3, $4, $5, $6)
                    RETURNING id
                    """,
                    self.content,
                    self.summary,
                    self.importance,
                    self.access_count,
                    self.timestamp,
                    self.embedding,
                )
                self.id = result["id"]
            else:
                await conn.execute(
                    """
                    UPDATE memories
                    SET content = $1, summary = $2, importance = $3,
                        access_count = $4, timestamp = $5, embedding = $6
                    WHERE id = $7
                    """,
                    self.content,
                    self.summary,
                    self.importance,
                    self.access_count,
                    self.timestamp,
                    self.embedding,
                    self.id,
                )

    async def merge_with(self, other: Self, deps: Deps):
        self.content = await do_ai(
            f"{self.content}\n\n{other.content}",
            "Combine the following two texts into a single, coherent text.",
            str,
            deps,
        )
        self.importance += other.importance
        self.access_count += other.access_count
        self.embedding = [(a + b) / 2 for a, b in zip(self.embedding, other.embedding)]
        self.summary = await do_ai(
            self.content, "Summarize the following text concisely.", str, deps
        )
        await self.save(deps)
        # Delete the merged node from the database
        if other.id is not None:
            await delete_memory(other.id, deps)

    def get_effective_importance(self):
        return self.importance * (1 + math.log(self.access_count + 1))


async def get_embedding(text: str, deps: Deps) -> list[float]:
    embedding_response = await deps.openai.embeddings.create(
        input=text,
        model=DEFAULT_EMBEDDING_MODEL,
    )
    return embedding_response.data[0].embedding


async def delete_memory(memory_id: int, deps: Deps):
    async with deps.pool.acquire() as conn:
        await conn.execute("DELETE FROM memories WHERE id = $1", memory_id)


async def add_memory(content: str, deps: Deps):
    new_memory = await MemoryNode.from_content(content, deps)
    await new_memory.save(deps)

    similar_memories = await find_similar_memories(new_memory.embedding, deps)
    for memory in similar_memories:
        if memory.id != new_memory.id:
            await new_memory.merge_with(memory, deps)

    await update_importance(new_memory.embedding, deps)

    await prune_memories(deps)

    return f"Remembered: {content}"


async def find_similar_memories(embedding: list[float], deps: Deps) -> list[MemoryNode]:
    async with deps.pool.acquire() as conn:
        rows = await conn.fetch(
            """
            SELECT id, content, summary, importance, access_count, timestamp, embedding
            FROM memories
            ORDER BY embedding <-> $1
            LIMIT 5
            """,
            embedding,
        )
        memories = [
            MemoryNode(
                id=row["id"],
                content=row["content"],
                summary=row["summary"],
                importance=row["importance"],
                access_count=row["access_count"],
                timestamp=row["timestamp"],
                embedding=row["embedding"],
            )
            for row in rows
        ]
        return memories


async def update_importance(user_embedding: list[float], deps: Deps):
    async with deps.pool.acquire() as conn:
        rows = await conn.fetch(
            "SELECT id, importance, access_count, embedding FROM memories"
        )
        for row in rows:
            memory_embedding = row["embedding"]
            similarity = cosine_similarity(user_embedding, memory_embedding)
            if similarity > SIMILARITY_THRESHOLD:
                new_importance = row["importance"] * REINFORCEMENT_FACTOR
                new_access_count = row["access_count"] + 1
            else:
                new_importance = row["importance"] * DECAY_FACTOR
                new_access_count = row["access_count"]
            await conn.execute(
                """
                UPDATE memories
                SET importance = $1, access_count = $2
                WHERE id = $3
                """,
                new_importance,
                new_access_count,
                row["id"],
            )


async def prune_memories(deps: Deps):
    async with deps.pool.acquire() as conn:
        rows = await conn.fetch(
            """
            SELECT id, importance, access_count
            FROM memories
            ORDER BY importance DESC
            OFFSET $1
            """,
            MAX_DEPTH,
        )
        for row in rows:
            await conn.execute("DELETE FROM memories WHERE id = $1", row["id"])


async def display_memory_tree(deps: Deps) -> str:
    async with deps.pool.acquire() as conn:
        rows = await conn.fetch(
            """
            SELECT content, summary, importance, access_count
            FROM memories
            ORDER BY importance DESC
            LIMIT $1
            """,
            MAX_DEPTH,
        )
        result = ""
        for row in rows:
            effective_importance = row["importance"] * (
                1 + math.log(row["access_count"] + 1)
            )
            summary = row["summary"] or row["content"]
            result += f"- {summary} (Importance: {effective_importance:.2f})\n"
        return result


@mcp.tool()
async def remember(
    contents: list[str] = Field(
        description="List of observations or memories to store"
    ),
):
    deps = Deps(openai=AsyncOpenAI(), pool=await get_db_pool())
    try:
        return "\n".join(
            await asyncio.gather(*[add_memory(content, deps) for content in contents])
        )
    finally:
        await deps.pool.close()


@mcp.tool()
async def read_profile() -> str:
    deps = Deps(openai=AsyncOpenAI(), pool=await get_db_pool())
    profile = await display_memory_tree(deps)
    await deps.pool.close()
    return profile


async def initialize_database():
    pool = await asyncpg.create_pool(
        "postgresql://postgres:postgres@localhost:54320/postgres"
    )
    try:
        async with pool.acquire() as conn:
            await conn.execute("""
                SELECT pg_terminate_backend(pg_stat_activity.pid)
                FROM pg_stat_activity
                WHERE pg_stat_activity.datname = 'memory_db'
                AND pid <> pg_backend_pid();
            """)
            await conn.execute("DROP DATABASE IF EXISTS memory_db;")
            await conn.execute("CREATE DATABASE memory_db;")
    finally:
        await pool.close()

    pool = await asyncpg.create_pool(DB_DSN)
    try:
        async with pool.acquire() as conn:
            await conn.execute("CREATE EXTENSION IF NOT EXISTS vector;")
            await register_vector(conn)

            await conn.execute("""
                CREATE TABLE IF NOT EXISTS memories (
                    id SERIAL PRIMARY KEY,
                    content TEXT NOT NULL,
                    summary TEXT,
                    importance REAL NOT NULL,
                    access_count INT NOT NULL,
                    timestamp DOUBLE PRECISION NOT NULL,
                    embedding vector(1536) NOT NULL
                );
                CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories
                    USING hnsw (embedding vector_l2_ops);
            """)
    finally:
        await pool.close()


if __name__ == "__main__":
    asyncio.run(initialize_database())

================
File: parameter_descriptions.py
================
"""
FastMCP Example showing parameter descriptions
"""

from pydantic import Field

from mcp.server.fastmcp import FastMCP

# Create server
mcp = FastMCP("Parameter Descriptions Server")


@mcp.tool()
def greet_user(
    name: str = Field(description="The name of the person to greet"),
    title: str = Field(description="Optional title like Mr/Ms/Dr", default=""),
    times: int = Field(description="Number of times to repeat the greeting", default=1),
) -> str:
    """Greet a user with optional title and repetition"""
    greeting = f"Hello {title + ' ' if title else ''}{name}!"
    return "\n".join([greeting] * times)

================
File: readme-quickstart.py
================
from mcp.server.fastmcp import FastMCP

# Create an MCP server
mcp = FastMCP("Demo")


# Add an addition tool
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    return a + b


# Add a dynamic greeting resource
@mcp.resource("greeting://{name}")
def get_greeting(name: str) -> str:
    """Get a personalized greeting"""
    return f"Hello, {name}!"

================
File: screenshot.py
================
"""
FastMCP Screenshot Example

Give Claude a tool to capture and view screenshots.
"""

import io

from mcp.server.fastmcp import FastMCP
from mcp.server.fastmcp.utilities.types import Image

# Create server
mcp = FastMCP("Screenshot Demo", dependencies=["pyautogui", "Pillow"])


@mcp.tool()
def take_screenshot() -> Image:
    """
    Take a screenshot of the user's screen and return it as an image. Use
    this tool anytime the user wants you to look at something they're doing.
    """
    import pyautogui

    buffer = io.BytesIO()

    # if the file exceeds ~1MB, it will be rejected by Claude
    screenshot = pyautogui.screenshot()
    screenshot.convert("RGB").save(buffer, format="JPEG", quality=60, optimize=True)
    return Image(data=buffer.getvalue(), format="jpeg")

================
File: simple_echo.py
================
"""
FastMCP Echo Server
"""

from mcp.server.fastmcp import FastMCP

# Create server
mcp = FastMCP("Echo Server")


@mcp.tool()
def echo(text: str) -> str:
    """Echo the input text"""
    return text

================
File: text_me.py
================
# /// script
# dependencies = []
# ///

"""
FastMCP Text Me Server
--------------------------------
This defines a simple FastMCP server that sends a text message to a phone
number via https://surgemsg.com/.

To run this example, create a `.env` file with the following values:

SURGE_API_KEY=...
SURGE_ACCOUNT_ID=...
SURGE_MY_PHONE_NUMBER=...
SURGE_MY_FIRST_NAME=...
SURGE_MY_LAST_NAME=...

Visit https://surgemsg.com/ and click "Get Started" to obtain these values.
"""

from typing import Annotated

import httpx
from pydantic import BeforeValidator
from pydantic_settings import BaseSettings, SettingsConfigDict

from mcp.server.fastmcp import FastMCP


class SurgeSettings(BaseSettings):
    model_config: SettingsConfigDict = SettingsConfigDict(
        env_prefix="SURGE_", env_file=".env"
    )

    api_key: str
    account_id: str
    my_phone_number: Annotated[
        str, BeforeValidator(lambda v: "+" + v if not v.startswith("+") else v)
    ]
    my_first_name: str
    my_last_name: str


# Create server
mcp = FastMCP("Text me")
surge_settings = SurgeSettings()  # type: ignore


@mcp.tool(name="textme", description="Send a text message to me")
def text_me(text_content: str) -> str:
    """Send a text message to a phone number via https://surgemsg.com/"""
    with httpx.Client() as client:
        response = client.post(
            "https://api.surgemsg.com/messages",
            headers={
                "Authorization": f"Bearer {surge_settings.api_key}",
                "Surge-Account": surge_settings.account_id,
                "Content-Type": "application/json",
            },
            json={
                "body": text_content,
                "conversation": {
                    "contact": {
                        "first_name": surge_settings.my_first_name,
                        "last_name": surge_settings.my_last_name,
                        "phone_number": surge_settings.my_phone_number,
                    }
                },
            },
        )
        response.raise_for_status()
        return f"Message sent: {text_content}"

================
File: unicode_example.py
================
"""
Example FastMCP server that uses Unicode characters in various places to help
test Unicode handling in tools and inspectors.
"""

from mcp.server.fastmcp import FastMCP

mcp = FastMCP()


@mcp.tool(
    description="🌟 A tool that uses various Unicode characters in its description: "
    "á é í ó ú ñ 漢字 🎉"
)
def hello_unicode(name: str = "世界", greeting: str = "¡Hola") -> str:
    """
    A simple tool that demonstrates Unicode handling in:
    - Tool description (emojis, accents, CJK characters)
    - Parameter defaults (CJK characters)
    - Return values (Spanish punctuation, emojis)
    """
    return f"{greeting}, {name}! 👋"


@mcp.tool(description="🎨 Tool that returns a list of emoji categories")
def list_emoji_categories() -> list[str]:
    """Returns a list of emoji categories with emoji examples."""
    return [
        "😀 Smileys & Emotion",
        "👋 People & Body",
        "🐶 Animals & Nature",
        "🍎 Food & Drink",
        "⚽ Activities",
        "🌍 Travel & Places",
        "💡 Objects",
        "❤️ Symbols",
        "🚩 Flags",
    ]


@mcp.tool(description="🔤 Tool that returns text in different scripts")
def multilingual_hello() -> str:
    """Returns hello in different scripts and writing systems."""
    return "\n".join(
        [
            "English: Hello!",
            "Spanish: ¡Hola!",
            "French: Bonjour!",
            "German: Grüß Gott!",
            "Russian: Привет!",
            "Greek: Γεια σας!",
            "Hebrew: !שָׁלוֹם",
            "Arabic: !مرحبا",
            "Hindi: नमस्ते!",
            "Chinese: 你好!",
            "Japanese: こんにちは!",
            "Korean: 안녕하세요!",
            "Thai: สวัสดี!",
        ]
    )


if __name__ == "__main__":
    mcp.run()

================================================================
End of Codebase
================================================================

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/smian0/mcp-agile-flow'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.