MCP Template

example.py (1.91 kB)
""" Example prompt implementation. """ from typing import Any, Dict import mcp.types as types async def example_prompt_handler(arguments: Dict[str, Any]) -> types.GetPromptResult: """ Example prompt that generates a structured prompt for a given task. Args: arguments: Prompt arguments containing: - task: The task to generate a prompt for - context: Optional context information Returns: GetPromptResult containing the generated prompt """ task = arguments.get("task", "") context = arguments.get("context", "") if not task: prompt_text = "Please provide a task to generate a prompt for." else: prompt_text = f"""You are an AI assistant helping with the following task: Task: {task} """ if context: prompt_text += f"""Additional Context: {context} """ prompt_text += """Please provide a detailed and helpful response to complete this task. Consider best practices, potential challenges, and provide clear explanations.""" return types.GetPromptResult( description=f"Prompt for task: {task}", messages=[ types.PromptMessage( role="user", content=types.TextContent( type="text", text=prompt_text, ), ) ], ) # Prompt schema definition EXAMPLE_PROMPT_SCHEMA = types.Prompt( name="example_prompt", description="Generates a structured prompt for a given task with optional context", arguments=[ types.PromptArgument( name="task", description="The task to generate a prompt for", required=True, ), types.PromptArgument( name="context", description="Optional additional context for the task", required=False, ), ], )

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/riteshsonawala/mcp-template'
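
For example, the same server record can be fetched from Python; this is a minimal sketch assuming the endpoint returns JSON (the response shape is not documented here):

# Minimal sketch of calling the MCP directory API from the Python standard library.
# The URL mirrors the curl command above; treating the body as JSON is an assumption.
import json
from urllib.request import urlopen

url = "https://glama.ai/api/mcp/v1/servers/riteshsonawala/mcp-template"
with urlopen(url) as response:
    server_record = json.load(response)

print(server_record)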

If you have feedback or need assistance with the MCP directory API, please join our Discord server.