We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/MarkAndersonIX/mcp-gemini-cli-base'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
prompt_mcp.py (1.13 kB)
# prompt_mcp.py
import asyncio
from fastmcp import Client
from fastmcp import FastMCP
# Server instance: FastMCP registers every decorated handler below under this
# name, and the in-process Client connects to this same object for testing.
mcp = FastMCP(name='TopicExplainerPrompt')
@mcp.prompt
def explain_topic(topic: str) -> str:
    """Return a beginner-friendly explanation prompt for *topic*.

    Registered on the server via @mcp.prompt, so clients can render it
    with get_prompt("explain_topic", {"topic": ...}).
    """
    prompt_text = f"Can you explain {topic} in a beginner friendly manner with simple wordings and no technical jargon. Include Concept & Examples."
    return prompt_text
async def test_prompt():
    """Smoke-test the prompt server: list prompts, then render one.

    Wraps the server object directly in a fastmcp Client, so no network
    transport is needed for this in-process check.
    """
    async with Client(mcp) as client:
        # Discover every prompt registered on the server.
        available = await client.list_prompts()
        names = [prompt.name for prompt in available]
        print("Available prompts:", names)
        # Render explain_topic with a sample topic; swap the value (or add
        # further get_prompt calls here) to exercise more prompts.
        rendered = await client.get_prompt("explain_topic", {"topic": "machine learning"})
        print("Generated prompt:", rendered.messages[0].content.text)
# Script entry point: run the in-process client test when executed directly.
if __name__ == "__main__":
    asyncio.run(test_prompt())