
Qiniu MCP Server

Official server by qiniu

server.py (1.89 kB)
import logging

import anyio
import click

from . import application
from .consts import consts

logger = logging.getLogger(consts.LOGGER_NAME)
logger.info("Starting MCP server")

SAMPLE_RESOURCES = {
    "greeting": "Hello! This is a MCP Server for Qiniu.",
    "help": "This server provides a few resources and tools for Qiniu.",
    "about": "This is the MCP server implementation.",
}


@click.command()
@click.option("--port", default=8000, help="Port to listen on for SSE")
@click.option(
    "--transport",
    type=click.Choice(["stdio", "sse"]),
    default="stdio",
    help="Transport type",
)
def main(port: int, transport: str) -> int:
    app = application.server

    if transport == "sse":
        # Serve the MCP app over HTTP using Server-Sent Events.
        from mcp.server.sse import SseServerTransport
        from starlette.applications import Starlette
        from starlette.routing import Mount, Route

        sse = SseServerTransport("/messages/")

        async def handle_sse(request):
            async with sse.connect_sse(
                request.scope, request.receive, request._send
            ) as streams:
                await app.run(
                    streams[0], streams[1], app.create_initialization_options()
                )

        starlette_app = Starlette(
            debug=True,
            routes=[
                Route("/sse", endpoint=handle_sse),
                Mount("/messages/", app=sse.handle_post_message),
            ],
        )

        import uvicorn

        uvicorn.run(starlette_app, host="0.0.0.0", port=port)
    else:
        # Default: serve the MCP app over stdin/stdout.
        from mcp.server.stdio import stdio_server

        async def arun():
            async with stdio_server() as streams:
                await app.run(
                    streams[0], streams[1], app.create_initialization_options()
                )

        anyio.run(arun)

    return 0


if __name__ == "__main__":
    # main() is a synchronous click command; invoke it directly rather than
    # wrapping it in asyncio.run() (the async work is handled inside main()).
    main()
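
For a quick smoke test of the default stdio transport, the standard mcp Python SDK client can spawn the server as a subprocess and list its tools. This is a minimal sketch: the launch command ("python -m qiniu_mcp_server") is an assumption about how the package is invoked, so substitute whatever entry point your installation actually provides.

import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def smoke_test() -> None:
    # Assumed launch command; replace with your install's console script or module path.
    params = StdioServerParameters(command="python", args=["-m", "qiniu_mcp_server"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])


asyncio.run(smoke_test())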

MCP directory API

We provide all the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/qiniu/qiniu-mcp-server'
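
If you prefer Python over curl, the same endpoint can be queried with the standard library. This is only a sketch: it prints whatever JSON the directory returns and assumes nothing about the response schema.

import json
import urllib.request

url = "https://glama.ai/api/mcp/v1/servers/qiniu/qiniu-mcp-server"
with urllib.request.urlopen(url) as resp:
    server = json.load(resp)

# Dump the server metadata as returned by the directory API.
print(json.dumps(server, indent=2))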

If you have feedback or need assistance with the MCP directory API, please join our Discord server.