Skip to main content
Glama

Model Context Protocol (MCP) Server

by Mark850409
stdio_server.py (1.45 kB)
import asyncio import json import logging from typing import Optional async def stdio_server(reader: asyncio.StreamReader, writer: asyncio.StreamWriter): """處理客戶端連接的伺服器函數""" try: peer = writer.get_extra_info('peername') logging.info(f"新的客戶端連接: {peer}") while True: # 讀取客戶端訊息 data = await reader.read(1024) if not data: break try: # 解析訊息 message = json.loads(data.decode()) # 處理訊息 response = { "status": "success", "message": "收到訊息" } # 發送回應 writer.write(json.dumps(response).encode()) await writer.drain() except json.JSONDecodeError: logging.error("無效的 JSON 格式") writer.write(json.dumps({ "status": "error", "message": "無效的訊息格式" }).encode()) await writer.drain() except Exception as e: logging.error(f"伺服器錯誤: {str(e)}") finally: writer.close() await writer.wait_closed() logging.info(f"客戶端斷開連接: {peer}")

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Mark850409/20250223_mcp-client'

If you have feedback or need assistance with the MCP directory API, please join our Discord server