DeepClaude MCP Server

by smithery-ai
server.ts
import express from 'express';
import cors from 'cors';
import { ApiClient } from './api-client';
import config from '../config';
import { ChatCompletionRequest, ChatCompletionResponse } from '../types/api';

// Proxies OpenAI-style chat completions through two upstreams:
// DeepSeek first (to produce reasoning_content), then Claude
// (to produce the final answer, conditioned on that reasoning).
export class Server {
  private app: express.Application;
  private deepseekClient!: ApiClient;
  private claudeClient!: ApiClient;

  constructor() {
    this.app = express();
    this.setupMiddleware();
    this.setupClients();
    this.setupRoutes();
  }

  private setupMiddleware() {
    this.app.use(express.json());
    this.app.use(cors({
      origin: config.allowedOrigins,
      methods: ['GET', 'POST'],
      allowedHeaders: ['Content-Type', 'Authorization'],
    }));
  }

  private setupClients() {
    this.deepseekClient = new ApiClient(
      config.deepseekApiKey,
      config.deepseekApiUrl,
      'deepseek',
      config.deepseekModel
    );
    this.claudeClient = new ApiClient(
      config.claudeApiKey,
      config.claudeApiUrl,
      config.claudeProvider,
      config.claudeModel
    );
  }

  private setupRoutes() {
    this.app.post('/v1/chat/completions', this.handleChatCompletions.bind(this));
    this.app.get('/health', (_, res) => res.send('OK'));
  }

  private async handleChatCompletions(req: express.Request, res: express.Response) {
    const request = req.body as ChatCompletionRequest;
    const { messages, stream = true, ...modelArgs } = request;

    if (stream) {
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');

      try {
        const chatId = `chatcmpl-${Date.now().toString(16)}`;
        const createdTime = Math.floor(Date.now() / 1000);
        let reasoningContent = '';

        // DeepSeek reasoning phase
        const deepseekStream = await this.deepseekClient.streamChat(
          messages,
          config.deepseekModel,
          modelArgs
        );

        deepseekStream.on('data', (chunk: Buffer) => {
          console.log('deepseek stream data', chunk.toString());
          const reasoningContentKey = 'reasoning_content';
          const lines = chunk.toString().split('\n');
          for (const line of lines) {
            if (line.startsWith('data: ')) {
              // 'data: ' is 6 characters; slice(5) left a stray leading space
              const data = line.slice(6).trim();
              if (data === '[DONE]' || !data) continue;
              try {
                const jsonData = JSON.parse(data);
                if (jsonData.choices?.[0]?.delta?.[reasoningContentKey]) {
                  reasoningContent += jsonData.choices[0].delta[reasoningContentKey];
                  // Build the response chunk
                  const response: ChatCompletionResponse = {
                    id: chatId,
                    object: 'chat.completion.chunk',
                    created: createdTime,
                    model: config.deepseekModel,
                    choices: [{
                      index: 0,
                      delta: {
                        role: 'assistant',
                        reasoning_content: jsonData.choices[0].delta[reasoningContentKey],
                      },
                    }],
                  };
                  res.write(`data: ${JSON.stringify(response)}\n\n`);
                }
              } catch (error) {
                console.error('Error parsing deepseek response:', error);
              }
            }
          }
        });

        // Wait for the DeepSeek stream to finish
        await new Promise((resolve) =>
          deepseekStream.on('end', () => {
            console.log('deepseek stream end');
            resolve(true);
          })
        );

        // Claude answer phase
        const claudeMessages = [...messages];
        console.log('reasoningContent', reasoningContent);

        // Append the reasoning process to the conversation as a system message
        claudeMessages.push({
          role: 'system',
          content: `Here's the reasoning process:\n${reasoningContent}\n\nBased on this reasoning, provide your response.`,
        });

        const claudeStream = await this.claudeClient.streamChat(
          claudeMessages,
          config.claudeModel,
          modelArgs
        );

        // Send the complete reasoning process exactly once
        const finalReasoningResponse: ChatCompletionResponse = {
          id: chatId,
          object: 'chat.completion.chunk',
          created: createdTime,
          model: config.deepseekModel,
          choices: [{
            index: 0,
            delta: {
              role: 'assistant',
              reasoning_content: reasoningContent,
            },
          }],
        };
        res.write(`data: ${JSON.stringify(finalReasoningResponse)}\n\n`);

        claudeStream.on('data', (chunk: Buffer) => {
          console.log('claude stream data', chunk.toString());
          const lines = chunk.toString().split('\n');
          for (const line of lines) {
            if (line.startsWith('data: ')) {
              const data = line.slice(6).trim();
              if (data === '[DONE]' || !data) continue;
              try {
                const jsonData = JSON.parse(data);
                if (jsonData.choices?.[0]?.delta?.content) {
                  // Build the response chunk
                  const response: ChatCompletionResponse = {
                    id: jsonData.id,
                    object: 'chat.completion.chunk',
                    created: jsonData.created,
                    model: jsonData.model,
                    choices: [{
                      index: 0,
                      delta: {
                        role: 'assistant',
                        content: jsonData.choices[0].delta.content,
                      },
                    }],
                  };
                  res.write(`data: ${JSON.stringify(response)}\n\n`);
                }
              } catch (error) {
                console.error('Error parsing claude response:', error, 'Raw data:', data);
                continue;
              }
            }
          }
        });

        claudeStream.on('end', () => {
          res.write('data: [DONE]\n\n');
          res.end();
        });
      } catch (error) {
        console.error('Error in stream chat:', error);
        res.status(500).json({ error: 'Internal server error' });
      }
    } else {
      res.status(400).json({ error: 'Only streaming mode is supported' });
    }
  }

  start() {
    this.app.listen(config.port, () => {
      console.log(`Server is running on port ${config.port}`);
    });
  }
}
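The handler emits OpenAI-style chat.completion.chunk events over SSE: reasoning_content deltas first (DeepSeek), then content deltas (Claude), then a terminating data: [DONE]. Below is a minimal client sketch that consumes that stream. It is illustrative only: it assumes Node 18+ (global fetch and web streams) and uses localhost:3000 as a stand-in for whatever config.port resolves to.

// Minimal client sketch for the streaming endpoint above -- not part of the server.
// Assumes Node 18+ and that the server listens on port 3000 (placeholder).
async function streamCompletion(): Promise<void> {
  const res = await fetch('http://localhost:3000/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      stream: true,
      messages: [{ role: 'user', content: 'Why is the sky blue?' }],
    }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? ''; // keep any partial line for the next chunk
    for (const line of lines) {
      if (!line.startsWith('data: ')) continue;
      const data = line.slice(6).trim();
      if (data === '[DONE]') return;
      const delta = JSON.parse(data).choices?.[0]?.delta ?? {};
      // reasoning_content chunks arrive first (DeepSeek), then content (Claude)
      if (delta.reasoning_content) process.stdout.write(delta.reasoning_content);
      if (delta.content) process.stdout.write(delta.content);
    }
  }
}

streamCompletion().catch(console.error);

Buffering the trailing partial line matters because SSE events can split across network chunks; note that the server-side handlers above share this hazard, since they parse each upstream chunk's lines independently.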
