
DeepClaude MCP Server

by smithery-ai
api-client.ts (3.11 kB)
import axios, { AxiosInstance } from 'axios';
import { Message } from '../types/api';
import config from '../config';

export class ApiClient {
  private axiosInstance: AxiosInstance;
  private apiKey: string;
  private provider: string;
  private model: string;

  constructor(
    apiKey: string,
    baseURL: string,
    provider: string = 'anthropic',
    model: string = 'anthropic/claude-3.5-haiku-20241022:beta'
  ) {
    // Log connection details, but never the API key itself.
    console.log('info', baseURL, provider);
    this.apiKey = apiKey;
    this.provider = provider;
    this.model = model;
    this.axiosInstance = axios.create({
      baseURL,
      headers: this.getHeaders(),
    });
  }

  // OpenRouter expects HTTP-Referer and X-Title headers for attribution;
  // other providers only need the bearer token.
  private getHeaders() {
    switch (this.provider) {
      case 'openrouter':
        return {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`,
          'HTTP-Referer': config.siteUrl,
          'X-Title': config.siteName,
        };
      default:
        return {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`,
        };
    }
  }

  // Streams a chat completion. The `model` argument only selects the request
  // format and endpoint; the model actually sent is `this.model`.
  async streamChat(messages: Message[], model: string, modelArgs: any = {}) {
    try {
      let requestData: any;
      let endpoint: string;
      console.log('model', model, this.provider);

      if (model.startsWith('anthropic')) {
        // OpenRouter Anthropic API format
        requestData = {
          model: this.model,
          // Coerce any unrecognized role to 'user'.
          messages: messages.map(msg => ({
            role: msg.role === 'system' ? 'system'
              : msg.role === 'assistant' ? 'assistant'
              : 'user',
            content: msg.content,
          })),
          stream: true,
          temperature: modelArgs.temperature || 1,
          top_p: modelArgs.top_p || 1,
          max_tokens: modelArgs.max_tokens || 1000,
        };
        endpoint = '/api/v1/chat/completions';
      } else if (model.startsWith('deepseek-')) {
        // DeepSeek API format
        requestData = {
          messages,
          model: this.model,
          stream: true,
        };
        endpoint = '/v1/chat/completions';
      } else {
        throw new Error(`Unsupported model: ${model}`);
      }

      const response = await this.axiosInstance.post(endpoint, requestData, {
        responseType: 'stream',
        // Disable axios's default JSON parsing so raw SSE chunks pass through.
        transformResponse: (data) => data,
      });
      return response.data;
    } catch (error: any) {
      if (error.response) {
        console.error('Error response:', {
          status: error.response.status,
          data: error.response.data,
        });
      }
      throw error;
    }
  }

  // Non-streaming chat completion against the provider's default endpoint.
  async chat(messages: Message[], model: string, modelArgs: any = {}) {
    try {
      const response = await this.axiosInstance.post('/chat/completions', {
        messages,
        model,
        ...modelArgs,
      });
      return response.data;
    } catch (error) {
      console.error('Error in chat:', error);
      throw error;
    }
  }
}
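A minimal usage sketch for the class above, assuming the OpenRouter provider, an OPENROUTER_API_KEY environment variable, and that Message is a { role, content } object; the base URL and model name here are illustrative, not fixed by the file:

import { ApiClient } from './api-client';

async function main() {
  // Assumed setup: OpenRouter base URL and an env var holding the key.
  const client = new ApiClient(
    process.env.OPENROUTER_API_KEY ?? '',
    'https://openrouter.ai',
    'openrouter'
  );

  // streamChat resolves to a Node readable stream of SSE chunks,
  // since the request is made with responseType: 'stream'.
  const stream = await client.streamChat(
    [{ role: 'user', content: 'Hello' }],
    'anthropic/claude-3.5-haiku-20241022:beta'
  );
  stream.on('data', (chunk: Buffer) => process.stdout.write(chunk.toString()));
}

main().catch(console.error);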

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/smithery-ai/DeepClaudeMcp'
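The same lookup from TypeScript, as a sketch assuming Node 18+ (global fetch) and a JSON response; the response shape is whatever the directory API returns and is logged as-is:

async function getServerEntry() {
  const res = await fetch(
    'https://glama.ai/api/mcp/v1/servers/smithery-ai/DeepClaudeMcp'
  );
  if (!res.ok) throw new Error(`MCP directory API returned ${res.status}`);
  return res.json(); // shape defined by the directory API
}

getServerEntry().then(console.log).catch(console.error);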

If you have feedback or need assistance with the MCP directory API, please join our Discord server.