import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { z } from 'zod';
import * as dotenv from 'dotenv';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs/promises';
import { parse } from 'yaml';
import { randomUUID } from 'node:crypto';
import { Message, Conversation } from './types/conversation.js';
import { ServerConfig } from './types/server.js';
import { OpenRouterError, FileSystemError } from './types/errors.js';
import { OpenRouterProvider } from './providers/openrouter.js';
// Load environment variables from .env file
dotenv.config();
// Determine the appropriate app data directory based on OS
function getAppDataPath(): string {
    switch (process.platform) {
        case 'win32':
            return process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming');
        case 'darwin':
            return path.join(os.homedir(), 'Library', 'Application Support');
        default:
            return process.env.XDG_DATA_HOME || path.join(os.homedir(), '.local', 'share');
    }
}
// Create the app-specific data directory path
const APP_NAME = 'mcp-conversation-server';
const defaultDataPath = path.join(getAppDataPath(), APP_NAME, 'conversations');
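// For reference, the default conversations directory resolves to paths like:
//   Windows: %APPDATA%\mcp-conversation-server\conversations
//   macOS:   ~/Library/Application Support/mcp-conversation-server/conversations
//   Linux:   ~/.local/share/mcp-conversation-server/conversations (or under $XDG_DATA_HOME)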
/**
 * MCP Conversation Server
 *
 * Workflow:
 * 1. Create a conversation:
 *    - Use the create-conversation tool
 *    - Specify the model ID to use (e.g., 'deepseek/deepseek-chat')
 *    - Optionally provide a title
 *
 * 2. Send messages:
 *    - Use the send-message tool
 *    - Provide the conversationId returned in step 1
 *    - Set stream: true for streamed responses
 *    - Messages maintain chat context automatically
 *
 * 3. Access conversation history:
 *    - Use resources/read with conversation://{id}/history
 *    - The full chat history with context is preserved
 *
 * Error handling:
 * - All errors include detailed messages and proper error codes
 * - Automatic retries for transient failures
 * - Timeouts are configurable per operation
 */
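// Illustrative tool-call payloads for the workflow above. The argument values are
// hypothetical; only the parameter names are defined by the tools registered in
// setupTools() below.
//
//   tools/call  { name: 'create-conversation',
//                 arguments: { model: 'deepseek/deepseek-chat', title: 'Demo' } }
//   tools/call  { name: 'send-message',
//                 arguments: { conversationId: '<id from create-conversation>',
//                              content: 'Hello!', stream: false } }
//   tools/call  { name: 'list-models', arguments: {} }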
// Schema definitions mirroring the core MCP JSON-RPC request shapes.
// They are not wired up to handlers in this file; the high-level McpServer API
// registers tools directly via server.tool() in setupTools() below.
const ListResourcesSchema = z.object({
    method: z.literal('resources/list')
});
const ReadResourceSchema = z.object({
    method: z.literal('resources/read'),
    params: z.object({
        uri: z.string()
    })
});
const ListToolsSchema = z.object({
    method: z.literal('tools/list')
});
const CallToolSchema = z.object({
    method: z.literal('tools/call'),
    params: z.object({
        name: z.string(),
        arguments: z.record(z.unknown())
    })
});
const ListPromptsSchema = z.object({
    method: z.literal('prompts/list')
});
const GetPromptSchema = z.object({
    method: z.literal('prompts/get'),
    params: z.object({
        name: z.string(),
        arguments: z.record(z.unknown()).optional()
    })
});
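// Minimal sketch of how one of these schemas could validate a raw request before
// dispatch (not used at runtime in this file; the payload below is hypothetical):
//
//   const request = CallToolSchema.parse({
//     method: 'tools/call',
//     params: { name: 'list-models', arguments: {} }
//   });
//   // request.params.name === 'list-models'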
// All non-JSON-RPC output goes to stderr so stdout stays reserved for the MCP transport
function logDebug(...args: any[]): void {
    console.error('[DEBUG]', ...args);
}
function logError(...args: any[]): void {
    console.error('[ERROR]', ...args);
}
// Create the MCP server instance
const server = new McpServer({
    name: 'conversation-server',
    version: '1.0.0'
});
// Initialize the server configuration. This is a bootstrap config; it is replaced at
// startup with the values loaded from models.yaml (see startServer below) so that the
// tools registered in setupTools() read the same settings.
let config: ServerConfig = {
    openRouter: {
        apiKey: process.env.OPENROUTER_API_KEY || ''
    },
    models: {}, // Will be populated from the YAML config
    defaultModel: '', // Will be populated from the YAML config
    persistence: {
        type: 'filesystem',
        path: process.env.CONVERSATIONS_PATH || defaultDataPath
    },
    resources: {
        maxSizeBytes: 10 * 1024 * 1024, // 10MB
        allowedTypes: ['.txt', '.md', '.json', '.csv', '.cs', '.ts', '.js', '.jsx', '.tsx', '.pdf'],
        chunkSize: 1024 // 1KB chunks
    }
};
let openRouterProvider: OpenRouterProvider;
// Load the full server configuration from models.yaml
async function loadModelsConfig(): Promise<ServerConfig> {
    try {
        // Try to load from the build directory first (for production)
        const buildConfigPath = path.join(path.dirname(process.argv[1]), 'config', 'models.yaml');
        let fileContents: string;
        try {
            fileContents = await fs.readFile(buildConfigPath, 'utf8');
        } catch (error) {
            // If not found in the build directory, fall back to the source directory (for development)
            const sourceConfigPath = path.join(process.cwd(), 'config', 'models.yaml');
            fileContents = await fs.readFile(sourceConfigPath, 'utf8');
        }
        const parsedConfig = parse(fileContents);
        // Validate required configuration; the API key may also come from the environment
        const apiKey = parsedConfig.openRouter?.apiKey || process.env.OPENROUTER_API_KEY;
        if (!apiKey) {
            throw new Error('Missing openRouter.apiKey in models.yaml configuration (or OPENROUTER_API_KEY in the environment)');
        }
        if (!parsedConfig.models || Object.keys(parsedConfig.models).length === 0) {
            throw new Error('No models configured in models.yaml configuration');
        }
        if (!parsedConfig.defaultModel) {
            throw new Error('Missing defaultModel in models.yaml configuration');
        }
        // Fall back to the default persistence path if none is specified
        if (!parsedConfig.persistence?.path) {
            parsedConfig.persistence = {
                path: defaultDataPath
            };
        }
        return {
            openRouter: {
                apiKey
            },
            models: parsedConfig.models,
            defaultModel: parsedConfig.defaultModel,
            persistence: {
                type: 'filesystem',
                path: parsedConfig.persistence.path
            },
            resources: {
                maxSizeBytes: 10 * 1024 * 1024, // 10MB
                allowedTypes: ['.txt', '.md', '.json', '.csv', '.cs', '.ts', '.js', '.jsx', '.tsx', '.pdf'],
                chunkSize: 1024 // 1KB chunks
            }
        };
    } catch (error) {
        if (error instanceof Error) {
            throw new Error(`Failed to load models configuration: ${error.message}`);
        }
        throw new Error('Failed to load models configuration. Make sure models.yaml exists in the config directory.');
    }
}
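// For reference, a minimal models.yaml is expected to parse into an object shaped like
// the following (the model entry shown is illustrative; this file only checks that the
// API key, defaultModel, and at least one model key are present):
//
//   {
//     openRouter: { apiKey: 'sk-or-...' },
//     defaultModel: 'deepseek/deepseek-chat',
//     models: { 'deepseek/deepseek-chat': { /* per-model options, if any */ } },
//     persistence: { path: '/optional/custom/path' } // optional; defaults to defaultDataPath
//   }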
// Initialize and start the server
async function startServer() {
    try {
        console.error('Starting MCP Conversation Server...');
        // Load and validate the complete configuration from YAML, replacing the
        // bootstrap config so the tools registered below see the same settings
        config = await loadModelsConfig();
        console.error('Using data directory:', config.persistence.path);
        // Initialize the OpenRouter provider with the loaded config
        openRouterProvider = new OpenRouterProvider({
            apiKey: config.openRouter.apiKey,
            models: config.models,
            defaultModel: config.defaultModel,
            timeouts: {
                completion: 30000, // ms
                stream: 60000 // ms
            }
        });
        // Create the data directory if it doesn't exist
        await fs.mkdir(config.persistence.path, { recursive: true });
        // Validate the OpenRouter connection using the provider
        await openRouterProvider.validateConfig();
        // Set up tools after the provider is initialized
        setupTools();
        console.error('Successfully connected to OpenRouter');
        console.error('Available models:', Object.keys(config.models).join(', '));
        console.error('Default model:', config.defaultModel);
        // Set up the stdio transport and connect
        const transport = new StdioServerTransport();
        await server.connect(transport);
        console.error('Server connected and ready');
    } catch (error) {
        console.error('Failed to start server:', error);
        process.exit(1);
    }
}
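// Minimal sketch of driving this server from an MCP client over stdio, assuming the
// compiled entry point lives at build/index.js (the path and the exact client API
// surface depend on your setup and the installed SDK version):
//
//   import { Client } from '@modelcontextprotocol/sdk/client/index.js';
//   import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
//
//   const client = new Client({ name: 'example-client', version: '1.0.0' });
//   await client.connect(new StdioClientTransport({ command: 'node', args: ['build/index.js'] }));
//   const created = await client.callTool({
//     name: 'create-conversation',
//     arguments: { model: 'deepseek/deepseek-chat' }
//   });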
// Set up the server tools
function setupTools() {
    // Add the create-conversation tool
    server.tool(
        'create-conversation',
        `Creates a new conversation with a specified model.`,
        {
            model: z.string().describe('The model ID to use for the conversation'),
            title: z.string().optional().describe('Optional title for the conversation')
        },
        async (args: { model: string; title?: string }, _extra: any) => {
            const { model, title } = args;
            const now = new Date().toISOString();
            const conversation: Conversation = {
                id: randomUUID(),
                model,
                title: title || `Conversation ${now}`,
                messages: [],
                created: now,
                updated: now
            };
            try {
                // Persist the new conversation as <id>.json in the data directory
                const conversationPath = path.join(config.persistence.path, `${conversation.id}.json`);
                await fs.writeFile(conversationPath, JSON.stringify(conversation, null, 2));
                return {
                    content: [{
                        type: 'text',
                        text: JSON.stringify(conversation, null, 2)
                    }]
                };
            } catch (error) {
                const message = error instanceof Error ? error.message : 'Unknown error';
                throw new FileSystemError(`Failed to save conversation: ${message}`);
            }
        }
    );
    // Add the send-message tool
    server.tool(
        'send-message',
        `Sends a message to an existing conversation and receives a response.`,
        {
            conversationId: z.string().describe('ID of a conversation created with create-conversation'),
            content: z.string().describe('The user message to send'),
            stream: z.boolean().optional().describe('Set to true to stream the response')
        },
        async (args: { conversationId: string; content: string; stream?: boolean }, _extra: any) => {
            const { conversationId, content, stream = false } = args;
            try {
                // Load the stored conversation and append the user message
                const conversationPath = path.join(config.persistence.path, `${conversationId}.json`);
                const conversation: Conversation = JSON.parse(await fs.readFile(conversationPath, 'utf8'));
                const userMessage: Message = {
                    role: 'user',
                    content,
                    timestamp: new Date().toISOString()
                };
                conversation.messages.push(userMessage);
                conversation.updated = new Date().toISOString();
                try {
                    if (stream) {
                        // Start the streamed completion. Note: the stream itself is not consumed
                        // here; the caller receives a stream:// resource reference and only the
                        // user message is persisted at this point.
                        const streamResponse = await openRouterProvider.streamCompletion({
                            model: conversation.model,
                            messages: conversation.messages,
                            stream: true
                        });
                        await fs.writeFile(conversationPath, JSON.stringify(conversation, null, 2));
                        return {
                            content: [{
                                type: 'resource',
                                resource: {
                                    uri: `stream://${conversationId}`,
                                    text: 'Message stream started',
                                    mimeType: 'text/plain'
                                }
                            }]
                        };
                    } else {
                        // Request a complete (non-streamed) response and persist both messages
                        const response = await openRouterProvider.createCompletion({
                            model: conversation.model,
                            messages: conversation.messages,
                            stream: false
                        });
                        const assistantMessage: Message = {
                            role: 'assistant',
                            content: response.content,
                            timestamp: new Date().toISOString()
                        };
                        conversation.messages.push(assistantMessage);
                        conversation.updated = new Date().toISOString();
                        await fs.writeFile(conversationPath, JSON.stringify(conversation, null, 2));
                        return {
                            content: [{
                                type: 'text',
                                text: JSON.stringify(assistantMessage, null, 2)
                            }]
                        };
                    }
                } catch (error) {
                    const message = error instanceof Error ? error.message : 'Unknown error';
                    throw new OpenRouterError(`OpenRouter request failed: ${message}`);
                }
            } catch (error) {
                if (error instanceof OpenRouterError) throw error;
                const message = error instanceof Error ? error.message : 'Unknown error';
                throw new FileSystemError(`Failed to handle message: ${message}`);
            }
        }
    );
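    // A rough sketch of how the streaming branch could forward chunks instead of
    // returning a bare stream:// reference, assuming streamCompletion resolves to an
    // async iterable of { content: string } chunks (an assumption -- check the actual
    // return type in providers/openrouter.ts before wiring this up):
    //
    //   let assistantText = '';
    //   for await (const chunk of streamResponse) {
    //     assistantText += chunk.content;
    //     // e.g. publish the chunk under stream://<conversationId> here
    //   }
    //   conversation.messages.push({ role: 'assistant', content: assistantText,
    //                                timestamp: new Date().toISOString() });
    //   await fs.writeFile(conversationPath, JSON.stringify(conversation, null, 2));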
    // Add the list-models tool
    server.tool(
        'list-models',
        `Lists all available models with their configurations and capabilities.`,
        {},
        async (_args: {}, _extra: any) => {
            try {
                const models = await openRouterProvider.listAvailableModels();
                return {
                    content: [{
                        type: 'text',
                        text: JSON.stringify({
                            models,
                            defaultModel: openRouterProvider.getDefaultModel(),
                            totalModels: models.length
                        }, null, 2)
                    }]
                };
            } catch (error) {
                const message = error instanceof Error ? error.message : 'Unknown error';
                throw new Error(`Failed to list models: ${message}`);
            }
        }
    );
}
// Start the server
startServer();