Skip to main content
Glama

Reviewer MCP

by jaggederest
ollama.ts (1.34 kB)
import { AIProvider } from './types.js'; import { ProviderError } from './errors.js'; interface OllamaResponse { model: string; created_at: string; message: { role: string; content: string; }; done: boolean; } export class OllamaProvider implements AIProvider { public readonly name = 'Ollama'; private baseUrl: string; private model: string; constructor(baseUrl = 'http://localhost:11434', model = 'llama2') { this.baseUrl = baseUrl.replace(/\/$/, ''); // Remove trailing slash this.model = model; } async chat(systemPrompt: string, userPrompt: string): Promise<string> { try { const response = await fetch(`${this.baseUrl}/api/chat`, { method: 'POST', headers: { 'Content-Type': 'application/json', }, body: JSON.stringify({ model: this.model, messages: [ { role: 'system', content: systemPrompt }, { role: 'user', content: userPrompt }, ], stream: false, }), }); if (!response.ok) { throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); } const data = await response.json() as OllamaResponse; return data.message.content; } catch (error) { throw new ProviderError(this.name, error); } } }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/jaggederest/mcp_reviewer'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.