Skip to main content
Glama
Atomic-Germ

MCP Ollama Consult Server

ModelValidator.ts3.27 kB
import { ConfigManager } from '../config/ConfigManager'; import { OllamaError } from '../types'; export interface AvailableModel { name: string; installed: boolean; isCloud: boolean; } export class ModelValidator { constructor(private config: ConfigManager) {} private looksLikeCloudModel(name: string): boolean { if (!name) return false; return name.includes(':cloud') || name.includes('-cloud'); } private modelIsSafe(model: any): boolean { if (!model) return false; const name = model.name || ''; if (this.looksLikeCloudModel(name)) return true; // Accept models that are installed/local if (model.installed || model.local || model.downloaded) return true; return false; } async getAvailableModels(): Promise<AvailableModel[]> { try { const url = this.config.getApiUrl('/api/tags'); const response = await fetch(url, { method: 'GET', signal: AbortSignal.timeout(this.config.getTimeout()), }); if (!response.ok) { throw new OllamaError( `Failed to fetch models: ${response.statusText}`, 'LIST_MODELS_FAILED' ); } const text = await response.text(); const data = JSON.parse(text) as { models?: any[] }; const models = data.models || []; const available = models .filter((m) => this.modelIsSafe(m)) .map((m) => ({ name: m.name, installed: !this.looksLikeCloudModel(m.name), isCloud: this.looksLikeCloudModel(m.name), })); return available; } catch (error) { if (error instanceof OllamaError) throw error; throw new OllamaError( `Failed to fetch available models: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 'CONNECTION_FAILED' ); } } async isModelAvailable(modelName: string): Promise<boolean> { if (!modelName) return false; // Cloud models are always acceptable if (this.looksLikeCloudModel(modelName)) return true; try { const available = await this.getAvailableModels(); return available.some((m) => m.name === modelName); } catch (_error) { // If we can't check, assume unavailable to be safe return false; } } async getDefaultModel(): Promise<string> { try { const available = await this.getAvailableModels(); if (available.length === 0) { throw new OllamaError('No models available', 'NO_MODELS_AVAILABLE'); } // Prefer installed models over cloud models const installed = available.find((m) => m.installed); if (installed) return installed.name; // Fall back to first cloud model const cloud = available.find((m) => m.isCloud); if (cloud) return cloud.name; // Fall back to first available return available[0].name; } catch (error) { if (error instanceof OllamaError) throw error; throw new OllamaError( `Failed to determine default model: ${error instanceof Error ? error.message : 'Unknown error'}`, 'CONNECTION_FAILED' ); } } getSuggestions(count: number = 3): Promise<string[]> { return this.getAvailableModels().then((models) => models.slice(0, count).map((m) => m.name)); } }

Implementation Reference

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Atomic-Germ/mcp-consult'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.