// ollama.ts
import { AIProvider } from './types.js';
import { ProviderError } from './errors.js';
/**
 * Response payload returned by Ollama's `/api/chat` endpoint when the
 * request sets `stream: false` (a single JSON object, not NDJSON chunks).
 */
interface OllamaResponse {
  /** Name of the model that produced the reply (echoed by the server). */
  model: string;
  /** Server-side creation timestamp — presumably ISO 8601; confirm against the Ollama API docs. */
  created_at: string;
  /** The assistant message generated for the request. */
  message: {
    role: string;
    content: string;
  };
  /** Completion flag reported by the server. */
  done: boolean;
}
/**
 * AIProvider backed by a local (or remote) Ollama server's `/api/chat`
 * endpoint, using non-streaming requests.
 */
export class OllamaProvider implements AIProvider {
  public readonly name = 'Ollama';
  private baseUrl: string;
  private model: string;

  /**
   * @param baseUrl Base URL of the Ollama server; a trailing slash is stripped
   *   so `${baseUrl}/api/chat` never contains a double slash.
   * @param model   Model name sent with every chat request.
   */
  constructor(baseUrl = 'http://localhost:11434', model = 'llama2') {
    this.baseUrl = baseUrl.replace(/\/$/, ''); // Remove trailing slash
    this.model = model;
  }

  /**
   * Sends a non-streaming chat request and returns the assistant's reply text.
   *
   * @param systemPrompt Content of the `system` role message.
   * @param userPrompt   Content of the `user` role message.
   * @returns The `message.content` string from the Ollama response.
   * @throws ProviderError wrapping the underlying failure: a network error,
   *   a non-2xx HTTP status, or a response body without `message.content`.
   */
  async chat(systemPrompt: string, userPrompt: string): Promise<string> {
    try {
      const response = await fetch(`${this.baseUrl}/api/chat`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          model: this.model,
          messages: [
            { role: 'system', content: systemPrompt },
            { role: 'user', content: userPrompt },
          ],
          stream: false, // single JSON object instead of NDJSON chunks
        }),
      });
      if (!response.ok) {
        throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
      }
      // Narrow the payload at runtime instead of trusting a blind cast:
      // an unexpected body (proxy error page, older server) would otherwise
      // surface as a TypeError on `.content` with no useful context.
      const data = (await response.json()) as { message?: { content?: unknown } };
      const content = data?.message?.content;
      if (typeof content !== 'string') {
        throw new Error('Ollama API error: response is missing message.content');
      }
      return content;
    } catch (error) {
      // Uniform error surface: callers only ever see ProviderError from chat().
      throw new ProviderError(this.name, error);
    }
  }
}