// ollama-bridge.ts
import { Ollama, type Message, type Tool } from "ollama";
import readline from "node:readline";
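// Note: this script uses the global fetch API, which requires Node 18 or newer.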
const MCP_SERVER_URL = "http://localhost:3333";
const OLLAMA_MODEL = "llama3.2"; // Change to your preferred model
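// Note: the chosen model must support tool calling (llama3.2 does; many older models do not).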
// MCP Tool definitions for Ollama
const tools: Tool[] = [
{
type: "function" as const,
function: {
name: "send_by_cc",
description: "Send a raw MIDI CC (0-127) to FM8 on the configured channel.",
parameters: {
type: "object",
properties: {
cc: {
type: "number",
description: "MIDI CC number (0-127)"
},
value: {
type: "number",
description: "MIDI CC value (0-127)"
}
},
required: ["cc", "value"]
}
}
},
{
type: "function" as const,
function: {
name: "send_by_route",
description: "Send a FM8 matrix modulation route with a value. Routes are like 'A->B' where A is source and B is destination.",
parameters: {
type: "object",
properties: {
source: {
type: "string",
description: "Modulation source (e.g., 'LFO1', 'ENV1', 'A', 'B')"
},
dest: {
type: "string",
description: "Modulation destination (e.g., 'PITCH', 'CUTOFF', 'A', 'B')"
},
value: {
type: "number",
description: "Modulation amount (0-127)"
}
},
required: ["source", "dest", "value"]
}
}
},
{
type: "function" as const,
function: {
name: "list_mappings",
description: "List all available FM8 matrix mappings showing routes, labels, and CC numbers.",
parameters: {
type: "object",
properties: {},
required: []
}
}
},
{
type: "function" as const,
function: {
name: "panic",
description: "Emergency stop - sends Reset All Controllers, All Sound Off, and All Notes Off.",
parameters: {
type: "object",
properties: {
channel: {
type: "number",
description: "MIDI channel (1-16), optional"
}
},
required: []
}
}
},
{
type: "function" as const,
function: {
name: "status",
description: "Get the current status of the FM8 MCP server.",
parameters: {
type: "object",
properties: {},
required: []
}
}
}
];
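// Ollama only sees these local schemas; the MCP server performs the real
// validation, so keep these definitions in sync with the tools it exposes.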
// Call a tool on the MCP server via a JSON-RPC 2.0 "tools/call" request
async function callMCPTool(toolName: string, args: Record<string, unknown>): Promise<unknown> {
try {
const response = await fetch(`${MCP_SERVER_URL}/mcp`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
jsonrpc: "2.0",
id: Date.now(),
method: "tools/call",
params: {
name: toolName,
arguments: args
}
})
});
if (!response.ok) {
throw new Error(`MCP server error: ${response.status}`);
}
const result = await response.json();
if (result.error) {
throw new Error(result.error.message || "MCP tool call failed");
}
return result.result;
} catch (error) {
console.error(`Error calling MCP tool ${toolName}:`, error);
throw error;
}
}
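// Example (illustrative; what a given CC controls depends on your FM8 mapping):
//   await callMCPTool("send_by_cc", { cc: 74, value: 100 });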
// Main chat loop
async function chat() {
const ollama = new Ollama({ host: "http://localhost:11434" });
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const messages: Message[] = [];
console.log("\n🎹 FM8 MCP + Ollama Bridge");
console.log("=" .repeat(50));
console.log(`Model: ${OLLAMA_MODEL}`);
console.log(`MCP Server: ${MCP_SERVER_URL}`);
console.log("Type 'exit' to quit\n");
const askQuestion = () => {
rl.question("You: ", async (input) => {
const userInput = input.trim();
if (userInput.toLowerCase() === "exit") {
console.log("\nGoodbye! 👋");
rl.close();
process.exit(0);
}
if (!userInput) {
askQuestion();
return;
}
messages.push({
role: "user",
content: userInput
});
try {
// Send the conversation to Ollama, advertising the MCP tools so the model can request calls
const response = await ollama.chat({
model: OLLAMA_MODEL,
messages: messages,
tools: tools,
});
// If the model requested tool calls, execute each one against the MCP server
if (response.message.tool_calls && response.message.tool_calls.length > 0) {
messages.push(response.message);
for (const toolCall of response.message.tool_calls) {
console.log(`\n🔧 Calling tool: ${toolCall.function.name}`);
console.log(` Args: ${JSON.stringify(toolCall.function.arguments)}`);
try {
const toolResult = await callMCPTool(
toolCall.function.name,
toolCall.function.arguments
);
console.log(` ✅ Result: ${JSON.stringify(toolResult)}`);
messages.push({
role: "tool",
content: JSON.stringify(toolResult)
});
} catch (error) {
console.error(` ❌ Error: ${error}`);
messages.push({
role: "tool",
content: JSON.stringify({ error: String(error) })
});
}
}
// Get a final response that folds in the tool results. Tools are omitted
// here, so each user turn runs at most one round of tool calls.
const finalResponse = await ollama.chat({
model: OLLAMA_MODEL,
messages: messages
});
messages.push(finalResponse.message);
console.log(`\nAssistant: ${finalResponse.message.content}\n`);
} else {
// No tool calls; just a regular chat response
messages.push(response.message);
console.log(`\nAssistant: ${response.message.content}\n`);
}
} catch (error) {
console.error(`\n❌ Error: ${error}\n`);
}
askQuestion();
});
};
askQuestion();
}
// Start
console.log("Starting FM8 MCP + Ollama Bridge...");
console.log("Make sure your MCP server is running on http://localhost:3333");
console.log("Make sure Ollama is running with your preferred model\n");
chat().catch(console.error);
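// Example session (illustrative; actual model output and tool choices will vary):
//   You: route LFO1 to the filter cutoff at half depth
//   🔧 Calling tool: send_by_route
//      Args: {"source":"LFO1","dest":"CUTOFF","value":64}
//   Assistant: Done - LFO1 is now modulating CUTOFF at 64.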