Skip to main content
Glama
opencodeExecutor.ts (1.22 kB)
import { executeCommand } from './commandExecutor.js'; import { Logger } from './logger.js'; interface OpenCodeOptions { agent?: string; model?: string; cwd?: string; onProgress?: (output: string) => void; } export async function executeOpenCodeCLI( prompt: string, agent?: string, model?: string, onProgress?: (output: string) => void, cwd?: string ): Promise<string> { const args = ['run']; // Add model if specified if (model) { args.push('-m', model); } // Add agent if specified if (agent) { args.push('--agent', agent); } // Add the prompt args.push(prompt); Logger.debug('Executing opencode:', { args: args.join(' '), cwd }); try { const result = await executeCommand('opencode', args, { cwd, timeout: 300000, // 5 minutes onProgress, }); if (result.exitCode !== 0) { Logger.warn('OpenCode exited with non-zero code:', result.exitCode); } // Combine stdout and stderr for the response const output = [result.stdout, result.stderr].filter(Boolean).join('\n'); return output || 'No output from OpenCode'; } catch (error) { Logger.error('OpenCode execution failed:', error); throw error; } }

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/RhizomaticRobin/cerebras-code-fullstack-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.