Skip to main content
Glama
aider.py • 3.32 kB
"""Aider CLI adapter.""" import asyncio import subprocess from typing import Any from .base import CLIAdapter class AiderAdapter(CLIAdapter): """Adapter for Aider CLI.""" async def execute(self, task: str, progress_callback: Any = None, timeout: int | None = None, **kwargs: Any) -> tuple[str, str, int]: """Execute task using Aider CLI with optional streaming.""" if progress_callback: return await self.execute_streaming(task, progress_callback, timeout, **kwargs) cmd = self.format_task(task, **kwargs) resolved_cmd = self.resolve_command(cmd) # Sanitize environment for Windows/prompt_toolkit compatibility env = {**subprocess.os.environ, **self.get_env()} env["TERM"] = "dumb" # Prevent prompt_toolkit from seeing xterm-256color on Windows env["PYTHONIOENCODING"] = "utf-8" process = await asyncio.create_subprocess_exec( *resolved_cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, env=env, ) try: effective_timeout = timeout or self.get_timeout() stdout, stderr = await asyncio.wait_for( process.communicate(), timeout=effective_timeout ) return ( stdout.decode("utf-8", errors="replace"), stderr.decode("utf-8", errors="replace"), process.returncode or 0, ) except asyncio.TimeoutError: process.kill() await process.wait() raise TimeoutError(f"Aider CLI timed out after {effective_timeout}s") def get_env(self) -> dict[str, str]: """Get environment variables for Aider.""" env = super().get_env().copy() env["TERM"] = "dumb" # Prevent prompt_toolkit from seeing xterm-256color on Windows env["PYTHONIOENCODING"] = "utf-8" return env def validate(self) -> bool: """Validate Aider is available.""" try: subprocess.run( ["which", "aider"] if subprocess.os.name != "nt" else ["where", "aider"], capture_output=True, check=True, ) return True except subprocess.CalledProcessError: return False def format_task(self, task: str, **kwargs: Any) -> list[str]: """Format task for Aider CLI.""" cmd = ["aider"] # Add message flag for non-interactive mode cmd.append("--message") 
cmd.append(task) # Add model if specified if model := kwargs.get("model"): cmd.extend(["--model", model]) # Add mode flags if kwargs.get("architect_mode"): cmd.append("--architect") elif kwargs.get("ask_mode"): cmd.append("--ask") # Auto-commit changes if kwargs.get("auto_commit", True): cmd.append("--auto-commits") # Add optimization flags cmd.extend([ "--no-pretty", "--stream", "--no-check-update", "--no-show-release-notes", "--verbose", "--yes-always", ]) # Add any custom args from config cmd.extend(self.get_args()) return cmd

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/carlosduplar/multi-agent-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server