Skip to main content
Glama

consult_codex_batch

Process multiple Codex AI queries in batch for CI/CD automation, returning consolidated JSON output with individual timeout and format preferences.

Instructions

Consult multiple Codex queries in batch — well suited to CI/CD automation. Processes multiple prompts and returns consolidated JSON output. Each query can have an individual timeout and format preference.

Args:
- queries: list of query dictionaries with keys 'query' (required) and 'timeout' (optional)
- directory: working directory (required)
- format: output format — currently only "json" is supported for batch

Returns: a JSON array with all results.

Input Schema

TableJSON Schema
| Name      | Required | Description | Default |
|-----------|----------|-------------|---------|
| queries   | Yes      |             |         |
| directory | Yes      |             |         |
| format    | No       |             | json    |

Implementation Reference

  • Registration of the consult_codex_batch tool using the @mcp.tool() decorator.
    @mcp.tool()
  • Input schema defined by type hints and documentation for queries (list of dicts), directory (str), format (str), returning str (JSON).
    def consult_codex_batch( queries: List[Dict[str, Union[str, int]]], directory: str, format: str = "json" ) -> str: """ Consult multiple Codex queries in batch - perfect for CI/CD automation. Processes multiple prompts and returns consolidated JSON output. Each query can have individual timeout and format preferences. Args: queries: List of query dictionaries with keys: 'query' (required), 'timeout' (optional) directory: Working directory (required) format: Output format - currently only "json" supported for batch Returns: JSON array with all results """
  • Full implementation of the consult_codex_batch handler, including validation of inputs, batch processing of queries using Codex CLI, error handling, timeouts, and consolidated JSON response.
    @mcp.tool() def consult_codex_batch( queries: List[Dict[str, Union[str, int]]], directory: str, format: str = "json" ) -> str: """ Consult multiple Codex queries in batch - perfect for CI/CD automation. Processes multiple prompts and returns consolidated JSON output. Each query can have individual timeout and format preferences. Args: queries: List of query dictionaries with keys: 'query' (required), 'timeout' (optional) directory: Working directory (required) format: Output format - currently only "json" supported for batch Returns: JSON array with all results """ # Check if codex CLI is available if not _get_codex_command(): return json.dumps({ "status": "error", "error": "Codex CLI not found. Install from OpenAI" }, indent=2) # Validate directory if not os.path.isdir(directory): return json.dumps({ "status": "error", "error": f"Directory does not exist: {directory}" }, indent=2) # Validate queries if not queries or not isinstance(queries, list): return json.dumps({ "status": "error", "error": "Queries must be a non-empty list" }, indent=2) # Force JSON format for batch processing format = "json" results = [] for i, query_item in enumerate(queries): if not isinstance(query_item, dict) or 'query' not in query_item: results.append({ "status": "error", "error": f"Query {i+1}: Must be a dictionary with 'query' key", "index": i }) continue query = str(query_item.get('query', '')) query_timeout = query_item.get('timeout', _get_timeout()) if isinstance(query_timeout, str): try: query_timeout = int(query_timeout) except ValueError: query_timeout = _get_timeout() # Process individual query processed_query = _format_prompt_for_json(query) cmd = _build_codex_exec_command() if _should_skip_git_check(): cmd.append("--skip-git-repo-check") start_time = time.time() try: result = _run_codex_command(cmd, directory, query_timeout, processed_query) execution_time = time.time() - start_time if result.returncode == 0: cleaned_output = _clean_codex_output(result.stdout) 
raw_response = cleaned_output if cleaned_output else "No output from Codex CLI" # Try to extract JSON from response extracted_json = _extract_json_from_response(raw_response) results.append({ "status": "success", "index": i, "query": query[:100] + "..." if len(query) > 100 else query, # Truncate long queries "response": extracted_json if extracted_json else raw_response, "metadata": { "execution_time": execution_time, "timeout": query_timeout } }) else: results.append({ "status": "error", "index": i, "query": query[:100] + "..." if len(query) > 100 else query, "error": f"Codex CLI Error: {result.stderr.strip()}", "metadata": { "execution_time": execution_time, "timeout": query_timeout } }) except subprocess.TimeoutExpired: results.append({ "status": "error", "index": i, "query": query[:100] + "..." if len(query) > 100 else query, "error": f"Query timed out after {query_timeout} seconds", "metadata": { "timeout": query_timeout } }) except FileNotFoundError as e: codex_path = _get_codex_command() if _is_windows(): error_msg = ( f"Codex CLI not found or not executable. " f"Detected path: {codex_path or 'None'}. " f"Please ensure 'codex' is installed and in your PATH." ) else: error_msg = f"Codex CLI not found: {str(e)}" results.append({ "status": "error", "index": i, "query": query[:100] + "..." if len(query) > 100 else query, "error": error_msg, "metadata": { "platform": platform.system() } }) except Exception as e: results.append({ "status": "error", "index": i, "query": query[:100] + "..." if len(query) > 100 else query, "error": f"Error executing query: {str(e)}", "metadata": {} }) # Return consolidated results return json.dumps({ "status": "completed", "total_queries": len(queries), "successful": len([r for r in results if r["status"] == "success"]), "failed": len([r for r in results if r["status"] == "error"]), "results": results, "metadata": { "directory": directory, "format": format } }, indent=2)

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/eLyiN/codex-bridge'

If you have feedback or need assistance with the MCP directory API, please join our Discord server