
LLM Researcher

by Code-Hex
config.ts (1.19 kB)

import dotenv from 'dotenv';
import { readFileSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
import type { Config, RCConfig } from './types.js';

dotenv.config();

// Load .llmresearcherrc from home directory if exists
let rcConfig: RCConfig = {};
try {
  const rcPath = join(homedir(), '.llmresearcherrc');
  const rcContent = readFileSync(rcPath, 'utf8');
  rcConfig = JSON.parse(rcContent) as RCConfig;
} catch (error) {
  // Ignore if file doesn't exist
}

export const config: Config = {
  verbose: false,
  userAgent: process.env.USER_AGENT || rcConfig.userAgent || 'Mozilla/5.0 (compatible; LLMResearcher/1.0)',
  timeout: parseInt(process.env.TIMEOUT || '') || rcConfig.timeout || 30000,
  maxRetries: parseInt(process.env.MAX_RETRIES || '') || rcConfig.maxRetries || 3,
  rateLimitDelay: parseInt(process.env.RATE_LIMIT_DELAY || '') || rcConfig.rateLimitDelay || 1000,
  cacheEnabled: process.env.CACHE_ENABLED !== 'false' && rcConfig.cacheEnabled !== false,
  maxResults: parseInt(process.env.MAX_RESULTS || '') || rcConfig.maxResults || 5,
  log: (...args: any[]) => {
    if (config.verbose) {
      console.error('[VERBOSE]', ...args);
    }
  }
};
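Each setting is resolved with the same precedence: environment variable first, then the optional ~/.llmresearcherrc file, then a hard-coded default. Note that parseInt('') yields NaN, which is falsy, so an unset or non-numeric environment variable falls through to the next source. The Config and RCConfig types are imported from ./types.js but are not shown on this page; a minimal sketch consistent with the fields used above (an inference, not the repository's actual types.ts) might look like:

// Hypothetical types.ts, inferred from the fields read in config.ts above.
// The real definitions in the repository may differ.
export interface RCConfig {
  userAgent?: string;
  timeout?: number;
  maxRetries?: number;
  rateLimitDelay?: number;
  cacheEnabled?: boolean;
  maxResults?: number;
}

export interface Config {
  verbose: boolean;
  userAgent: string;
  timeout: number;
  maxRetries: number;
  rateLimitDelay: number;
  cacheEnabled: boolean;
  maxResults: number;
  log: (...args: any[]) => void;
}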

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Code-Hex/light-research-mcp'
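The same endpoint can be called from Node (18+, ESM) with the built-in fetch; a minimal sketch, assuming the endpoint returns JSON (its exact schema is not documented here):

// Fetch the directory entry for this server from the Glama MCP API.
// Response shape is an assumption; inspect the output to see the real schema.
const res = await fetch(
  'https://glama.ai/api/mcp/v1/servers/Code-Hex/light-research-mcp'
);
if (!res.ok) {
  throw new Error(`Request failed: ${res.status}`);
}
const server = await res.json();
console.log(server);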

If you have feedback or need assistance with the MCP directory API, please join our Discord server.