# ==============================================================================
# GraphRAG Llama Index - Environment Configuration Template
# ==============================================================================
# Instructions:
# 1. Copy this file to .env: `cp .env.example .env`
# 2. Update the values below to match your local setup.
# ==============================================================================
# --- [DOCKER VOLUME CONFIGURATION] ---
# DOCUMENTS_HOME: Path to your documents folder on the host machine.
# This folder is mounted to /app/documents inside the Docker container.
#
# Examples:
# Windows: E:/ai-workspace/analysis-docs
# Linux: /home/user/documents
# macOS: /Users/username/Documents
#
# This allows you to keep your documents anywhere on your system while
# GraphRAG accesses them via a consistent path inside Docker.
#
# Default: ./documents (relative to this project directory)
DOCUMENTS_HOME=./documents
# GRAPHRAG_REGISTRY_DIR: Location of the GraphRAG index vault (optional)
# This is where GraphRAG stores its internal database files (.duckdb), indices,
# and metadata. By default, this is stored in your home directory.
#
# Default: ~/.graphrag (recommended for most users)
# Custom example: /mnt/fast-ssd/graphrag-vault
# GRAPHRAG_REGISTRY_DIR=~/.graphrag
# --- [RECOMMENDED LOCAL SETUP] ---
# These settings match the Docker Model Runner configuration used in the guide.
# Embedding Configuration
EMBEDDING_PROVIDER=docker_model_runner
EMBEDDING_MODEL=ai/qwen3-embedding:latest
EMBEDDING_URL=http://host.docker.internal:12434
# Embedding vector size: use 2560 for Qwen3, 1024 for Granite.
# (Kept on its own line: some env-file parsers treat trailing "# ..." as part of the value.)
EMBEDDING_DIMENSION=2560
# LLM Configuration (Entity Extraction)
LLM_URL=http://host.docker.internal:12434/v1
LLM_MODEL=ai/granite-4.0-micro:latest
LLM_TEMPERATURE=0.1
LLM_MAX_TOKENS=2048
# Extraction Mode
# Options: local_llm (LLM Only), gliner_llm (GLiNER + LLM Hybrid)
ENTITY_EXTRACTION_MODE=local_llm
# --- [OPTIONAL: OPENROUTER / CLOUD] ---
# Enable this to offload complex relationship reasoning to the cloud.
# NOTE(review): presumably enabled by setting RELATIONSHIP_PROVIDER to a non-local
# value (e.g. an OpenRouter-backed provider) — confirm the accepted values against
# the application's configuration code.
RELATIONSHIP_PROVIDER=local
# SECURITY: put your real key only in your local .env (which should be gitignored);
# never commit a real API key to this template.
OPENROUTER_API_KEY=sk-or-v1-your-api-key-here
OPENROUTER_MODEL=xiaomi/mimo-v2-flash:free
# --- [SYSTEM CONFIG] ---
# Active database used by the MCP server and CLI defaults
GRAPHRAG_DATABASE=investment-analysis
# Retrieval tuning
TOP_K=10
RRF_K=60
# Stopword language: en, zh (Chinese), ja (Japanese), ko (Korean), fr, de, es, etc.
# (Kept on its own line: some env-file parsers treat trailing "# ..." as part of the value.)
BM25_LANGUAGE=en