# CORRECT DEPENDENCIES FOR CHARNOKS MCP SERVER
# This project uses: Gemini (primary), OpenRouter, Cohere, HuggingFace
# NO OpenAI or Anthropic direct APIs
# === PRODUCTION DEPENDENCIES ===
npm install \
  @google/generative-ai @google-cloud/language @huggingface/inference cohere-ai openai \
  @modelcontextprotocol/sdk @supabase/supabase-js \
  express cors express-rate-limit ws node-fetch \
  dotenv uuid jsonwebtoken zod p-limit
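# A minimal TypeScript sketch of initializing the two required providers
# with the packages above (a sketch only; the Gemini model id and the
# top-level-await context are assumptions, not taken from this project):
#
#   import { GoogleGenerativeAI } from "@google/generative-ai";
#   import { createClient } from "@supabase/supabase-js";
#
#   const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY!);
#   const gemini = genAI.getGenerativeModel({ model: "gemini-1.5-flash" }); // assumed model id
#
#   const supabase = createClient(
#     process.env.SUPABASE_URL!,
#     process.env.SUPABASE_SERVICE_ROLE_KEY!,
#   );
#
#   const reply = await gemini.generateContent("ping");
#   console.log(reply.response.text());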
# === DEVELOPMENT DEPENDENCIES ===
npm install --save-dev \
  typescript tsx jest @jest/globals \
  @types/jest @types/node \
  @types/express @types/cors @types/uuid @types/jsonwebtoken @types/ws
# (p-limit ships its own type definitions, so @types/p-limit is not needed)
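# Typical package.json scripts these dev tools support (a sketch; the
# script names and entry paths are assumptions, not this project's actual
# package.json):
#
#   "scripts": {
#     "dev": "tsx watch src/index.ts",
#     "build": "tsc",
#     "test": "jest",
#     "start": "node dist/index.js"
#   }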
# === IMPORTANT NOTES ===
# - We keep the 'openai' package for OpenRouter compatibility (OpenRouter uses the OpenAI API format; see the sketch after these notes)
# - NO @anthropic-ai/sdk needed (we use OpenRouter instead)
# - Cohere and HuggingFace are for additional model diversity
# - Gemini is the primary AI provider
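# A minimal sketch of pointing the 'openai' client at OpenRouter, which is
# why that package stays installed (the baseURL is OpenRouter's documented
# endpoint; the model id is just an example):
#
#   import OpenAI from "openai";
#
#   const openrouter = new OpenAI({
#     baseURL: "https://openrouter.ai/api/v1",
#     apiKey: process.env.OPENROUTER_API_KEY,
#   });
#
#   const completion = await openrouter.chat.completions.create({
#     model: "meta-llama/llama-3.1-8b-instruct", // example model id
#     messages: [{ role: "user", content: "ping" }],
#   });
#   console.log(completion.choices[0].message.content);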
# === ENVIRONMENT VARIABLES NEEDED ===
# Required: GEMINI_API_KEY, SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY
# Optional: OPENROUTER_API_KEY, COHERE_API_KEY, HF_TOKEN
# See .env.example for complete list
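# An illustrative .env covering only the variables named above (placeholder
# values; .env.example remains the authoritative list):
#
#   GEMINI_API_KEY=your-gemini-key
#   SUPABASE_URL=https://your-project.supabase.co
#   SUPABASE_SERVICE_ROLE_KEY=your-service-role-key
#   OPENROUTER_API_KEY=your-openrouter-key
#   COHERE_API_KEY=your-cohere-key
#   HF_TOKEN=your-huggingface-token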
# === DOCKER DEPLOYMENT ===
# Build once, run anywhere
docker build -t charnoks-mcp-server .
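# A minimal Dockerfile sketch that this build command assumes (the base
# image, build script, and entry path are assumptions; adjust to the actual
# project layout):
#
#   FROM node:20-alpine
#   WORKDIR /app
#   COPY package*.json ./
#   RUN npm ci
#   COPY . .
#   RUN npm run build
#   EXPOSE 3002
#   CMD ["node", "dist/index.js"]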
# Run locally (the required provider keys come from an env file; docker's --env-file flag loads it)
docker run -p 3002:3002 --env-file .env charnoks-mcp-server
# Deploy to production with the same image
docker run -d --name production-mcp \
  -p 3002:3002 \
  -e NODE_ENV=production \
  --env-file .env \
  charnoks-mcp-server
# Scale out by mapping additional host ports to the same container port
docker run -d --name mcp-instance-2 -p 3003:3002 --env-file .env charnoks-mcp-server
docker run -d --name mcp-instance-3 -p 3004:3002 --env-file .env charnoks-mcp-server
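# Verify all instances are running (matches the container names above)
docker ps --filter "name=mcp"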