
Deep Research MCP Server

by Ozamatash
package.json (1.48 kB)

{
  "name": "open-deep-research",
  "version": "0.0.1",
  "main": "dist/mcp-server.js",
  "type": "module",
  "scripts": {
    "format": "prettier --write \"src/**/*.{ts,tsx}\"",
    "tsx": "tsx --env-file=.env.local",
    "start": "tsx --env-file=.env.local src/run.ts",
    "start:stdio": "tsx --env-file=.env.local src/mcp-server.ts",
    "start:http": "tsx --env-file=.env.local src/http-server.ts",
    "build": "tsc",
    "build:watch": "tsc --watch",
    "serve": "node --env-file=.env.local dist/mcp-server.js",
    "serve:http": "node --env-file=.env.local dist/http-server.js",
    "docker": "tsx src/run.ts",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "description": "",
  "devDependencies": {
    "@ianvs/prettier-plugin-sort-imports": "^4.4.1",
    "@types/express": "^5.0.2",
    "@types/lodash-es": "^4.17.12",
    "@types/node": "^22.13.0",
    "prettier": "^3.4.2",
    "tsx": "^4.19.2",
    "typescript": "^5.7.3"
  },
  "dependencies": {
    "@ai-sdk/anthropic": "^2.0.1",
    "@ai-sdk/google": "^2.0.3",
    "@ai-sdk/openai": "^2.0.5",
    "@ai-sdk/xai": "^2.0.2",
    "@mendable/firecrawl-js": "^1.16.0",
    "@modelcontextprotocol/sdk": "^1.12.1",
    "ai": "^5.0.8",
    "dotenv": "^16.4.7",
    "express": "^5.1.0",
    "js-tiktoken": "^1.0.17",
    "langfuse": "^3.35.2",
    "lodash-es": "^4.17.21",
    "p-limit": "^6.2.0",
    "zod": "^3.24.1"
  },
  "engines": {
    "node": "22.x"
  }
}

MCP directory API

All of the information about MCP servers in the directory is available through our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Ozamatash/deep-research-mcp'
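
The same endpoint can also be queried programmatically. The sketch below is only illustrative: the response schema isn't documented on this page, so it simply prints whatever JSON the API returns (requires Node 18+ for the global fetch, and an ESM context for top-level await).

// fetch-server-info.ts — query the Glama MCP directory API for this server
const url = "https://glama.ai/api/mcp/v1/servers/Ozamatash/deep-research-mcp";

const res = await fetch(url);
if (!res.ok) {
  throw new Error(`Request failed: ${res.status} ${res.statusText}`);
}

// Response schema isn't shown here, so just inspect the raw JSON.
const data = await res.json();
console.log(JSON.stringify(data, null, 2));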

If you have feedback or need assistance with the MCP directory API, please join our Discord server.