
Pinecone MCP Server

server.json
{ "$schema": "https://registry.nimbletools.ai/schemas/2025-09-22/nimbletools-server.schema.json", "name": "ai.nimbletools/pinecone", "version": "1.0.0", "description": "Pinecone API: vector database for embeddings, similarity search, and RAG applications", "status": "active", "repository": { "url": "https://github.com/NimbleBrainInc/mcp-pinecone", "source": "github", "branch": "main" }, "websiteUrl": "https://www.pinecone.io/", "packages": [ { "registryType": "oci", "registryBaseUrl": "https://docker.io", "identifier": "nimbletools/mcp-pinecone", "version": "1.0.0", "transport": { "type": "streamable-http", "url": "https://mcp.nimbletools.ai/mcp" }, "environmentVariables": [ { "name": "PINECONE_API_KEY", "description": "Pinecone API key (get from https://app.pinecone.io)", "isRequired": true, "isSecret": true, "example": "your_pinecone_api_key" }, { "name": "PINECONE_ENVIRONMENT", "description": "Pinecone environment (e.g., us-west1-gcp, us-east-1-aws)", "isRequired": true, "isSecret": false, "example": "us-west1-gcp" } ] } ], "_meta": { "ai.nimbletools.mcp/v1": { "container": { "healthCheck": { "path": "/health", "port": 8000 } }, "capabilities": { "tools": true, "resources": false, "prompts": false }, "resources": { "limits": { "memory": "256Mi", "cpu": "250m" }, "requests": { "memory": "128Mi", "cpu": "100m" } }, "deployment": { "protocol": "http", "port": 8000, "mcpPath": "/mcp" }, "display": { "name": "Pinecone", "category": "infrastructure-data", "tags": [ "pinecone", "vector-database", "embeddings", "similarity-search", "rag", "semantic-search", "ai", "machine-learning", "requires-api-key" ], "branding": { "logoUrl": "https://static.nimbletools.ai/logos/pinecone.png", "iconUrl": "https://static.nimbletools.ai/icons/pinecone.png" }, "documentation": { "readmeUrl": "https://raw.githubusercontent.com/NimbleBrainInc/mcp-pinecone/main/README.md" } } } } }

MCP directory API

We provide all the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/NimbleBrainInc/mcp-pinecone'
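The response is JSON, so it can be piped through any JSON formatter for readability. A small sketch, assuming Python 3 is available on the path (jq or a similar tool works just as well):

# Fetch the directory entry and pretty-print the JSON response.
curl -s 'https://glama.ai/api/mcp/v1/servers/NimbleBrainInc/mcp-pinecone' | python3 -m json.tool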

If you have feedback or need assistance with the MCP directory API, please join our Discord server.