
search_documentation

Search documentation across installed docsets and snippets to find relevant information using the Dash documentation browser API.

Instructions

Search for documentation across docset identifiers and snippets.

Args:
- query: The search query string
- docset_identifiers: Comma-separated list of docset identifiers to search in (from list_installed_docsets)
- search_snippets: Whether to include snippets in search results
- max_results: Maximum number of results to return (1-1000)

Results are automatically truncated if they would exceed 25,000 tokens.

Input Schema

| Name | Required | Description | Default |
| --- | --- | --- | --- |
| query | Yes | The search query string | |
| docset_identifiers | Yes | Comma-separated list of docset identifiers to search in (from list_installed_docsets) | |
| search_snippets | No | Whether to include snippets in search results | true |
| max_results | No | Maximum number of results to return (1-1000) | 100 |
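For example, an MCP client might pass arguments like the following. This is only an illustrative sketch; the docset identifier values are made up and should be replaced with real identifiers returned by list_installed_docsets.

    # Illustrative arguments for a search_documentation call; identifiers are hypothetical.
    arguments = {
        "query": "URLSession dataTask",
        "docset_identifiers": "apple_api_reference,swift",  # obtain real values from list_installed_docsets
        "search_snippets": True,
        "max_results": 50,
    }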

Implementation Reference

  • The primary handler function for the 'search_documentation' tool. It is decorated with @mcp.tool() for registration, validates inputs, queries the Dash API server, processes and truncates results based on token limits, and returns structured SearchResults.
    @mcp.tool()
    async def search_documentation(
        ctx: Context,
        query: str,
        docset_identifiers: str,
        search_snippets: bool = True,
        max_results: int = 100,
    ) -> SearchResults:
        """
        Search for documentation across docset identifiers and snippets.

        Args:
            query: The search query string
            docset_identifiers: Comma-separated list of docset identifiers to search in (from list_installed_docsets)
            search_snippets: Whether to include snippets in search results
            max_results: Maximum number of results to return (1-1000)

        Results are automatically truncated if they would exceed 25,000 tokens.
        """
        if not query.strip():
            await ctx.error("Query cannot be empty")
            return SearchResults(error="Query cannot be empty")

        if not docset_identifiers.strip():
            await ctx.error("docset_identifiers cannot be empty. Get the docset identifiers using list_installed_docsets")
            return SearchResults(error="docset_identifiers cannot be empty. Get the docset identifiers using list_installed_docsets")

        if max_results < 1 or max_results > 1000:
            await ctx.error("max_results must be between 1 and 1000")
            return SearchResults(error="max_results must be between 1 and 1000")

        try:
            base_url = await working_api_base_url(ctx)
            if base_url is None:
                return SearchResults(error="Failed to connect to Dash API Server. Please ensure Dash is running and the API server is enabled (in Dash Settings > Integration).")

            params = {
                "query": query,
                "docset_identifiers": docset_identifiers,
                "search_snippets": search_snippets,
                "max_results": max_results,
            }

            await ctx.debug(f"Searching Dash API with query: '{query}'")

            with httpx.Client(timeout=30.0) as client:
                response = client.get(f"{base_url}/search", params=params)
                response.raise_for_status()
                result = response.json()

            # Check for warning message in response
            warning_message = None
            if "message" in result:
                warning_message = result["message"]
                await ctx.warning(warning_message)

            results = result.get("results", [])
            # Filter out empty dict entries (Dash API returns [{}] for no results)
            results = [r for r in results if r]

            if not results and ' ' in query:
                return SearchResults(results=[], error="Nothing found. Try to search for fewer terms.")

            await ctx.info(f"Found {len(results)} results")

            # Build result list with token limit checking
            token_limit = 25000
            current_tokens = 100  # Base overhead for response structure
            limited_results = []

            for item in results:
                search_result = SearchResult(
                    name=item["name"],
                    type=item["type"],
                    platform=item.get("platform"),
                    load_url=item["load_url"],
                    docset=item.get("docset"),
                    description=item.get("description"),
                    language=item.get("language"),
                    tags=item.get("tags")
                )

                # Estimate tokens for this result
                result_tokens = estimate_tokens(search_result)
                if current_tokens + result_tokens > token_limit:
                    await ctx.warning(f"Token limit reached. Returning {len(limited_results)} of {len(results)} results to stay under 25k token limit.")
                    break

                limited_results.append(search_result)
                current_tokens += result_tokens

            if len(limited_results) < len(results):
                await ctx.info(f"Returned {len(limited_results)} results (truncated from {len(results)} due to token limit)")

            return SearchResults(results=limited_results, error=warning_message)

        except httpx.HTTPStatusError as e:
            if e.response.status_code == 400:
                error_text = e.response.text
                if "Docset with identifier" in error_text and "not found" in error_text:
                    await ctx.error("Invalid docset identifier. Run list_installed_docsets to see available docsets.")
                    return SearchResults(error="Invalid docset identifier. Run list_installed_docsets to see available docsets, then use the exact identifier from that list.")
                elif "No docsets found" in error_text:
                    await ctx.error("No valid docsets found for search.")
                    return SearchResults(error="No valid docsets found for search. Either provide valid docset identifiers from list_installed_docsets, or set search_snippets=true to search snippets only.")
                else:
                    await ctx.error(f"Bad request: {error_text}")
                    return SearchResults(error=f"Bad request: {error_text}. Please ensure Dash is running and the API server is enabled (in Dash Settings > Integration).")
            elif e.response.status_code == 403:
                error_text = e.response.text
                if "API access blocked due to Dash trial expiration" in error_text:
                    await ctx.error("Dash trial expired. Purchase Dash to continue using the API.")
                    return SearchResults(error="Your Dash trial has expired. Purchase Dash at https://kapeli.com/dash to continue using the API. During trial expiration, API access is blocked.")
                else:
                    await ctx.error(f"Forbidden: {error_text}")
                    return SearchResults(error=f"Forbidden: {error_text}. Please ensure Dash is running and the API server is enabled (in Dash Settings > Integration).")
            await ctx.error(f"HTTP error: {e}")
            return SearchResults(error=f"HTTP error: {e}. Please ensure Dash is running and the API server is enabled (in Dash Settings > Integration).")

        except Exception as e:
            await ctx.error(f"Search failed: {e}")
            return SearchResults(error=f"Search failed: {e}. Please ensure Dash is running and the API server is enabled (in Dash Settings > Integration).")
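  • The handler relies on two helpers that are not reproduced on this page: working_api_base_url, which locates the running Dash API server and returns its base URL, and estimate_tokens, which approximates the token cost of a single result for the 25,000-token budget. A minimal sketch of the token estimator is shown below; it assumes a rough four-characters-per-token heuristic, and the actual implementation in the server may differ.

    # Hypothetical sketch of the estimate_tokens helper referenced above.
    # Assumption: ~4 characters per token over the serialized result; the real
    # server code may use a different heuristic or a proper tokenizer.
    def estimate_tokens(result: SearchResult) -> int:
        payload = result.model_dump_json()  # pydantic v2 serialization assumed
        return max(1, len(payload) // 4)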
  • Pydantic models defining the input/output schema for the search_documentation tool: SearchResult for individual results and SearchResults for the overall response structure.
    class SearchResult(BaseModel):
        """A search result from documentation."""
        name: str = Field(description="Name of the documentation entry")
        type: str = Field(description="Type of result (Function, Class, etc.)")
        platform: Optional[str] = Field(description="Platform of the result", default=None)
        load_url: str = Field(description="URL to load the documentation")
        docset: Optional[str] = Field(description="Name of the docset", default=None)
        description: Optional[str] = Field(description="Additional description", default=None)
        language: Optional[str] = Field(description="Programming language (snippet results only)", default=None)
        tags: Optional[str] = Field(description="Tags (snippet results only)", default=None)


    class SearchResults(BaseModel):
        """Result from searching documentation."""
        results: list[SearchResult] = Field(description="List of search results", default_factory=list)
        error: Optional[str] = Field(description="Error message if there was an issue", default=None)
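  • For reference, the response is plain JSON produced from these models. The snippet below is only an illustrative sketch of what a client receives: the field values and URL are placeholders, and pydantic v2 serialization (model_dump_json) is assumed.

    # Illustrative only: build a response from the models above and inspect the JSON
    # an MCP client would receive. Values are placeholders, not real Dash output.
    example = SearchResults(
        results=[
            SearchResult(
                name="httpx.Client",
                type="Class",
                load_url="https://example.invalid/httpx.Client",  # placeholder URL
                docset="Python 3",
            )
        ]
    )
    print(example.model_dump_json(indent=2))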

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Kapeli/dash-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.