Skip to main content
Glama

search_models

Find AI models for image generation by entering descriptive queries. This tool helps users discover suitable models from the Replicate API for their specific project needs.

Instructions

Search for models using semantic search.

Input Schema

Table / JSON Schema

| Name  | Required | Description         | Default |
|-------|----------|---------------------|---------|
| query | Yes      | Search query string | —       |

Implementation Reference

  • The main handler function for the 'search_models' MCP tool. It uses the ReplicateClient to perform the search and formats the results into a ModelList.
    @mcp.tool( name="search_models", description="Search for models using semantic search.", ) async def search_models(query: str) -> ModelList: """Search for models using semantic search. Args: query: Search query string Returns: ModelList containing the matching models and pagination info Raises: RuntimeError: If the Replicate client fails to initialize Exception: If the API request fails """ async with ReplicateClient() as client: result = await client.search_models(query) return ModelList( models=[Model(**model) for model in result["models"]], next_cursor=result.get("next_cursor"), total_count=result.get("total_models") )
  • Registration of the 'search_models' tool with FastMCP using the @mcp.tool decorator, including name and description.
    @mcp.tool( name="search_models", description="Search for models using semantic search.", )
  • Helper method in ReplicateClient that implements the semantic model search using Replicate's QUERY API endpoint. Called by the tool handler.
    async def search_models( self, query: str, cursor: Optional[str] = None, ) -> dict[str, Any]: """Search for models using the QUERY endpoint. Args: query: Search query string cursor: Optional pagination cursor Returns: Dict containing search results with pagination info Raises: Exception: If the API request fails """ if not self.client: raise RuntimeError("Client not initialized. Check error property for details.") try: # Build URL with cursor if provided url = "/models" if cursor: url = f"{url}?cursor={cursor}" # Make QUERY request response = await self.http_client.request( "QUERY", url, content=query, headers={"Content-Type": "text/plain"} ) response.raise_for_status() data = response.json() # Format response with complete model structure return { "models": [ { "id": f"{model['owner']}/{model['name']}", "owner": model["owner"], "name": model["name"], "description": model.get("description"), "visibility": model.get("visibility", "public"), "github_url": model.get("github_url"), "paper_url": model.get("paper_url"), "license_url": model.get("license_url"), "run_count": model.get("run_count"), "cover_image_url": model.get("cover_image_url"), "default_example": model.get("default_example"), "featured": model.get("featured", False), "tags": model.get("tags", []), "latest_version": model.get("latest_version", { "id": model.get("latest_version", {}).get("id"), "created_at": model.get("latest_version", {}).get("created_at"), "cog_version": model.get("latest_version", {}).get("cog_version"), "openapi_schema": model.get("latest_version", {}).get("openapi_schema"), "model": f"{model['owner']}/{model['name']}", "replicate_version": model.get("latest_version", {}).get("replicate_version"), "hardware": model.get("latest_version", {}).get("hardware"), } if model.get("latest_version") else None), } for model in data.get("results", []) ], "next_cursor": data.get("next"), "total_count": data.get("total"), } except httpx.HTTPError as err: logger.error(f"HTTP error during model 
search: {str(err)}") raise Exception(f"Failed to search models: {str(err)}") from err except Exception as err: logger.error(f"Failed to search models: {str(err)}") raise Exception(f"Failed to search models: {str(err)}") from err

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/gerred/mcp-server-replicate'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.