
Grocery Search MCP Server

test_server.py
#!/usr/bin/env python3
"""Test script for the Grocery Search MCP Server."""

import asyncio
import json
import logging

from grocery_search_mcp.scraper import ScraperFactory
from grocery_search_mcp.models import SearchRequest

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


async def test_scraper():
    """Test the scraper functionality."""
    logger.info("Testing scraper functionality...")

    # Test queries
    test_queries = [
        "protein bar",
        "greek yogurt",
        "chicken breast",
        "tofu"
    ]

    scraper = ScraperFactory.create_scraper("trader_joes")

    async with scraper:
        for query in test_queries:
            logger.info(f"\nSearching for: {query}")
            products = await scraper.search(query)

            if products:
                for product in products:
                    print(f"  - {product.name}")
                    print(f"    Price: ${product.price:.2f}")
                    print(f"    Protein: {product.protein_g}g")
                    print(f"    Protein per $: {product.protein_per_dollar:.1f}g")
                    print(f"    Macros missing: {product.macros_missing}")
                    print()
            else:
                print(f"  No products found for '{query}'")


def test_models():
    """Test the data models."""
    logger.info("Testing data models...")

    # Test SearchRequest
    request = SearchRequest(query="protein bar", store="trader_joes")
    print(f"Search request: {request}")

    # Test with default store
    request2 = SearchRequest(query="greek yogurt")
    print(f"Search request with default store: {request2}")


if __name__ == "__main__":
    print("=== Testing Grocery Search MCP Server ===\n")
    test_models()
    print("\n" + "="*50 + "\n")
    asyncio.run(test_scraper())
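
The test script only touches a handful of attributes on SearchRequest and Product, so their shapes can be inferred from usage. The following is a minimal sketch of what grocery_search_mcp.models might look like, not the repository's actual definitions: the Pydantic base class, the "trader_joes" default store, and the derived protein_per_dollar property are all assumptions made for illustration.

# Hypothetical sketch of grocery_search_mcp.models, inferred from the fields
# the test script reads. Names and defaults are assumptions, not the real code.
from typing import Optional
from pydantic import BaseModel


class SearchRequest(BaseModel):
    query: str
    store: str = "trader_joes"  # assumed default, since test_models() can omit it


class Product(BaseModel):
    name: str
    price: float                   # USD
    protein_g: Optional[float] = None
    macros_missing: bool = False   # True when nutrition data could not be scraped

    @property
    def protein_per_dollar(self) -> float:
        """Grams of protein per dollar; 0 when protein or price is unavailable."""
        if not self.protein_g or not self.price:
            return 0.0
        return self.protein_g / self.price

Under these assumed definitions, the test's print statements (price, protein, protein per dollar, missing macros) all resolve directly, which is why the script needs no extra setup beyond constructing the scraper.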

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ajaykallepalli/MCP_Food_Search'
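
If you prefer Python to curl, the same endpoint can be queried with the standard library. This is a small sketch using only urllib and json; the response schema is whatever the Glama MCP API returns and is not assumed here.

# Python equivalent of the curl example above, standard library only.
import json
import urllib.request

url = "https://glama.ai/api/mcp/v1/servers/ajaykallepalli/MCP_Food_Search"
with urllib.request.urlopen(url) as response:
    server_info = json.load(response)

# Print the raw payload; the exact fields are defined by the Glama MCP API.
print(json.dumps(server_info, indent=2))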

If you have feedback or need assistance with the MCP directory API, please join our Discord server.