Skip to main content
Glama

MCP RAG Server

direct_query.py (3.94 kB)
#!/usr/bin/env python
"""
Direct query script to test the MCP server without interactive prompts.

Usage:
    python direct_query.py 'your query text'
"""
import sys

import requests

# Base URL of the locally running MCP RAG server.
SERVER_URL = "http://localhost:8000"


def _print_results(documents, source_label, total=None):
    """Pretty-print a list of retrieved documents.

    Args:
        documents: list of dicts with 'score', 'path', 'chunk_index', 'content'.
        source_label: human-readable name of the endpoint that produced them.
        total: reported result count; defaults to len(documents).
    """
    count = len(documents) if total is None else total
    print(f"\nFound {count} documents via {source_label}:")
    for i, doc in enumerate(documents):
        print(f"\n--- Result {i+1} (Score: {doc['score']:.4f}) ---")
        print(f"Source: {doc['path']} (Chunk {doc['chunk_index']})")
        # Only show the first 200 characters so long chunks stay readable.
        print(f"\nContent snippet: {doc['content'][:200]}...")


def direct_query(query_text, top_k=3):
    """Query the server with *query_text*, trying endpoints in order.

    Tries the MCP action endpoint first, falls back to the direct /query
    endpoint, and finally attempts a purely local embedding as a diagnostic.

    Args:
        query_text: the free-text query to send.
        top_k: maximum number of documents to request.

    Returns:
        A list of document dicts on success, or [] if every attempt failed.
    """
    print(f"Querying: '{query_text}'")
    try:
        # First try the MCP API endpoint.
        url = f"{SERVER_URL}/mcp/action"
        payload = {
            "action_type": "retrieve_documents",
            "payload": {"query": query_text, "top_k": top_k},
        }
        response = requests.post(url, json=payload, timeout=5)
        if response.status_code == 200:
            result = response.json()
            # .get() avoids a KeyError (reported as "Unexpected error")
            # when the server returns a malformed 200 body.
            if result.get("status") == "success":
                documents = result["data"]["documents"]
                _print_results(documents, "MCP API",
                               total=result["data"]["total_results"])
                return documents
            print(f"Error from MCP API: {result.get('error')}")
        else:
            print(f"HTTP Error from MCP API: {response.status_code}")

        # Fall back to direct API endpoint if MCP endpoint fails.
        print("\nFalling back to direct query endpoint...")
        url = f"{SERVER_URL}/query"
        payload = {"query": query_text, "top_k": top_k}
        response = requests.post(url, json=payload, timeout=5)
        if response.status_code == 200:
            documents = response.json()["documents"]
            _print_results(documents, "direct API")
            return documents
        print(f"HTTP Error from direct API: {response.status_code} - {response.text}")
    except requests.exceptions.ConnectionError:
        print(f"Connection Error: Could not connect to server at {SERVER_URL}")
        print("Make sure the server is running with 'python main.py'")
    except requests.exceptions.Timeout:
        print("Timeout Error: The server took too long to respond")
    except requests.exceptions.RequestException as e:
        print(f"Request Error: {str(e)}")
    except Exception as e:
        print(f"Unexpected error: {str(e)}")

    # If all API calls fail, try local processing if we have access to the model.
    try:
        print("\nAPI calls failed. Attempting local processing...")
        # Lazy import: sentence_transformers is heavy and only needed for
        # this diagnostic fallback, not for the normal HTTP path.
        from sentence_transformers import SentenceTransformer

        model = SentenceTransformer("all-MiniLM-L6-v2")
        # Encode purely as a smoke test of the local embedding pipeline;
        # without the server's index the embedding cannot be used further.
        model.encode(query_text)
        print("Generated query embedding locally, but need access to indexed documents.")
        print("Please ensure the server is running and accessible.")
    except Exception as e:
        print(f"Local processing also failed: {str(e)}")
    return []


def main():
    """Parse command-line arguments and run a single query."""
    if len(sys.argv) < 2:
        print("Usage: python direct_query.py 'your query text'")
        sys.exit(1)
    # Join all arguments so unquoted multi-word queries still work.
    query = " ".join(sys.argv[1:])
    direct_query(query)


if __name__ == "__main__":
    main()

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ProbonoBonobo/sui-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.