Skip to main content
Glama

MCP Orchestration Server

test_llm_functionality.py • 2.84 kB
#!/usr/bin/env python3
"""Smoke-test LLM functionality (EnhancedPDFReader) with the user's credentials."""

from pathlib import Path


def test_llm_functionality():
    """Exercise the EnhancedPDFReader LLM stack end to end.

    Checks that the LLM, embeddings, and API key are initialized, then — if a
    PDF is available under data/multimodal/uploaded_pdfs — loads it, asks a
    question, and requests a summary, printing progress at each step.

    Returns:
        bool: True when the components work (or no test PDFs exist to run the
        full pipeline); False on any initialization, processing, or Q&A failure.
    """
    print("šŸ” Testing LLM Functionality with Your Credentials")
    print("=" * 50)
    try:
        # Project-local import kept inside the try so a missing/broken module
        # is reported as a test failure rather than crashing the script.
        from data.multimodal.pdf_reader import EnhancedPDFReader

        reader = EnhancedPDFReader()
        print(f"āœ… LLM Available: {reader.llm is not None}")
        print(f"āœ… Embeddings Available: {reader.embeddings is not None}")
        print(f"āœ… API Key Set: {bool(reader.api_key)}")
        print(f"āœ… Model: {reader.model_name}")

        # Guard clause: both components must be up before any PDF work.
        if not (reader.llm and reader.embeddings):
            print("āŒ LLM components not properly initialized")
            return False

        pdf_dir = Path("data/multimodal/uploaded_pdfs")
        pdf_files = list(pdf_dir.glob("*.pdf"))
        if not pdf_files:
            # No fixtures: components are verified, so this still counts as a pass.
            print("āš ļø No PDF files found for testing")
            print("āœ… LLM components are working (no test files)")
            return True

        test_pdf = pdf_files[0]
        print(f"\nšŸ” Testing with PDF: {test_pdf.name}")
        if not reader.load_and_process_pdf(str(test_pdf), verbose=True):
            print("āŒ PDF processing failed")
            return False
        print("āœ… PDF loaded successfully for Q&A")

        question = "What is this document about?"
        print(f"\nšŸ¤” Asking: {question}")
        answer = reader.ask_question(question, verbose=True)
        # The reader signals failure with an answer prefixed by "āŒ".
        if not answer or answer.startswith("āŒ"):
            print(f"āŒ LLM Answer failed: {answer}")
            return False
        print("āœ… LLM Answer received:")
        print(f"šŸ“ {answer[:200]}...")

        print("\nšŸ“„ Getting document summary...")
        summary = reader.get_document_summary(max_length=150)
        print(f"āœ… Summary: {summary[:150]}...")

        print("\nšŸŽ‰ LLM Functionality: FULLY WORKING!")
        return True
    except Exception as e:
        # Broad by design: this is a top-level diagnostic script, and any
        # failure (import, I/O, API) should be reported, not raised.
        print(f"āŒ LLM Test failed: {e}")
        return False


if __name__ == "__main__":
    test_llm_functionality()

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Nisarg-123-web/MCP2'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.