
Katamari MCP Server

by ciphernaut
test_lm_studio_simple.py • 2.35 kB
#!/usr/bin/env python3
"""
Simple test for LM Studio without local fallback requirement.
"""

import asyncio
import sys
from pathlib import Path

# Add project root to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from katamari_mcp.acp.llm_client import LMStudioBackend, LLMConfig


async def test_lm_studio_only():
    """Test LM Studio backend directly."""
    print("🚀 Testing LM Studio Backend Only...")

    # Create LM Studio configuration
    config = LLMConfig(
        backend="lm_studio",
        model_name="auto",
        api_base="http://localhost:1234",
        temperature=0.7,
        max_tokens=1024
    )

    backend = LMStudioBackend()

    try:
        # Initialize
        success = await backend.initialize(config)
        print(f"✅ Initialization: {success}")

        if not success:
            return False

        # Get model info
        model_info = await backend.get_model_info()
        print(f"📊 Available models: {len(model_info.get('available_models', []))}")
        print(f"🎯 Selected model: {model_info.get('selected_model')}")

        # Test generation
        print("\n🧪 Testing code generation...")
        prompt = "Generate a simple Python function that adds two numbers and returns the result."

        response = await backend.generate(prompt, max_tokens=200)

        print("✅ Generation successful!")
        print("\n📄 Generated response:")
        print("-" * 60)
        print(response.strip())
        print("-" * 60)

        return True

    except Exception as e:
        print(f"❌ Error: {e}")
        import traceback
        traceback.print_exc()
        return False

    finally:
        await backend.cleanup()


async def main():
    """Main test function."""
    print("🔧 LM Studio Direct Test")
    print("=" * 50)

    success = await test_lm_studio_only()

    print("\n" + "=" * 50)
    if success:
        print("🎉 LM Studio backend is working perfectly!")
        print("   Ready for autonomous code generation!")
    else:
        print("💥 LM Studio test failed!")
        print("   Check that LM Studio is running on localhost:1234")

    return 0 if success else 1


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
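The test assumes a backend object exposing async initialize, get_model_info, generate, and cleanup methods. Below is a minimal sketch of what such a backend can look like against LM Studio's OpenAI-compatible HTTP API (/v1/models and /v1/chat/completions are LM Studio's standard routes); the class is illustrative only and is not the actual katamari_mcp.acp.llm_client implementation.

# A minimal sketch (NOT the katamari_mcp implementation) of a backend
# with the same coroutines the test above exercises, built on LM Studio's
# OpenAI-compatible REST endpoints. Requires: pip install httpx
import httpx


class MinimalLMStudioBackend:
    def __init__(self) -> None:
        self._client: httpx.AsyncClient | None = None
        self._model: str | None = None

    async def initialize(self, api_base: str = "http://localhost:1234") -> bool:
        """Open an HTTP client and pick the first model LM Studio reports."""
        self._client = httpx.AsyncClient(base_url=api_base, timeout=60.0)
        try:
            resp = await self._client.get("/v1/models")
            resp.raise_for_status()
            models = resp.json().get("data", [])
            self._model = models[0]["id"] if models else None
            return self._model is not None
        except httpx.HTTPError:
            return False

    async def get_model_info(self) -> dict:
        return {"selected_model": self._model}

    async def generate(self, prompt: str, max_tokens: int = 200) -> str:
        """Call the OpenAI-compatible chat completions endpoint."""
        assert self._client is not None, "call initialize() first"
        resp = await self._client.post(
            "/v1/chat/completions",
            json={
                "model": self._model,
                "messages": [{"role": "user", "content": prompt}],
                "max_tokens": max_tokens,
            },
        )
        resp.raise_for_status()
        return resp.json()["choices"][0]["message"]["content"]

    async def cleanup(self) -> None:
        """Close the HTTP client."""
        if self._client is not None:
            await self._client.aclose()

To run the repository's own test, start LM Studio's local server (it listens on localhost:1234 by default) and run python test_lm_studio_simple.py; a non-zero exit code means the backend could not be reached.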

MCP directory API

We provide all the information about MCP servers via our MCP directory API. For example, this server's entry can be fetched with:

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ciphernaut/katamari-mcp'
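The same endpoint can be consumed programmatically. Here is a short Python sketch; since the response schema is not documented here, it only fetches and pretty-prints the JSON payload.

# Fetch this server's entry from the Glama MCP directory API and
# pretty-print the JSON payload. Requires: pip install requests
import json
import requests

url = "https://glama.ai/api/mcp/v1/servers/ciphernaut/katamari-mcp"
resp = requests.get(url, timeout=30)
resp.raise_for_status()
print(json.dumps(resp.json(), indent=2))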

If you have feedback or need assistance with the MCP directory API, please join our Discord server.