test_single_scenario.py
#!/usr/bin/env python3
"""
Single-scenario test for debugging.
"""
import sys
import asyncio
from pathlib import Path

# Add the src directory to sys.path so llm_core can be imported
sys.path.insert(0, str(Path(__file__).parent.parent / "src"))

from llm_core import quick_chat
from llm_core.prompts import PromptType


async def test_simple_chat():
    """Simple chat test."""
    print("🧪 Testing simple chat...")
    try:
        response = await quick_chat(
            user_id="test_user",
            user_name="Test User",
            message="Ciao! Come stai?",
            prompt_type=PromptType.GENERAL_CHAT,
            role="User",
            expertise_level="beginner",
            communication_style="casual",
        )
        print(f"✅ Response: {response[:100]}...")
        return True
    except Exception as e:
        print(f"❌ Error: {e}")
        import traceback
        traceback.print_exc()
        return False


if __name__ == "__main__":
    success = asyncio.run(test_simple_chat())
    print(f"Result: {'SUCCESS' if success else 'FAILED'}")
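To run this test without the real backend behind llm_core, a minimal stand-in can be dropped in. The sketch below is hypothetical: its parameter list is inferred from the call site in the test above, and the actual quick_chat and PromptType in this repository may differ.

# Hypothetical, self-contained stand-in for llm_core.quick_chat.
# The signature mirrors the keyword arguments used by test_simple_chat();
# the real implementation in this repo may differ.
from enum import Enum


class PromptType(Enum):
    GENERAL_CHAT = "general_chat"


async def quick_chat(
    user_id: str,
    user_name: str,
    message: str,
    prompt_type: PromptType,
    role: str = "User",
    expertise_level: str = "beginner",
    communication_style: str = "casual",
) -> str:
    # Return a canned reply so the test can exercise its success path
    # without a live LLM call.
    return f"[{prompt_type.value}] Hello {user_name}, you said: {message}"

Pointing the test's import at this stub (for example via monkeypatching in a test harness) lets test_simple_chat() run end to end offline.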
