test_local.py (2.11 kB)
# Copyright (C) 2023 the project owner
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import asyncio
import os
import sys

# Add current directory to path
sys.path.append(os.getcwd())

from delia.mcp_server import call_llamacpp, backend_manager


async def main():
    print("Sending message to Local GPU (llama.cpp)...")

    # Ensure backend manager loads settings
    backend_manager._load_settings()

    # Get the local backend config
    local_backend = backend_manager.get_backend("llamacpp-local")
    if not local_backend:
        print("Error: 'llamacpp-local' backend not found in settings.")
        return

    if not local_backend.enabled:
        print("Error: 'llamacpp-local' is disabled.")
        return

    # Check health first
    print(f"Checking health of {local_backend.url}...")
    is_healthy = await local_backend.check_health()
    if not is_healthy:
        print("❌ Local backend is OFFLINE. Is llama-server running?")
        return

    # Send request
    print("Sending 'Hello'...")
    result = await call_llamacpp(
        model=local_backend.models["quick"],  # Use the 'quick' model
        prompt="Hello! Are you running on the local GPU?",
        task_type="quick",
        backend_obj=local_backend
    )

    if result.get("success"):
        print("\n✅ Response from Local GPU:")
        print(f"{result['response']}")
    else:
        print("\n❌ Failed!")
        print(f"Error: {result['error']}")


if __name__ == "__main__":
    asyncio.run(main())
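
The script assumes llama-server is already up before it runs. As an independent check of that assumption, the minimal sketch below probes llama.cpp's llama-server over plain HTTP via its GET /health endpoint; the http://localhost:8080 address is an assumption (llama-server's default), and the real address should come from delia's backend settings.

# Minimal standalone probe of llama-server, independent of delia.
# Assumption: llama-server is listening on its default address below.
import urllib.request

LLAMA_SERVER_URL = "http://localhost:8080"  # adjust to match your settings

try:
    with urllib.request.urlopen(f"{LLAMA_SERVER_URL}/health", timeout=5) as resp:
        print("llama-server is up:", resp.read().decode())
except OSError as exc:
    print("llama-server is unreachable:", exc)

If the probe fails, start llama-server first; once it responds, run the test with python test_local.py from the repository root so the delia package is importable.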

MCP directory API

We provide all the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/zbrdc/delia'
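
The same endpoint can be called from Python instead of curl. The sketch below uses only the standard library; the URL is taken from the curl example above, while the shape of the JSON payload is an assumption, so the response is pretty-printed rather than picked apart field by field.

# Fetch this server's directory entry from the Glama MCP API.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/zbrdc/delia"

with urllib.request.urlopen(URL) as resp:
    server_info = json.load(resp)

# The payload structure is not documented here, so just pretty-print it.
print(json.dumps(server_info, indent=2))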

If you have feedback or need assistance with the MCP directory API, please join our Discord server.