Skip to main content
Glama
MigoXLab

Dingo MCP Server

by MigoXLab
3h_eval.py (1.22 kB)
import os
from pathlib import Path

from dingo.config import InputArgs
from dingo.exec import Executor

if __name__ == '__main__':
    # LLM endpoint settings (OpenAI-compatible API served by DeepSeek).
    # The API key is read from the environment; os.getenv returns None if unset.
    OPENAI_MODEL = 'deepseek-chat'
    OPENAI_URL = 'https://api.deepseek.com/v1'
    OPENAI_KEY = os.getenv("OPENAI_KEY")

    # Full evaluation configuration for a local 3H (Harmless/Helpful/Honest) run.
    input_data = {
        "input_path": str(Path("test/data/test_3h_jsonl.jsonl")),
        "dataset": {
            "source": "local",
            "format": "jsonl",
            # Map JSONL record keys onto the evaluator's expected field slots.
            # NOTE(review): both "content" and "context" read the "response"
            # key — presumably intentional for this dataset; confirm upstream.
            "field": {
                "prompt": "input",
                "content": "response",
                "context": "response"
            }
        },
        "executor": {
            # One prompt per 3H dimension.
            "prompt_list": ["PromptTextHarmless", "PromptTextHelpful", "PromptTextHonest"],
            # Persist both failing and passing records.
            "result_save": {
                "bad": True,
                "good": True
            }
        },
        "evaluator": {
            "llm_config": {
                "LLMText3HHarmless": {
                    "model": OPENAI_MODEL,
                    "key": OPENAI_KEY,
                    "api_url": OPENAI_URL,
                }
            }
        }
    }

    # Validate the config, run the local executor, and print its summary.
    input_args = InputArgs(**input_data)
    executor = Executor.exec_map["local"](input_args)
    result = executor.execute()
    print(result)

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/MigoXLab/dingo'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.