getSecondOpinion
Leverage diverse LLM providers to generate responses tailored to your prompts. Select providers, configure models, and adjust parameters for dynamic AI-driven insights on the MindBridge MCP Server.
Instructions
Get responses from various LLM providers
Input Schema
Name | Required | Description | Default |
---|---|---|---|
frequency_penalty | No | ||
maxTokens | No | ||
model | Yes | ||
presence_penalty | No | ||
prompt | Yes | ||
provider | Yes | ||
reasoning_effort | No | ||
stop_sequences | No | ||
stream | No | ||
systemPrompt | No | ||
temperature | No | ||
top_k | No | ||
top_p | No ||
Input Schema (JSON Schema)
{
"$schema": "http://json-schema.org/draft-07/schema#",
"additionalProperties": false,
"properties": {
"frequency_penalty": {
"maximum": 2,
"minimum": -2,
"type": "number"
},
"maxTokens": {
"default": 1024,
"exclusiveMinimum": 0,
"type": "number"
},
"model": {
"minLength": 1,
"type": "string"
},
"presence_penalty": {
"maximum": 2,
"minimum": -2,
"type": "number"
},
"prompt": {
"minLength": 1,
"type": "string"
},
"provider": {
"enum": [
"openai",
"anthropic",
"deepseek",
"google",
"openrouter",
"ollama",
"openaiCompatible"
],
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"enum": [
"low",
"medium",
"high"
],
"type": "string"
},
{
"type": "null"
}
]
},
"stop_sequences": {
"items": {
"type": "string"
},
"type": "array"
},
"stream": {
"type": "boolean"
},
"systemPrompt": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
]
},
},
"temperature": {
"maximum": 1,
"minimum": 0,
"type": "number"
},
"top_k": {
"exclusiveMinimum": 0,
"type": "number"
},
"top_p": {
"maximum": 1,
"minimum": 0,
"type": "number"
}
},
"required": [
"prompt",
"provider",
"model"
],
"type": "object"
}