# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/deepset-ai/hayhooks'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
# Two-stage chat pipeline: prompt_1 -> llm_1 produces a first answer,
# prompt_2 -> llm_2 refines it. Indentation reconstructed to the standard
# Haystack serialization layout (component name -> init_parameters/type).
components:
  # Stage 1: build the initial user message from the `query` input.
  prompt_1:
    init_parameters:
      required_variables: "*"
      template: |
        {% message role="user" %}
        Answer this question: {{query}}
        Answer:
        {% endmessage %}
    type: haystack.components.builders.chat_prompt_builder.ChatPromptBuilder
  # Stage 1 generator: reads the API key from the OPENAI_API_KEY env var
  # (strict: true -> fail fast if the variable is unset).
  llm_1:
    init_parameters:
      api_base_url: null
      api_key:
        env_vars:
          - OPENAI_API_KEY
        strict: true
        type: env_var
      generation_kwargs: {}
      model: gpt-4o-mini
      streaming_callback: null
    type: haystack.components.generators.chat.openai.OpenAIChatGenerator
  # Stage 2: wrap llm_1's first reply in a refinement prompt.
  # `previous_reply` is connected from llm_1.replies (see connections).
  prompt_2:
    init_parameters:
      required_variables: "*"
      template: |
        {% message role="user" %}
        Refine this response: {{previous_reply[0].text}}
        Improved answer:
        {% endmessage %}
    type: haystack.components.builders.chat_prompt_builder.ChatPromptBuilder
  # Stage 2 generator: same model/credentials configuration as llm_1.
  llm_2:
    init_parameters:
      api_base_url: null
      api_key:
        env_vars:
          - OPENAI_API_KEY
        strict: true
        type: env_var
      generation_kwargs: {}
      model: gpt-4o-mini
      streaming_callback: null
    type: haystack.components.generators.chat.openai.OpenAIChatGenerator
# Wiring: prompt_1 -> llm_1 -> prompt_2 -> llm_2 (linear chain).
connections:
  - receiver: llm_1.messages
    sender: prompt_1.prompt
  # llm_1's reply list feeds the `previous_reply` template variable of prompt_2.
  - receiver: prompt_2.previous_reply
    sender: llm_1.replies
  - receiver: llm_2.messages
    sender: prompt_2.prompt
metadata: {}
# Pipeline-level I/O mapping: the external `query` input is routed to
# prompt_1, and the final refined replies from llm_2 are exposed.
inputs:
  query: prompt_1.query
outputs:
  replies: llm_2.replies
# Components whose token stream is forwarded to the caller.
streaming_components:
  - llm_1
  - llm_2