We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/Yang-Charles/build-mcp'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
llm_chat.py•547 B
from langchain_community.chat_models.tongyi import ChatTongyi
from typing import List, Optional
from langchain_core.messages import BaseMessage
class DashScopeChat(ChatTongyi):
    """ChatTongyi subclass that sanitizes ``tool_choice`` before generation.

    The DashScope backend only accepts a limited set of string values for
    ``tool_choice`` (per the original author's note, just two values —
    presumably "auto" and "none"; TODO confirm against backend docs). Any
    non-string value (e.g. an OpenAI-style dict forcing a specific tool)
    is therefore coerced to ``"auto"`` before delegating to the parent.
    """

    def _generate(self, messages: List[BaseMessage], **kwargs):
        # Coerce non-string tool_choice values to "auto"; string values
        # (and an absent/None tool_choice) pass through untouched.
        choice = kwargs.get("tool_choice")
        if not (choice is None or isinstance(choice, str)):
            kwargs["tool_choice"] = "auto"
        return super()._generate(messages, **kwargs)
# model = DashScopeChat(model="qwen-max", temperature=0)