
MCP Claude Code

by SDGLBL
test_model_capabilities.py (1.51 kB)
"""Tests for model capability checking functions.""" from unittest.mock import patch from mcp_claude_code.tools.agent.tool_adapter import ( supports_parallel_function_calling, ) class TestModelCapabilities: """Tests for model capability checking functions.""" @patch("litellm.supports_parallel_function_calling") def test_supports_parallel_function_calling(self, mock_litellm_supports_parallel): """Test that supports_parallel_function_calling properly calls litellm.""" # Set up the mock mock_litellm_supports_parallel.return_value = True # Test with a model that supports parallel function calling assert supports_parallel_function_calling("gpt-4-turbo-preview") is True mock_litellm_supports_parallel.assert_called_with(model="gpt-4-turbo-preview") # Test with a provider-prefixed model mock_litellm_supports_parallel.reset_mock() mock_litellm_supports_parallel.return_value = True assert supports_parallel_function_calling("openai/gpt-4-turbo-preview") is True mock_litellm_supports_parallel.assert_called_with( model="openai/gpt-4-turbo-preview" ) # Test with a model that doesn't support parallel function calling mock_litellm_supports_parallel.reset_mock() mock_litellm_supports_parallel.return_value = False assert supports_parallel_function_calling("gpt-4") is False mock_litellm_supports_parallel.assert_called_with(model="gpt-4")
