
MCP Wikipedia Server

by kaman05010
errors.py (3.7 kB)
from __future__ import annotations

from collections.abc import Sequence
from enum import Enum
from typing import Any
from warnings import warn

from typing_extensions import deprecated

# EmptyChannelError is re-exported from langgraph.channels.base
from langgraph.checkpoint.base import EmptyChannelError  # noqa: F401
from langgraph.types import Command, Interrupt
from langgraph.warnings import LangGraphDeprecatedSinceV10

__all__ = (
    "EmptyChannelError",
    "ErrorCode",
    "GraphRecursionError",
    "InvalidUpdateError",
    "GraphBubbleUp",
    "GraphInterrupt",
    "NodeInterrupt",
    "ParentCommand",
    "EmptyInputError",
    "TaskNotFound",
)


class ErrorCode(Enum):
    GRAPH_RECURSION_LIMIT = "GRAPH_RECURSION_LIMIT"
    INVALID_CONCURRENT_GRAPH_UPDATE = "INVALID_CONCURRENT_GRAPH_UPDATE"
    INVALID_GRAPH_NODE_RETURN_VALUE = "INVALID_GRAPH_NODE_RETURN_VALUE"
    MULTIPLE_SUBGRAPHS = "MULTIPLE_SUBGRAPHS"
    INVALID_CHAT_HISTORY = "INVALID_CHAT_HISTORY"


def create_error_message(*, message: str, error_code: ErrorCode) -> str:
    return (
        f"{message}\n"
        "For troubleshooting, visit: https://python.langchain.com/docs/"
        f"troubleshooting/errors/{error_code.value}"
    )


class GraphRecursionError(RecursionError):
    """Raised when the graph has exhausted the maximum number of steps.

    This prevents infinite loops. To increase the maximum number of steps,
    run your graph with a config specifying a higher `recursion_limit`.

    Troubleshooting Guides:

    - [GRAPH_RECURSION_LIMIT](https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT)

    Examples:

        graph = builder.compile()
        graph.invoke(
            {"messages": [("user", "Hello, world!")]},
            # The config is the second positional argument
            {"recursion_limit": 1000},
        )
    """

    pass


class InvalidUpdateError(Exception):
    """Raised when attempting to update a channel with an invalid set of updates.

    Troubleshooting Guides:

    - [INVALID_CONCURRENT_GRAPH_UPDATE](https://python.langchain.com/docs/troubleshooting/errors/INVALID_CONCURRENT_GRAPH_UPDATE)
    - [INVALID_GRAPH_NODE_RETURN_VALUE](https://python.langchain.com/docs/troubleshooting/errors/INVALID_GRAPH_NODE_RETURN_VALUE)
    """

    pass


class GraphBubbleUp(Exception):
    pass


class GraphInterrupt(GraphBubbleUp):
    """Raised when a subgraph is interrupted, suppressed by the root graph.

    Never raised directly, or surfaced to the user."""

    def __init__(self, interrupts: Sequence[Interrupt] = ()) -> None:
        super().__init__(interrupts)


@deprecated(
    "NodeInterrupt is deprecated. Please use `langgraph.types.interrupt` instead.",
    stacklevel=2,
)
class NodeInterrupt(GraphInterrupt):
    """Raised by a node to interrupt execution.

    Deprecated in V1.0.0 in favor of [`interrupt`][langgraph.types.interrupt].
    """

    def __init__(self, value: Any, id: str | None = None) -> None:
        warn(
            "NodeInterrupt is deprecated. Please use `langgraph.types.interrupt` instead.",
            LangGraphDeprecatedSinceV10,
            stacklevel=2,
        )
        if id is None:
            super().__init__([Interrupt(value=value)])
        else:
            super().__init__([Interrupt(value=value, id=id)])


class ParentCommand(GraphBubbleUp):
    args: tuple[Command]

    def __init__(self, command: Command) -> None:
        super().__init__(command)


class EmptyInputError(Exception):
    """Raised when graph receives an empty input."""

    pass


class TaskNotFound(Exception):
    """Raised when the executor is unable to find a task (for distributed mode)."""

    pass
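
For context (not part of the file above): a minimal sketch of how GraphRecursionError surfaces in practice, following the docstring example. The graph construction here — the State TypedDict, the ping/pong nodes, and the deliberate cycle — is assumed for illustration; only the recursion_limit config key and the exception itself come from the code above.

from typing_extensions import TypedDict

from langgraph.errors import GraphRecursionError
from langgraph.graph import START, StateGraph


class State(TypedDict):
    count: int


def ping(state: State) -> dict:
    # Hypothetical node: increments the counter on every pass
    return {"count": state["count"] + 1}


def pong(state: State) -> dict:
    return {"count": state["count"] + 1}


builder = StateGraph(State)
builder.add_node("ping", ping)
builder.add_node("pong", pong)
builder.add_edge(START, "ping")
builder.add_edge("ping", "pong")
builder.add_edge("pong", "ping")  # deliberate cycle with no exit

graph = builder.compile()

try:
    # recursion_limit caps the number of super-steps before the graph gives up;
    # the config is the second positional argument, as in the docstring above.
    graph.invoke({"count": 0}, {"recursion_limit": 5})
except GraphRecursionError as err:
    # The error message points to the GRAPH_RECURSION_LIMIT troubleshooting page.
    print(f"Stopped after hitting the step limit: {err}")

Raising the limit (for example, {"recursion_limit": 1000}) lets a legitimately long-running cycle complete instead of raising.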


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/kaman05010/MCPClientServer'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.