Skip to main content
Glama
twelvedata

Twelve Data MCP Server

Official
by twelvedata

GetTimeSeriesMacd

Calculate MACD indicator values for financial instruments to identify trend reversals and trading signals using moving average convergence divergence analysis.

Instructions

The Moving Average Convergence Divergence (MACD) is a momentum indicator that measures the difference between two moving averages, with a signal line used to identify potential trend reversals and trading opportunities.

Input Schema

Table | JSON Schema
Name | Required | Description | Default
params | Yes | — | —

Implementation Reference

  • Generic handler executed by all individual tool functions, including GetTimeSeriesMacd. Constructs API request to https://api.twelvedata.com/macd (inferred from tool name convention and test data), injects apikey, sends GET with params, handles errors, and validates response using Pydantic model.
async def _call_endpoint(
    endpoint: str,
    params: P,
    response_model: Type[R],
    ctx: Context
) -> R:
    """Generic request handler shared by every generated tool.

    Injects the API key into ``params``, resolves path placeholders in
    ``endpoint``, performs a GET against the Twelve Data API, surfaces
    API-level errors as HTTPException, and validates the JSON payload
    against ``response_model``.

    NOTE(review): ``twelve_data_apikey``, ``transport`` and ``api_base``
    are captured from an enclosing scope not visible here — presumably
    server configuration; confirm against the surrounding module.
    """
    # Resolve the key per-request (may come from config or the MCP context).
    params.apikey = extract_twelve_data_apikey(
        twelve_data_apikey=twelve_data_apikey,
        transport=transport,
        ctx=ctx
    )
    # Drop unset fields so only explicitly-provided params reach the API.
    params_dict = params.model_dump(exclude_none=True)
    # Substitute any {placeholder} segments of the endpoint from the params.
    resolved_endpoint = resolve_path_params(endpoint, params_dict)
    # trust_env=False: ignore proxy/netrc environment settings for this call.
    async with httpx.AsyncClient(
        trust_env=False,
        headers={
            "accept": "application/json",
            "user-agent": "python-httpx/0.24.0"
        },
    ) as client:
        resp = await client.get(
            f"{api_base}/{resolved_endpoint}",
            params=params_dict
        )
        # Raise on HTTP-level failures (4xx/5xx) before inspecting the body.
        resp.raise_for_status()
        resp_json = resp.json()
        # Twelve Data reports application-level errors inside a 200 body
        # as {"status": "error", "code": ..., "message": ...}.
        if isinstance(resp_json, dict):
            status = resp_json.get("status")
            if status == "error":
                code = resp_json.get('code')
                raise HTTPException(
                    status_code=code,
                    detail=f"Failed to perform request,"
                           f" code = {code}, message = {resp_json.get('message')}"
                )
        # Validate/parse the raw JSON into the typed response model.
        return response_model.model_validate(resp_json)
  • Registers all tools (including GetTimeSeriesMacd) using register_all_tools when local vector DB is available. This enables direct tool calls.
    register_all_tools(server=server, _call_endpoint=_call_endpoint) u_tool = register_u_tool(
  • Generates the tools.py module with specific handler stubs and register_all_tools function for each API operationId, mapping GetTimeSeriesMacd to '/macd' endpoint.
    def generate_code(ops): def fix_case(name: str) -> str: return name[0].upper() + name[1:] if name.lower().startswith("advanced") else name lines = [ 'from mcp.server import FastMCP', 'from mcp.server.fastmcp import Context', '' ] # Import request models for op, _, _ in ops: lines.append(f'from .request_models import {fix_case(op)}Request') lines.append('') # Import response models for op, _, _ in ops: lines.append(f'from .response_models import {fix_case(op)}200Response') lines.append('') # Register tools lines.append('def register_all_tools(server: FastMCP, _call_endpoint):') for op, desc, key in ops: fixed_op = fix_case(op) lines += [ f' @server.tool(name="{op}",', f' description="{desc}")', f' async def {op}(params: {fixed_op}Request, ctx: Context) -> {fixed_op}200Response:', f' return await _call_endpoint("{key}", params, {fixed_op}200Response, ctx)', '' ] return '\n'.join(lines)
  • Generates Pydantic models for tool input/output schemas from OpenAPI spec, including the specific response model for GetTimeSeriesMacd.
# Generate Pydantic v2 response models from the OpenAPI spec by shelling
# out to the datamodel-code-generator CLI, then append hand-written
# aliases for response-model names the generator spells differently.
# NOTE(review): `openapi_path` and `output_path` are defined outside this
# snippet — presumably Path objects for the spec and the output module.
cmd = [
    'datamodel-codegen',
    '--input', str(openapi_path),
    '--input-file-type', 'openapi',
    '--output', str(output_path),
    '--output-model-type', 'pydantic_v2.BaseModel',
    '--reuse-model',              # de-duplicate identical schemas
    '--use-title-as-name',        # prefer schema titles over synthesized names
    '--disable-timestamp',        # keep output stable across runs
    '--field-constraints',
    '--use-double-quotes',
]
# check=True: abort model generation on any CLI failure.
subprocess.run(cmd, check=True)
# Append aliases
alias_lines = [
    '',
    '# Aliases for response models',
    'GetMarketMovers200Response = MarketMoversResponseBody',
    'GetTimeSeriesPercent_B200Response = GetTimeSeriesPercentB200Response',
    ''
]
with open(output_path, 'a', encoding='utf-8') as f:
    f.write('\n'.join(alias_lines))
print(f"[SUCCESS] Models generated using CLI and aliases added: {output_path}")
  • Generates Pydantic request models (input schemas) for each tool from OpenAPI parameters, including common fields like outputsize and apikey for GetTimeSeriesMacd.
    def main(): # Load the OpenAPI specification with open(OPENAPI_PATH, "r", encoding="utf-8") as f: spec = json.load(f) components = spec.get("components", {}).get("schemas", {}) request_models: List[str] = [] request_names: set = set() for path, methods in spec.get("paths", {}).items(): for http_method, op in methods.items(): opid = op.get("operationId") if not opid: continue class_name = canonical_class_name(opid, "Request") # Collect parameters from path, query, header, etc. props: dict = {} for param in op.get("parameters", []): name = param["name"] sch = param.get("schema", {"type": "string"}) typ = python_type(sch, components) required = param.get("required", False) desc = param.get("description") or sch.get("description") examples = collect_examples(param, sch) default = sch.get("default", None) props[name] = { "type": typ, "required": required, "description": desc, "examples": examples, "default": default, } # Collect JSON body properties body = op.get("requestBody", {}) \ .get("content", {}) \ .get("application/json", {}) \ .get("schema") if body: body_sch = resolve_schema(body, components) for name, sch in body_sch.get("properties", {}).items(): typ = python_type(sch, components) required = name in body_sch.get("required", []) desc = sch.get("description") examples = collect_examples({}, sch) default = sch.get("default", None) props[name] = { "type": typ, "required": required, "description": desc, "examples": examples, "default": default, } if "outputsize" not in props: props["outputsize"] = { "type": "int", "required": False, "description": ( "Number of data points to retrieve. Supports values in the range from `1` to `5000`. 
" "Default `10` when no date parameters are set, otherwise set to maximum" ), "examples": [10], "default": 10, } else: props["outputsize"]["default"] = 10 props["outputsize"]["description"] = props["outputsize"]["description"].replace( 'Default `30`', 'Default `10`' ) props["outputsize"]["examples"] = [10] # Add apikey with default="demo" props["apikey"] = { "type": "str", "required": False, "description": "API key", "examples": ["demo"], "default": "demo", } if "interval" in props: props["interval"]["required"] = False props["interval"]["default"] = "1day" # Append plan availability to the description if x-starting-plan is present starting_plan = op.get("x-starting-plan") description = op.get("description", "") if starting_plan: addon = f" Available starting from the `{starting_plan}` plan." description = (description or "") + addon code = gen_class(class_name, props, description) if class_name not in request_names: request_models.append(code) request_names.add(class_name) # Write all generated models to the target file header = ( "from pydantic import BaseModel, Field\n" "from typing import Any, List, Optional\n\n" ) Path(REQUESTS_FILE).write_text(header + "\n\n".join(request_models), encoding="utf-8") print(f"Generated request models: {REQUESTS_FILE}")

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/twelvedata/mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.