GetCurrencyConversion
Convert currency amounts using real-time exchange rates for forex and cryptocurrency pairs. Specify a date for historical rates or get current conversions.
Instructions
This API call returns the real-time exchange rate and converted amount for a currency pair. Works with both forex and cryptocurrency pairs.
Input Schema
Table / JSON Schema
| Name | Required | Description | Default |
|---|---|---|---|
| params | Yes | Request parameters for the conversion (currency pair symbol, amount, optional historical date, apikey). | — |
Implementation Reference
- Core handler logic executed by the generated GetCurrencyConversion tool function. Makes HTTP GET request to Twelve Data API endpoint '/currency_conversion' with resolved parameters and apikey, validates response with Pydantic model.async def _call_endpoint( endpoint: str, params: P, response_model: Type[R], ctx: Context ) -> R: params.apikey = extract_twelve_data_apikey( twelve_data_apikey=twelve_data_apikey, transport=transport, ctx=ctx ) params_dict = params.model_dump(exclude_none=True) resolved_endpoint = resolve_path_params(endpoint, params_dict) async with httpx.AsyncClient( trust_env=False, headers={ "accept": "application/json", "user-agent": "python-httpx/0.24.0" }, ) as client: resp = await client.get( f"{api_base}/{resolved_endpoint}", params=params_dict ) resp.raise_for_status() resp_json = resp.json() if isinstance(resp_json, dict): status = resp_json.get("status") if status == "error": code = resp_json.get('code') raise HTTPException( status_code=code, detail=f"Failed to perform request," f" code = {code}, message = {resp_json.get('message')}" ) return response_model.model_validate(resp_json)
- src/mcp_server_twelve_data/server.py:88-88 (registration) — Registers all generated MCP tools, including GetCurrencyConversion, passing the server and the generic `_call_endpoint` handler: `register_all_tools(server=server, _call_endpoint=_call_endpoint)`
- scripts/generate_tools.py:58-69 (helper)Code generation script that creates the register_all_tools function and the specific tool registration for GetCurrencyConversion (name from operationId, endpoint '/currency_conversion', params/response models).

# Fragment of the generator: builds the source text of register_all_tools().
# `lines` (accumulated output lines), `ops` (triples of operationId,
# description, endpoint key) and `fix_case` are defined earlier in the
# script — not visible in this excerpt.
lines.append('def register_all_tools(server: FastMCP, _call_endpoint):')
for op, desc, key in ops:
    # presumably fix_case normalizes the operationId for model class names
    # (e.g. matching the generated "<Op>Request" classes) — confirm in script.
    fixed_op = fix_case(op)
    # Emit one @server.tool registration per operation; each generated tool
    # simply forwards to the shared _call_endpoint handler.
    # NOTE(review): the leading whitespace inside these literals looks
    # collapsed by extraction — likely deeper indents in the real source.
    lines += [
        f' @server.tool(name="{op}",',
        f' description="{desc}")',
        f' async def {op}(params: {fixed_op}Request, ctx: Context) -> {fixed_op}200Response:',
        f' return await _call_endpoint("{key}", params, {fixed_op}200Response, ctx)',
        ''
    ]
return '\n'.join(lines)
- Generates input schema (Pydantic BaseModel) for tool parameters, including apikey, outputsize, etc., from OpenAPI parameters for operationId 'GetCurrencyConversion'.

# Generator script: builds Pydantic request models (request_models.py)
# from the OpenAPI specification.
import json
from pathlib import Path
import keyword
from typing import Any, List, Optional

OPENAPI_PATH = "../extra/openapi_clean.json"  # input OpenAPI spec
REQUESTS_FILE = "../data/request_models.py"   # generated output module

# OpenAPI primitive type -> Python type name used in generated annotations.
PRIMITIVES = {
    "string": "str",
    "integer": "int",
    "number": "float",
    "boolean": "bool",
    "object": "dict",
    "array": "list",
}


def canonical_class_name(opid: str, suffix: str) -> str:
    # Upper-case the first letter of the operationId and append a suffix,
    # e.g. "getCurrencyConversion" + "Request" -> "GetCurrencyConversionRequest".
    if not opid:
        return ""
    return opid[0].upper() + opid[1:] + suffix


def safe_field_name(name: str) -> str:
    # Append underscore if name is a Python keyword
    if keyword.iskeyword(name):
        return name + "_"
    return name


def python_type(schema: dict, components: dict) -> str:
    # Resolve $ref to the corresponding model class name
    if "$ref" in schema:
        ref_name = schema["$ref"].split("/")[-1]
        return canonical_class_name(ref_name, "")
    # Handle allOf by delegating to the first subschema
    if "allOf" in schema:
        for subschema in schema["allOf"]:
            return python_type(subschema, components)
    t = schema.get("type", "string")
    if t == "array":
        # Construct type for lists recursively
        return f"list[{python_type(schema.get('items', {}), components)}]"
    return PRIMITIVES.get(t, "Any")


def resolve_schema(schema: dict, components: dict) -> dict:
    # Fully resolve $ref and allOf compositions into a merged schema
    if "$ref" in schema:
        ref = schema["$ref"].split("/")[-1]
        return resolve_schema(components.get(ref, {}), components)
    if "allOf" in schema:
        merged = {"properties": {}, "required": [], "description": ""}
        for subschema in schema["allOf"]:
            sub = resolve_schema(subschema, components)
            merged["properties"].update(sub.get("properties", {}))
            merged["required"].extend(sub.get("required", []))
            if sub.get("description"):
                merged["description"] += sub["description"] + "\n"
        merged["required"] = list(set(merged["required"]))
        merged["description"] = merged["description"].strip() or None
        return merged
    return schema


def collect_examples(param: dict, sch: dict) -> List[Any]:
    # Collect all examples from parameter, schema, and enums without deduplication
    examples: List[Any] = []
    if "example" in param:
        examples.append(param["example"])
    if "examples" in param:
        exs = param["examples"]
        if isinstance(exs, dict):
            # OpenAPI "examples" objects wrap the payload in a "value" key.
            for v in exs.values():
                examples.append(v["value"] if isinstance(v, dict) and "value" in v else v)
        elif isinstance(exs, list):
            examples.extend(exs)
    if "example" in sch:
        examples.append(sch["example"])
    if "examples" in sch:
        exs = sch["examples"]
        if isinstance(exs, dict):
            for v in exs.values():
                examples.append(v["value"] if isinstance(v, dict) and "value" in v else v)
        elif isinstance(exs, list):
            examples.extend(exs)
    # Include enum values as examples if present
    if "enum" in sch and isinstance(sch["enum"], list):
        examples.extend(sch["enum"])
    return [e for e in examples if e is not None]


def gen_field(name: str, typ: str, required: bool, desc: Optional[str],
              examples: List[Any], default: Any) -> str:
    # Render one "name: type = Field(...)" line for the generated model body.
    name = safe_field_name(name)
    # Wrap in Optional[...] if default is None and field is not required
    if default is None and not required:
        typ = f"Optional[{typ}]"
    args: List[str] = []
    if required:
        args.append("...")
    else:
        args.append(f"default={repr(default)}")
    if desc:
        args.append(f"description={repr(desc)}")
    if examples:
        args.append(f"examples={repr(examples)}")
    # NOTE(review): the leading indent inside this literal looks collapsed by
    # extraction — likely four spaces in the real source.
    return f" {name}: {typ} = Field({', '.join(args)})"


def gen_class(name: str, props: dict, desc: Optional[str]) -> str:
    # Render a full "class <Name>(BaseModel):" block as source text.
    lines = [f"class {name}(BaseModel):"]
    if desc:
        # Add class docstring if description is present
        lines.append(f' """{desc.replace(chr(34)*3, "")}"""')
    if not props:
        lines.append(" pass")
    else:
        for pname, fdict in props.items():
            lines.append(gen_field(
                pname, fdict["type"], fdict["required"],
                fdict["description"], fdict["examples"], fdict["default"]
            ))
    return "\n".join(lines)


def main():
    # Load the OpenAPI specification
    with open(OPENAPI_PATH, "r", encoding="utf-8") as f:
        spec = json.load(f)
    components = spec.get("components", {}).get("schemas", {})
    request_models: List[str] = []
    request_names: set = set()
    for path, methods in spec.get("paths", {}).items():
        for http_method, op in methods.items():
            opid = op.get("operationId")
            if not opid:
                continue
            class_name = canonical_class_name(opid, "Request")
            # Collect parameters from path, query, header, etc.
            props: dict = {}
            for param in op.get("parameters", []):
                name = param["name"]
                sch = param.get("schema", {"type": "string"})
                typ = python_type(sch, components)
                required = param.get("required", False)
                desc = param.get("description") or sch.get("description")
                examples = collect_examples(param, sch)
                default = sch.get("default", None)
                props[name] = {
                    "type": typ,
                    "required": required,
                    "description": desc,
                    "examples": examples,
                    "default": default,
                }
            # Collect JSON body properties
            body = op.get("requestBody", {}) \
                .get("content", {}) \
                .get("application/json", {}) \
                .get("schema")
            if body:
                body_sch = resolve_schema(body, components)
                for name, sch in body_sch.get("properties", {}).items():
                    typ = python_type(sch, components)
                    required = name in body_sch.get("required", [])
                    desc = sch.get("description")
                    examples = collect_examples({}, sch)
                    default = sch.get("default", None)
                    props[name] = {
                        "type": typ,
                        "required": required,
                        "description": desc,
                        "examples": examples,
                        "default": default,
                    }
            # NOTE(review): outputsize is injected into every request model,
            # even for endpoints whose spec does not declare it — confirm this
            # is intentional.
            if "outputsize" not in props:
                props["outputsize"] = {
                    "type": "int",
                    "required": False,
                    "description": (
                        "Number of data points to retrieve. Supports values in the range from `1` to `5000`. "
                        "Default `10` when no date parameters are set, otherwise set to maximum"
                    ),
                    "examples": [10],
                    "default": 10,
                }
            else:
                props["outputsize"]["default"] = 10
                # NOTE(review): assumes the spec's outputsize description is a
                # string here; .replace would raise AttributeError on None.
                props["outputsize"]["description"] = props["outputsize"]["description"].replace(
                    'Default `30`', 'Default `10`'
                )
                props["outputsize"]["examples"] = [10]
            # Add apikey with default="demo"
            props["apikey"] = {
                "type": "str",
                "required": False,
                "description": "API key",
                "examples": ["demo"],
                "default": "demo",
            }
            if "interval" in props:
                props["interval"]["required"] = False
                props["interval"]["default"] = "1day"
            # Append plan availability to the description if x-starting-plan is present
            starting_plan = op.get("x-starting-plan")
            description = op.get("description", "")
            if starting_plan:
                addon = f" Available starting from the `{starting_plan}` plan."
                description = (description or "") + addon
            code = gen_class(class_name, props, description)
            if class_name not in request_names:
                request_models.append(code)
                request_names.add(class_name)
    # Write all generated models to the target file
    header = (
        "from pydantic import BaseModel, Field\n"
        "from typing import Any, List, Optional\n\n"
    )
    Path(REQUESTS_FILE).write_text(header + "\n\n".join(request_models), encoding="utf-8")
    print(f"Generated request models: {REQUESTS_FILE}")
# NOTE(review): no `main()` call or `if __name__ == "__main__":` guard is
# visible in this excerpt — confirm the script entry point elsewhere.
- Generates output schema (Pydantic BaseModel) for tool responses from OpenAPI using datamodel-codegen.import subprocess from pathlib import Path openapi_path = '../extra/openapi_clean.json' output_path = '../data/response_models.py' cmd = [ 'datamodel-codegen', '--input', str(openapi_path), '--input-file-type', 'openapi', '--output', str(output_path), '--output-model-type', 'pydantic_v2.BaseModel', '--reuse-model', '--use-title-as-name', '--disable-timestamp', '--field-constraints', '--use-double-quotes', ] subprocess.run(cmd, check=True) # Append aliases alias_lines = [ '', '# Aliases for response models', 'GetMarketMovers200Response = MarketMoversResponseBody', 'GetTimeSeriesPercent_B200Response = GetTimeSeriesPercentB200Response', '' ] with open(output_path, 'a', encoding='utf-8') as f: f.write('\n'.join(alias_lines)) print(f"[SUCCESS] Models generated using CLI and aliases added: {output_path}")