get_aggregates
Query stock market aggregate bars (OHLC) by ticker, date range, and bar size, with options for split adjustment and sorting.
Instructions
Aggregated OHLC bars for a stock over a date range.
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| ticker | Yes | Stock symbol (e.g. "AAPL"). Case-sensitive. | |
| multiplier | Yes | Size of the timespan multiplier (e.g. 5 with timespan="minute" => 5-min bars). | |
| timespan | Yes | Bar size: second, minute, hour, day, week, month, quarter, year. | |
| from_ | Yes | Start date "YYYY-MM-DD" or millisecond unix timestamp. | |
| to | Yes | End date "YYYY-MM-DD" or millisecond unix timestamp. | |
| adjusted | No | Whether to adjust for splits. Default true. | true |
| sort | No | "asc" or "desc" by timestamp. | asc |
| limit | No | Max bars (Massive cap 50000). Default 50 to keep responses small. | 50 |
Output Schema
| Name | Required | Description | Default |
|---|---|---|---|
| (none — no structured output schema is declared; the tool returns the raw Massive API JSON object) | | | |
Implementation Reference
- The get_aggregates handler: builds the REST path /v2/aggs/ticker/{ticker}/range/{multiplier}/{timespan}/{from_}/{to} and calls the Massive API client.
async def get_aggregates( ticker: str, multiplier: int, timespan: Timespan, from_: str, to: str, adjusted: bool = True, sort: Literal["asc", "desc"] = "asc", limit: int = 50, ) -> dict[str, Any]: """Aggregated OHLC bars for a stock over a date range. Args: ticker: Stock symbol (e.g. "AAPL"). Case-sensitive. multiplier: Size of the timespan multiplier (e.g. 5 with timespan="minute" => 5-min bars). timespan: Bar size: second, minute, hour, day, week, month, quarter, year. from_: Start date "YYYY-MM-DD" or millisecond unix timestamp. to: End date "YYYY-MM-DD" or millisecond unix timestamp. adjusted: Whether to adjust for splits. Default true. sort: "asc" or "desc" by timestamp. limit: Max bars (Massive cap 50000). Default 50 to keep responses small. """ path = f"/v2/aggs/ticker/{ticker}/range/{multiplier}/{timespan}/{from_}/{to}" return await client.get(path, {"adjusted": str(adjusted).lower(), "sort": sort, "limit": limit}) - Input parameters and return type for get_aggregates: ticker (str), multiplier (int), timespan (Literal of time units), from_ (str), to (str), adjusted (bool), sort (asc/desc), limit (int). Returns dict[str,Any].
async def get_aggregates( ticker: str, multiplier: int, timespan: Timespan, from_: str, to: str, adjusted: bool = True, sort: Literal["asc", "desc"] = "asc", limit: int = 50, ) -> dict[str, Any]: - src/massive_mcp/tools/aggregates.py:12-37 (registration): Registration via the @mcp.tool() decorator inside the register() function, called from server.py line 38 (aggregates.register(mcp, client)).
def register(mcp: FastMCP, client: MassiveClient) -> None: @mcp.tool() async def get_aggregates( ticker: str, multiplier: int, timespan: Timespan, from_: str, to: str, adjusted: bool = True, sort: Literal["asc", "desc"] = "asc", limit: int = 50, ) -> dict[str, Any]: """Aggregated OHLC bars for a stock over a date range. Args: ticker: Stock symbol (e.g. "AAPL"). Case-sensitive. multiplier: Size of the timespan multiplier (e.g. 5 with timespan="minute" => 5-min bars). timespan: Bar size: second, minute, hour, day, week, month, quarter, year. from_: Start date "YYYY-MM-DD" or millisecond unix timestamp. to: End date "YYYY-MM-DD" or millisecond unix timestamp. adjusted: Whether to adjust for splits. Default true. sort: "asc" or "desc" by timestamp. limit: Max bars (Massive cap 50000). Default 50 to keep responses small. """ path = f"/v2/aggs/ticker/{ticker}/range/{multiplier}/{timespan}/{from_}/{to}" return await client.get(path, {"adjusted": str(adjusted).lower(), "sort": sort, "limit": limit}) - src/massive_mcp/client.py:47-115 (helper)MassiveClient.get(): the HTTP helper that makes the actual GET request with retries, auth, and response trimming.
async def get( self, path: str, params: dict[str, Any] | None = None, *, trim: bool = True ) -> dict[str, Any]: merged: dict[str, Any] = {k: v for k, v in (params or {}).items() if v is not None} if self._settings.auth_mode == "query": merged["apiKey"] = self._settings.api_key last_exc: Exception | None = None for attempt in range(MAX_RETRIES): try: resp = await self._http.get(path, params=merged) except httpx.HTTPError as exc: last_exc = exc await asyncio.sleep(2**attempt) continue if resp.status_code == 429: retry_after = float(resp.headers.get("Retry-After", 2**attempt)) await asyncio.sleep(min(retry_after, 30)) continue if 500 <= resp.status_code < 600 and attempt < MAX_RETRIES - 1: await asyncio.sleep(2**attempt) continue if resp.status_code == 401: hint = ( "auth rejected — verify MASSIVE_API_KEY; " "if you used MASSIVE_AUTH_MODE=bearer, try 'query' (or vice versa)" ) raise MassiveAPIError(401, hint, _strip_secrets(str(resp.request.url))) try: data = resp.json() except ValueError: data = {"raw": resp.text} if not resp.is_success: msg = data.get("error") or data.get("message") or resp.reason_phrase or "request failed" raise MassiveAPIError(resp.status_code, str(msg), _strip_secrets(str(resp.request.url))) return _trim(data) if trim else data raise MassiveAPIError(0, f"network error after {MAX_RETRIES} retries: {last_exc}", path) def _trim(data: dict[str, Any]) -> dict[str, Any]: """If `results` is a huge array, truncate and surface a hint to paginate.""" results = data.get("results") if isinstance(results, list) and len(results) > TRIM_THRESHOLD: kept = results[:TRIM_THRESHOLD] data = dict(data) data["results"] = kept data["_truncated_note"] = ( f"response had {len(results)} items; truncated to {TRIM_THRESHOLD}. " "Re-call with a tighter `limit` or use `cursor`/`next_url` to page." 
) if "next_url" in data and data.get("next_url"): cursor = _extract_cursor(data["next_url"]) if cursor: data["next_cursor"] = cursor return data def _extract_cursor(next_url: str) -> str | None: parts = urlsplit(next_url) for kv in parts.query.split("&"): if kv.startswith("cursor="): return kv.split("=", 1)[1] return None - Timespan type alias used by get_aggregates schema.
# Allowed bar sizes accepted by the aggregates endpoint (second through year).
Timespan = Literal["second", "minute", "hour", "day", "week", "month", "quarter", "year"]