request-queue.js (4.88 kB)
var __assign = (this && this.__assign) || function () {
    __assign = Object.assign || function (t) {
        for (var s, i = 1, n = arguments.length; i < n; i++) {
            s = arguments[i];
            for (var p in s)
                if (Object.prototype.hasOwnProperty.call(s, p))
                    t[p] = s[p];
        }
        return t;
    };
    return __assign.apply(this, arguments);
};
var __read = (this && this.__read) || function (o, n) {
    var m = typeof Symbol === "function" && o[Symbol.iterator];
    if (!m) return o;
    var i = m.call(o), r, ar = [], e;
    try {
        while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
    }
    catch (error) { e = { error: error }; }
    finally {
        try {
            if (r && !r.done && (m = i["return"])) m.call(i);
        }
        finally { if (e) throw e.error; }
    }
    return ar;
};
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
    if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
        if (ar || !(i in from)) {
            if (!ar) ar = Array.prototype.slice.call(from, 0, i);
            ar[i] = from[i];
        }
    }
    return to.concat(ar || Array.prototype.slice.call(from));
};
import { each } from './utils';
import { isArray, isUndefined } from './utils/type-utils';
import { clampToRange } from './utils/number-utils';
export var DEFAULT_FLUSH_INTERVAL_MS = 3000;
var RequestQueue = /** @class */ (function () {
    function RequestQueue(sendRequest, config) {
        // We start in a paused state and only start flushing when enabled by the parent
        this.isPaused = true;
        this.queue = [];
        this.flushTimeoutMs = clampToRange((config === null || config === void 0 ? void 0 : config.flush_interval_ms) || DEFAULT_FLUSH_INTERVAL_MS, 250, 5000, 'flush interval', DEFAULT_FLUSH_INTERVAL_MS);
        this.sendRequest = sendRequest;
    }
    RequestQueue.prototype.enqueue = function (req) {
        this.queue.push(req);
        if (!this.flushTimeout) {
            this.setFlushTimeout();
        }
    };
    RequestQueue.prototype.unload = function () {
        var _this = this;
        this.clearFlushTimeout();
        var requests = this.queue.length > 0 ? this.formatQueue() : {};
        var requestValues = Object.values(requests);
        // Always force events to be sent before recordings, as events are more important, and recordings are bigger and thus less likely to arrive
        var sortedRequests = __spreadArray(__spreadArray([], __read(requestValues.filter(function (r) { return r.url.indexOf('/e') === 0; })), false), __read(requestValues.filter(function (r) { return r.url.indexOf('/e') !== 0; })), false);
        sortedRequests.map(function (req) {
            _this.sendRequest(__assign(__assign({}, req), { transport: 'sendBeacon' }));
        });
    };
    RequestQueue.prototype.enable = function () {
        this.isPaused = false;
        this.setFlushTimeout();
    };
    RequestQueue.prototype.setFlushTimeout = function () {
        var _this = this;
        if (this.isPaused) {
            return;
        }
        this.flushTimeout = setTimeout(function () {
            _this.clearFlushTimeout();
            if (_this.queue.length > 0) {
                var requests = _this.formatQueue();
                var _loop_1 = function (key) {
                    var req = requests[key];
                    var now = new Date().getTime();
                    if (req.data && isArray(req.data)) {
                        each(req.data, function (data) {
                            data['offset'] = Math.abs(data['timestamp'] - now);
                            delete data['timestamp'];
                        });
                    }
                    _this.sendRequest(req);
                };
                for (var key in requests) {
                    _loop_1(key);
                }
            }
        }, this.flushTimeoutMs);
    };
    RequestQueue.prototype.clearFlushTimeout = function () {
        clearTimeout(this.flushTimeout);
        this.flushTimeout = undefined;
    };
    RequestQueue.prototype.formatQueue = function () {
        var requests = {};
        each(this.queue, function (request) {
            var _a;
            var req = request;
            var key = (req ? req.batchKey : null) || req.url;
            if (isUndefined(requests[key])) {
                // TODO: What about this -it seems to batch data into an array - do we always want that?
                requests[key] = __assign(__assign({}, req), { data: [] });
            }
            (_a = requests[key].data) === null || _a === void 0 ? void 0 : _a.push(req.data);
        });
        this.queue = [];
        return requests;
    };
    return RequestQueue;
}());
export { RequestQueue };
//# sourceMappingURL=request-queue.js.map
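
The RequestQueue above batches queued requests by batchKey (falling back to the request URL), rewrites absolute event timestamps into offsets relative to flush time, and switches to the sendBeacon transport on unload so event requests go out before recordings. The following is a minimal usage sketch, not part of this file: the sendRequest callback, the '/e/' endpoint, and the event payloads are illustrative assumptions.

import { RequestQueue } from './request-queue'

// Hypothetical transport; in the real client the caller supplies this function.
function sendRequest(options) {
    // options.data arrives as an array because formatQueue() batches payloads per key.
    console.log('sending', options.url, options.transport || 'default', options.data)
}

const queue = new RequestQueue(sendRequest, { flush_interval_ms: 1000 })

// Nothing is flushed yet: the queue starts paused until enable() is called.
queue.enqueue({ url: '/e/', data: { event: 'page_view', timestamp: Date.now() } })
queue.enqueue({ url: '/e/', data: { event: 'button_click', timestamp: Date.now() } })

// Starts the flush timer; both payloads go out in a single batched request keyed by '/e/',
// with each payload's timestamp replaced by an offset from the flush time.
queue.enable()

// On unload, anything still queued is sent immediately via sendBeacon.
window.addEventListener('beforeunload', function () { queue.unload() })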


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/sadiuysal/mem0-mcp-server-ts'
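
An equivalent call from JavaScript might look like the sketch below (run inside an async function or ES module); the shape of the JSON response is not documented here, so treat the parsed object as an assumption.

// Hypothetical fetch-based equivalent of the curl command above.
const response = await fetch('https://glama.ai/api/mcp/v1/servers/sadiuysal/mem0-mcp-server-ts')
if (!response.ok) {
    throw new Error('MCP directory API request failed: ' + response.status)
}
const server = await response.json() // response schema is an assumption
console.log(server)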

If you have feedback or need assistance with the MCP directory API, please join our Discord server.