Convex MCP server

Official server by get-convex
tiktoken.ts (987 B)

"use node";
import { v } from "convex/values";
import { action } from "./_generated/server";
import { Tiktoken } from "tiktoken/lite";
// eslint-disable-next-line @typescript-eslint/no-require-imports
const gpt2_base = require("tiktoken/encoders/gpt2.json");

// Encode a string into GPT-2 BPE token ids.
export const encode = action({
  args: { str: v.string() },
  handler: async (_, { str }): Promise<number[]> => {
    const enc = new Tiktoken(
      gpt2_base.bpe_ranks,
      gpt2_base.special_tokens,
      gpt2_base.pat_str,
    );
    const tok = enc.encode(str);
    enc.free();
    return Array.from(tok);
  },
});

// Decode an array of token ids back into the original string.
export const decode = action({
  args: { arr: v.array(v.number()) },
  handler: async (_, { arr }: { arr: number[] }): Promise<string> => {
    const enc = new Tiktoken(
      gpt2_base.bpe_ranks,
      gpt2_base.special_tokens,
      gpt2_base.pat_str,
    );
    const buf = new Uint32Array(arr);
    const result = new TextDecoder().decode(enc.decode(buf));
    enc.free();
    return result;
  },
});

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/get-convex/convex-backend'
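For a programmatic client, a minimal TypeScript equivalent of the curl call above might look like the sketch below; it is not an official SDK, and the shape of the returned JSON is whatever the directory API responds with.

// directory-api-sketch.ts (hypothetical fetch of the MCP directory API)
const res = await fetch(
  "https://glama.ai/api/mcp/v1/servers/get-convex/convex-backend",
);
if (!res.ok) throw new Error(`Request failed: ${res.status}`);
const server = await res.json(); // server metadata as returned by the API
console.log(server);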

If you have feedback or need assistance with the MCP directory API, please join our Discord server.