Skip to main content
Glama

Convex MCP server

Official
by get-convex
main.py (1.18 kB)
"""Import embedding vectors from a .jsonl file into a Convex deployment.

Each line of the input file is expected to be a JSON array whose first
element carries the original input text (``row[0]["input"]``) and whose
second element carries an embeddings API response
(``row[1]["data"][0]["embedding"]``).  Rows are streamed from disk and
uploaded in batches via the ``importEmbeddings:importEmbedding`` mutation.
"""

import argparse
import json
import os
from itertools import chain, islice

from convex import ConvexClient
from dotenv import load_dotenv

# Number of documents sent per mutation call; kept small so each request
# stays under typical argument-size limits.
BATCH_SIZE = 90


def read_embeddings(filename):
    """Yield one parsed JSON object per line of *filename* (a .jsonl file)."""
    with open(filename, "r", encoding="utf-8") as f:
        for jsonline in f:
            yield json.loads(jsonline)


def chunked_embeddings(size, embeddings_json):
    """Yield lazy chunks of at most *size* items from *embeddings_json*.

    Each yielded chunk is an iterator that draws from the shared source
    iterator, so a chunk must be fully consumed before requesting the next
    one.  Memory use stays O(size) regardless of the input length.
    """
    for first in embeddings_json:
        yield chain([first], islice(embeddings_json, size - 1))


def main():
    """Parse arguments, connect to Convex, and upload all embeddings."""
    parser = argparse.ArgumentParser(
        prog="Vector Importer",
        description="Imports vectors from jsonl files in a specific format",
    )
    parser.add_argument(
        "filename",
        help="The .jsonl file (uncompressed) from https://drive.google.com/file/d/1qRJWC4kiM9xZ-oTbiqK9ii0vPciNHhkI/view?usp=drive_link",
    )
    args = parser.parse_args()

    # .env.local is loaded first so it takes precedence over .env.
    load_dotenv(".env.local")
    load_dotenv()

    convex_url = os.getenv("CONVEX_URL")
    if not convex_url:
        # Fail fast with an actionable message instead of handing None to
        # ConvexClient and getting an opaque error later.
        raise SystemExit("CONVEX_URL is not set; check .env.local / .env")
    client = ConvexClient(convex_url)

    for batch in chunked_embeddings(BATCH_SIZE, read_embeddings(args.filename)):
        docs = [
            dict(input=row[0]["input"], embedding=row[1]["data"][0]["embedding"])
            for row in batch
        ]
        client.mutation("importEmbeddings:importEmbedding", dict(docs=docs))


if __name__ == "__main__":
    main()

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/get-convex/convex-backend'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.