
Fieldnote MCP

context_injector.py (1.24 kB)
#!/usr/bin/env python3
import os, sys, json, subprocess, shlex

# Resolve repository root and config path relative to this script.
ROOT = os.path.dirname(os.path.dirname(__file__))
CFG_PATH = os.path.join(ROOT, "config", "memory.config.json")
sys.path.append(os.path.join(ROOT, "scripts"))
from config_loader import load_config  # type: ignore

CFG = load_config(CFG_PATH)


def call_python(script, arg):
    """Run another Python script with a single argument and return its stdout."""
    cmd = f'{sys.executable} {shlex.quote(script)} {shlex.quote(arg)}'
    p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    if p.returncode != 0:
        raise RuntimeError(p.stderr)
    return p.stdout


def make_context(query):
    """Search memory for the query and build a score-filtered, size-capped context block."""
    res = json.loads(call_python(os.path.join(ROOT, "scripts", "memory_search.py"), query))
    dense = res.get("dense", [])
    th = CFG["injection"]["score_threshold"]
    snippets = []
    for d in dense:
        # Skip memories below the configured relevance threshold.
        if d["score"] < th:
            continue
        snippets.append(f'# Memory {d["id"]} (score {d["score"]:.3f})\n{d["text"]}')
    joined = "\n\n".join(snippets)
    # Rough character budget: max_tokens * 8 characters per token.
    max_chars = int(CFG["injection"].get("max_tokens", 2000)) * 8
    return joined[:max_chars]


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: context-inject '<query>'")
        sys.exit(1)
    ctx = make_context(sys.argv[1])
    print(ctx)
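
For reference, a minimal sketch of the inputs the script relies on. The field names ("injection", "score_threshold", "max_tokens", "dense", "id", "score", "text") come from the code above; the surrounding structure of config/memory.config.json and of the memory_search.py output is an assumption for illustration, not the project's official schema.

# Hypothetical shapes assumed by context_injector.py (illustrative only).

# config/memory.config.json -- only the "injection" keys below are read by the script.
example_config = {
    "injection": {
        "score_threshold": 0.35,   # memories scoring below this are skipped
        "max_tokens": 2000,        # multiplied by 8 to cap the injected characters
    }
}

# JSON printed by scripts/memory_search.py '<query>' -- the script reads the "dense" list.
example_search_output = {
    "dense": [
        {"id": "mem-001", "score": 0.82, "text": "Example memory snippet."}
    ]
}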

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Fieldnote-Echo/Fieldnote-MCP'
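
The same lookup from Python, as a minimal sketch using only the standard library. The URL is the one shown above; assuming the endpoint returns a JSON document, the response is parsed directly.

import json
import urllib.request

# Fetch the Fieldnote MCP entry from the Glama MCP directory API.
url = "https://glama.ai/api/mcp/v1/servers/Fieldnote-Echo/Fieldnote-MCP"
with urllib.request.urlopen(url) as resp:
    server_info = json.loads(resp.read().decode("utf-8"))

print(server_info)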

If you have feedback or need assistance with the MCP directory API, please join our Discord server.