
Notion MCP Server V2

by ankitmalik84
config.py • 1.33 kB
from pyprojroot import here
from yaml import load, Loader


class Config:
    def __init__(self):
        with open(here("config/config.yml"), "r") as f:
            config = load(f, Loader=Loader)

        # directories
        self.db_path = here(config["directories"]["db_path"])
        self.vectordb_dir = here(config["directories"]["vectordb_dir"])

        # llm_config
        self.chat_model = config["llm_config"]["chat_model"]
        self.summary_model = config["llm_config"]["summary_model"]
        self.rag_model = config["llm_config"]["rag_model"]
        self.temperature = config["llm_config"]["temperature"]

        # chat_history_config
        self.max_history_pairs = config["chat_history_config"]["max_history_pairs"]
        self.max_characters = config["chat_history_config"]["max_characters"]
        self.max_tokens = config["chat_history_config"]["max_tokens"]

        # search_config
        # self.num_retrieved_content = config["search_config"]["num_retrieved_content"]

        # agent_config
        self.max_function_calls = config["agent_config"]["max_function_calls"]

        # vectordb_config
        self.collection_name = config["vectordb_config"]["collection_name"]
        self.embedding_model = config["vectordb_config"]["embedding_model"]
        self.k = config["vectordb_config"]["k"]
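The loader above expects a config/config.yml at the project root (as resolved by pyprojroot's here()), with one mapping per settings group. The following is a minimal sketch of that file's shape, inferred only from the keys Config reads; every value shown is a placeholder, not taken from the actual repository. It is expressed as a YAML string parsed with PyYAML, which the class already depends on.

import yaml

# Placeholder config.yml content; keys mirror what Config.__init__ accesses,
# values are illustrative assumptions only.
example_yaml = """
directories:
  db_path: data/db.sqlite
  vectordb_dir: data/vectordb
llm_config:
  chat_model: gpt-4o-mini
  summary_model: gpt-4o-mini
  rag_model: gpt-4o-mini
  temperature: 0.0
chat_history_config:
  max_history_pairs: 10
  max_characters: 8000
  max_tokens: 2000
agent_config:
  max_function_calls: 5
vectordb_config:
  collection_name: memory
  embedding_model: text-embedding-3-small
  k: 3
"""

config = yaml.safe_load(example_yaml)
print(config["llm_config"]["chat_model"])

With a file of this shape in place, instantiating Config() exposes each value as an attribute (for example cfg.chat_model or cfg.k) for the rest of the application.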

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ankitmalik84/Agentic_Longterm_Memory'
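The same lookup can be done from Python. The sketch below uses only the standard library and the endpoint URL shown in the curl command above; it assumes the endpoint returns JSON.

import json
import urllib.request

# Endpoint from the curl example above; response format assumed to be JSON.
url = "https://glama.ai/api/mcp/v1/servers/ankitmalik84/Agentic_Longterm_Memory"
with urllib.request.urlopen(url) as resp:
    server_info = json.load(resp)

print(json.dumps(server_info, indent=2))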

If you have feedback or need assistance with the MCP directory API, please join our Discord server.