# NOTE(review): the three lines below are not Python and break this stub file's
# syntax — they appear to be injected directory/API boilerplate. Preserved here
# as comments; confirm whether they belong in this file at all.
# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/AI-enthusiasts/crawl4ai-rag-mcp'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
from .model_loader import *
from _typeshed import Incomplete
__location__: Incomplete
def get_available_memory(device): ...
def calculate_batch_size(device): ...
def get_device(): ...
def set_model_device(model): ...
def get_home_folder(): ...
def load_bert_base_uncased(): ...
def load_HF_embedding_model(model_name: str = 'BAAI/bge-small-en-v1.5') -> tuple: ...
def load_text_classifier(): ...
def load_text_multilabel_classifier(): ...
def load_nltk_punkt(): ...
def load_spacy_model(): ...
def download_all_models(remove_existing: bool = False) -> None: ...
def main() -> None: ...