# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/mourad-ghafiri/youtube-mcp-server'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
import os
import torch
import sys
import logging
class Config:
    """Central configuration for the youtube-mcp-server transcription pipeline.

    Groups directory paths, model identifiers, audio parameters, and
    concurrency limits, plus helpers for device selection and logging setup.
    """

    # Directories
    TRANSCRIPTIONS_DIR: str = "transcriptions"  # output folder for transcripts

    # Models
    WHISPER_MODEL_NAME: str = "tiny"  # Whisper checkpoint size
    SILERO_REPO: str = "snakers4/silero-vad"  # torch.hub repo for the VAD model
    SILERO_MODEL: str = "silero_vad"  # model name within the Silero repo

    # Audio
    SAMPLING_RATE: int = 16000  # Hz; rate expected by both Silero VAD and Whisper
    SEGMENT_PADDING_MS: int = 200  # Padding added to each segment

    # Concurrency
    MAX_WORKERS: int = 4  # worker pool size for parallel processing

    @staticmethod
    def get_device() -> str:
        """Determine the best available device for inference.

        Returns:
            "mps" on Apple Silicon, else "cuda" if an NVIDIA GPU is
            available, else "cpu".
        """
        if torch.backends.mps.is_available():
            return "mps"
        elif torch.cuda.is_available():
            return "cuda"
        return "cpu"

    @staticmethod
    def setup_logging(logger_name: str = "youtube-mcp-server") -> logging.Logger:
        """Return a logger writing formatted records to stderr.

        Fix: the original guarded on ``logger.handlers`` but then called
        ``logging.basicConfig``, which attaches a handler to the *root*
        logger — the named logger never gained a handler, so the guard was
        always true and output depended on root propagation. We now attach
        the handler directly to the named logger, making the call
        idempotent (no duplicate handlers on repeated calls).

        Args:
            logger_name: Name of the logger to configure and return.

        Returns:
            The configured ``logging.Logger`` instance.
        """
        logger = logging.getLogger(logger_name)
        if not logger.handlers:
            handler = logging.StreamHandler(sys.stderr)
            handler.setFormatter(
                logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            )
            logger.addHandler(handler)
            logger.setLevel(logging.INFO)
        return logger