We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/fctr-id/fctr-okta-mcp-server'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
__init__.py•1.58 kB
# src/fctr_okta_mcp/prompts/__init__.py
"""
Prompt templates for the Okta MCP Server.
This module provides functions to load prompt templates from text files.
"""
from pathlib import Path
from functools import lru_cache
# Directory containing the prompt text files (the directory of this module).
PROMPTS_DIR = Path(__file__).parent


@lru_cache(maxsize=None)
def load_prompt(name: str) -> str:
    """Load (and cache) a prompt template from a text file.

    Args:
        name: The name of the prompt file (without .txt extension)

    Returns:
        The prompt text content

    Raises:
        FileNotFoundError: If the prompt file doesn't exist
    """
    path = PROMPTS_DIR / f"{name}.txt"
    if path.exists():
        return path.read_text(encoding="utf-8")
    raise FileNotFoundError(f"Prompt file not found: {path}")
def get_code_generation_prompt(selected_operations: str = "", is_direct_mode: bool = False) -> str:
    """Build the code generation prompt for the requested mode.

    Args:
        selected_operations: Formatted string of selected operations
        is_direct_mode: Whether to use the direct mode template (skips test instructions)

    Returns:
        The code generation prompt with operations filled in
    """
    if is_direct_mode:
        template_name = "direct_mode_code_generation_prompt"
    else:
        template_name = "agent_mode_code_generation_prompt"
    template = load_prompt(template_name)
    # Deliberately uses str.replace rather than str.format: the templates
    # contain literal curly braces in code examples (e.g. params={"limit": 200})
    # that .format() would misinterpret as placeholders.
    return template.replace("{{selected_operations}}", selected_operations)