Skip to main content
Glama

Caltrain MCP Server

by davidyen1124
lint.py•2.05 kB
#!/usr/bin/env python3
"""Local development script to run all CI checks.

Run this before pushing to catch issues early.
"""

import os
import subprocess
import sys
from pathlib import Path


def run_command(cmd: list[str], description: str) -> bool:
    """Run *cmd*, report the outcome, and return True on success.

    Args:
        cmd: The command and its arguments (executed without a shell).
        description: Human-readable label used in the progress output.

    Returns:
        True if the command exited with status 0; False on a non-zero
        exit status or when the executable cannot be found.
    """
    print(f"\n🔍 {description}")
    print(f"Running: {' '.join(cmd)}")

    try:
        result = subprocess.run(cmd, capture_output=True, text=True)
    except FileNotFoundError:
        # e.g. `uv` is not installed / not on PATH — report it instead of
        # crashing with a traceback, so the summary at the end still prints.
        print(f"❌ {description} failed: command not found: {cmd[0]}")
        return False

    if result.returncode == 0:
        print(f"✅ {description} passed")
        if result.stdout.strip():
            print(result.stdout)
        return True

    print(f"❌ {description} failed")
    if result.stdout.strip():
        print("STDOUT:", result.stdout)
    if result.stderr.strip():
        print("STDERR:", result.stderr)
    return False


def main() -> int:
    """Run all checks and return a process exit code (0 = all passed)."""
    # Run from the project root so the tools' relative paths resolve.
    project_root = Path(__file__).parent.parent
    os.chdir(project_root)

    # (command, description) pairs, executed in order.
    checks = [
        (["uv", "run", "ruff", "check", "."], "Ruff linting"),
        (["uv", "run", "ruff", "format", "--check", "."], "Ruff formatting"),
        (["uv", "run", "mypy", "src"], "MyPy type checking"),
        (
            [
                "uv",
                "run",
                "pytest",
                "--cov=src/caltrain_mcp",
                "--cov-report=term-missing",
            ],
            "Tests with coverage",
        ),
    ]

    print("🚀 Running all CI checks locally...")

    # Run every check even after a failure so all problems surface at once.
    failed_checks = [
        description
        for cmd, description in checks
        if not run_command(cmd, description)
    ]

    if failed_checks:
        print(f"\n❌ {len(failed_checks)} check(s) failed:")
        for check in failed_checks:
            print(f"  - {check}")
        print("\nPlease fix the issues above before pushing.")
        return 1

    print("\n🎉 All checks passed! Ready to push.")
    return 0


if __name__ == "__main__":
    sys.exit(main())

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/davidyen1124/caltrain-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server