# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/Young-Keun-LEE/mju-crawler-mcpserver'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
import subprocess
import json
from typing import List, Dict
from pathlib import Path
from crochet import setup, wait_for
from mcp.server.fastmcp import FastMCP
from smithery.decorators import smithery
import sys
"""Server for crawling Myongji University notices
using Scrapy and exposing them as a Smithery tool."""
# Initialize Crochet: starts Twisted's reactor in a background thread so the
# synchronous code below can run inside Smithery's async environment.
setup()
# @wait_for blocks the calling coroutine until the wrapped synchronous
# function (which uses subprocess.run) finishes, or the timeout elapses.
@wait_for(timeout=60.0)  # 60 s: crawling the notice board can be slow.
def _run_scrapy_command() -> List[Dict]:
"""
Executes the 'uv run scrapy crawl' command in a subprocess
and reads the generated JSON output file.
"""
spider_name = "mju_notice"
output_filename = "notices_output.json"
output_path = Path(output_filename)
# Remove any existing output file from previous runs
if output_path.exists():
output_path.unlink()
# ★★★ Key Change: Call the virtual environment's executables directly instead of using 'uv run'. ★★★
# `sys.executable` points to the full path of the Python interpreter
# currently running this script (e.g., /app/.venv/bin/python).
python_executable = sys.executable
# The `scrapy` executable is located in the same directory ('bin' or 'Scripts')
# as the `python` executable. We construct its full path based on this.
scrapy_executable = str(Path(python_executable).parent / 'scrapy')
# The command to execute (no longer dependent on 'uv')
command = [
scrapy_executable, # The full path to the scrapy executable in the virtual environment
"crawl", spider_name,
"-o", output_filename
]
print(f"Running command: {' '.join(command)}")
# Run the command as a subprocess
result = subprocess.run(command, capture_output=True, text=True, encoding='utf-8')
# Log errors if the command fails
if result.returncode != 0:
print("[ERROR] Failed to execute 'scrapy crawl' command:")
print(result.stderr)
# Return an empty list to indicate "no notices found"
return []
# Verify the output file was successfully created
if not output_path.exists():
print("[ERROR] Crawling succeeded but the output file was not created.")
return []
# Read the output file into the 'notices' variable
with open(output_path, 'r', encoding='utf-8') as f:
notices = json.load(f)
# Remove the temporary file after use
output_path.unlink()
return notices
@smithery.server()
def create_server():
    """Build the FastMCP server and register the notice-crawling tool."""
    mcp_server = FastMCP("mju_notice_bot")

    @mcp_server.tool()
    def get_mju_notices(limit: int = 5) -> List[Dict]:
        """Crawl MJU notices and return at most `limit` of them."""
        print(f"-> Tool 'get_mju_notices' called with limit={limit}")
        try:
            # Run the Scrapy crawl and collect the scraped notices.
            notices = _run_scrapy_command()
        except Exception as e:
            # Boundary handler: surface any crawl/timeout failure as a
            # structured error payload rather than crashing the tool call.
            print(f"[ERROR] in get_mju_notices: {e}")
            return [{"error": f"An unexpected error or timeout occurred during the crawl: {str(e)}"}]
        if not notices:
            return [{"error": "Crawl completed, but no notices were found."}]
        return notices[:limit]

    return mcp_server