Skip to main content
Glama
baptitse-jn

LinkedIn MCP Server

by baptitse-jn
stop_linkedin.sh (5.04 kB)
#!/bin/bash
# LinkedIn MCP Client Stop Script
# Stops both the Netlify MCP server and the LinkedIn FastAPI client.
#
# Usage: ./stop_linkedin.sh [stop|force|clean|help]

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROCESSES_DIR="$SCRIPT_DIR/.processes"

echo -e "${BLUE}🛑 Stopping LinkedIn MCP Infrastructure${NC}"
echo "====================================="

# Stop a service using its recorded PID file.
# Arguments:
#   $1 - human-readable service name (for messages)
#   $2 - basename of the PID file under $PROCESSES_DIR
stop_service() {
  local service_name=$1
  local pid_file="$PROCESSES_DIR/$2.pid"
  local pid

  if [ -f "$pid_file" ]; then
    pid=$(cat "$pid_file")
    if ps -p "$pid" > /dev/null 2>&1; then
      echo -e "${YELLOW}🔄 Stopping $service_name (PID: $pid)...${NC}"
      # Graceful TERM first. Guarded with '|| true' so set -e does not
      # abort the script if the process exits between the ps check and
      # the kill (which would leave the stale PID file behind).
      kill "$pid" 2>/dev/null || true

      # Wait up to 10 seconds for the process to stop
      local count=0
      while ps -p "$pid" > /dev/null 2>&1 && [ "$count" -lt 10 ]; do
        sleep 1
        count=$((count + 1))
      done

      # Escalate to SIGKILL only if it is still alive
      if ps -p "$pid" > /dev/null 2>&1; then
        echo -e "${YELLOW}⚠️ Force killing $service_name...${NC}"
        kill -9 "$pid" 2>/dev/null || true
      fi
      echo -e "${GREEN}✅ $service_name stopped${NC}"
    else
      echo -e "${YELLOW}⚠️ $service_name not running (PID $pid not found)${NC}"
    fi
    # Remove the PID file whether the process was alive or stale
    rm -f "$pid_file"
  else
    echo -e "${YELLOW}⚠️ No PID file found for $service_name${NC}"
  fi
}

# Stop any processes listening on a given TCP port. Used as a backup when
# PID files are missing or stale.
# Arguments:
#   $1 - human-readable service name (for messages)
#   $2 - port number
stop_by_port() {
  local service_name=$1
  local port=$2
  local pids pid

  pids=$(lsof -ti:"$port" 2>/dev/null || true)
  if [ -n "$pids" ]; then
    echo -e "${YELLOW}🔄 Stopping $service_name on port $port...${NC}"
    for pid in $pids; do
      kill "$pid" 2>/dev/null || true
    done
    sleep 2

    # Force kill anything still bound to the port
    pids=$(lsof -ti:"$port" 2>/dev/null || true)
    if [ -n "$pids" ]; then
      echo -e "${YELLOW}⚠️ Force killing processes on port $port...${NC}"
      for pid in $pids; do
        kill -9 "$pid" 2>/dev/null || true
      done
    fi
    echo -e "${GREEN}✅ $service_name stopped${NC}"
  else
    echo -e "${BLUE}ℹ️ No processes running on port $port${NC}"
  fi
}

# Default graceful shutdown: PID files first, ports as backup, then tidy
# the process directory (PID files removed, log files preserved).
main() {
  # Stop by PID files first
  stop_service "FastAPI Client" "fastapi"
  stop_service "Netlify Server" "netlify"

  # Stop by ports as backup
  stop_by_port "FastAPI Client" "8002"
  stop_by_port "Netlify Server" "8888"

  # Clean up processes directory
  if [ -d "$PROCESSES_DIR" ]; then
    echo -e "${BLUE}🧹 Cleaning up process files...${NC}"
    rm -f "$PROCESSES_DIR"/*.pid
    # Keep log files but notify about them
    if ls "$PROCESSES_DIR"/*.log >/dev/null 2>&1; then
      echo -e "${BLUE}📝 Log files preserved in $PROCESSES_DIR${NC}"
    fi
  fi

  echo -e "\n${GREEN}🏁 LinkedIn MCP Infrastructure stopped successfully${NC}"
  echo ""
  echo -e "${BLUE}💡 To restart services, run: ./start_linkedin.sh${NC}"
}

# Handle script arguments
case "${1:-stop}" in
  "stop")
    main
    ;;
  "force")
    echo -e "${YELLOW}🔥 Force stopping all LinkedIn MCP processes...${NC}"
    # Kill all processes on our ports
    stop_by_port "FastAPI Client" "8002"
    stop_by_port "Netlify Server" "8888"
    # Kill any remaining netlify dev processes
    pkill -f "netlify dev" 2>/dev/null || true
    # Kill any remaining uvicorn processes for our app
    pkill -f "linkedin_client:app" 2>/dev/null || true
    # Clean up PID files (plain files — '-f' is sufficient, no '-r' needed)
    rm -f "$PROCESSES_DIR"/*.pid 2>/dev/null || true
    echo -e "${GREEN}✅ Force stop completed${NC}"
    ;;
  "clean")
    echo -e "${YELLOW}🧹 Cleaning up all LinkedIn MCP files...${NC}"
    # Stop services first
    main
    # Remove all process files and logs
    rm -rf "$PROCESSES_DIR" 2>/dev/null || true
    # Remove virtual environment
    if [ -d "$SCRIPT_DIR/.venv" ]; then
      echo -e "${YELLOW}🗑️ Removing Python virtual environment...${NC}"
      rm -rf "$SCRIPT_DIR/.venv"
    fi
    echo -e "${GREEN}✅ Clean up completed${NC}"
    ;;
  "help")
    echo "LinkedIn MCP Infrastructure Stop Script"
    echo ""
    echo "Usage: $0 [command]"
    echo ""
    echo "Commands:"
    echo "  stop (default) - Gracefully stop all services"
    echo "  force          - Force kill all processes"
    echo "  clean          - Stop and clean up all files"
    echo "  help           - Show this help"
    ;;
  *)
    echo -e "${RED}❌ Unknown command: $1${NC}"
    echo "Use '$0 help' for usage information"
    exit 1
    ;;
esac

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/baptitse-jn/linkedin_mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.