Skip to main content
Glama
utilities.ts18.2 kB
import { z } from "zod"; /** * Utility Tools - Practical helper tools that generate real output */ // ============================================ // Git Helper Tool // ============================================ export const gitHelperSchema = { name: "git_helper", description: "Generates git commands and workflows for common scenarios", inputSchema: z.object({ scenario: z.enum([ "undo-commit", "squash-commits", "rebase-branch", "cherry-pick", "stash-work", "create-branch", "merge-conflict", "reset-file", "amend-commit", "bisect-bug", "clean-branches", "tag-release" ]).describe("The git scenario you need help with"), details: z.string().optional().describe("Additional context like branch names, commit count") }) }; const gitScenarios: Record<string, (details?: string) => string> = { "undo-commit": () => `# Undo Last Commit (Keep Changes) git reset --soft HEAD~1 # Undo Last Commit (Discard Changes) git reset --hard HEAD~1 # Undo Commit Already Pushed (Safe) git revert HEAD git push # View what was undone git reflog`, "squash-commits": (d) => `# Squash Last ${d || 'N'} Commits Interactively git rebase -i HEAD~${d || 'N'} # In editor, change 'pick' to 'squash' (or 's') for commits to squash # Example: # pick abc123 First commit # squash def456 Second commit # squash ghi789 Third commit # After saving, edit the combined commit message # Force push if already pushed git push --force-with-lease`, "rebase-branch": (d) => `# Rebase current branch onto ${d || 'main'} git fetch origin git rebase origin/${d || 'main'} # If conflicts occur: # 1. Fix conflicts in files # 2. git add <fixed-files> # 3. git rebase --continue # To abort rebase git rebase --abort # Force push after rebase git push --force-with-lease`, "cherry-pick": (d) => `# Cherry-pick specific commit git cherry-pick ${d || '<commit-hash>'} # Cherry-pick multiple commits git cherry-pick <hash1> <hash2> <hash3> # Cherry-pick range (exclusive start) git cherry-pick A..B # Cherry-pick without committing git cherry-pick -n ${d || '<commit-hash>'} # If conflicts: git cherry-pick --continue # after fixing git cherry-pick --abort # to cancel`, "stash-work": () => `# Stash current changes git stash # Stash with a message git stash push -m "WIP: feature description" # Stash including untracked files git stash -u # List all stashes git stash list # Apply most recent stash (keep in stash) git stash apply # Apply and remove from stash git stash pop # Apply specific stash git stash apply stash@{2} # Drop a stash git stash drop stash@{0}`, "create-branch": (d) => `# Create and switch to new branch git checkout -b ${d || 'feature/new-feature'} # Or with newer git: git switch -c ${d || 'feature/new-feature'} # Create branch from specific commit git checkout -b ${d || 'feature/new-feature'} <commit-hash> # Create branch from remote git checkout -b ${d || 'feature/new-feature'} origin/main # Push new branch to remote git push -u origin ${d || 'feature/new-feature'}`, "merge-conflict": () => `# When you have merge conflicts: # 1. See conflicted files git status # 2. Open files and look for conflict markers: # <<<<<<< HEAD # (your changes) # ======= # (their changes) # >>>>>>> branch-name # 3. Edit files to resolve conflicts # 4. Mark as resolved git add <resolved-file> # 5. Complete the merge git commit # Abort merge if needed git merge --abort # Use a merge tool git mergetool`, "reset-file": (d) => `# Discard changes to specific file git checkout -- ${d || '<file-path>'} # Or with newer git: git restore ${d || '<file-path>'} # Unstage a file (keep changes) git reset HEAD ${d || '<file-path>'} # Or with newer git: git restore --staged ${d || '<file-path>'} # Reset file to specific commit git checkout <commit-hash> -- ${d || '<file-path>'}`, "amend-commit": () => `# Amend last commit message git commit --amend -m "New message" # Amend last commit with staged changes git add <files> git commit --amend --no-edit # Amend and change message git add <files> git commit --amend -m "Updated message" # Push amended commit (if already pushed) git push --force-with-lease`, "bisect-bug": () => `# Start bisecting to find bug git bisect start # Mark current commit as bad git bisect bad # Mark known good commit git bisect good <commit-hash> # Git will checkout commits for you to test # After testing each: git bisect good # if no bug git bisect bad # if bug present # When found, git shows the bad commit # Exit bisect mode git bisect reset`, "clean-branches": () => `# Delete local branch git branch -d feature-branch # safe delete git branch -D feature-branch # force delete # Delete remote branch git push origin --delete feature-branch # Prune remote-tracking branches git fetch --prune # List merged branches (safe to delete) git branch --merged main # Delete all merged branches except main/master git branch --merged | grep -v "main\\|master" | xargs git branch -d`, "tag-release": (d) => `# Create annotated tag git tag -a ${d || 'v1.0.0'} -m "Release ${d || 'v1.0.0'}" # Create tag for specific commit git tag -a ${d || 'v1.0.0'} <commit-hash> -m "Release ${d || 'v1.0.0'}" # Push tag to remote git push origin ${d || 'v1.0.0'} # Push all tags git push origin --tags # List tags git tag -l # Delete tag git tag -d ${d || 'v1.0.0'} git push origin --delete ${d || 'v1.0.0'}`, }; export function gitHelperHandler(args: { scenario: string; details?: string }) { const { scenario, details } = args; const generator = gitScenarios[scenario]; if (!generator) { return { content: [{ type: "text", text: `Unknown scenario: ${scenario}` }] }; } const commands = generator(details); return { content: [{ type: "text", text: `# Git: ${scenario}\n\n\`\`\`bash\n${commands}\n\`\`\`\n\n💡 **Tip**: Always use \`--force-with-lease\` instead of \`--force\` when pushing rewritten history.` }] }; } // ============================================ // Package.json Generator // ============================================ export const packageJsonSchema = { name: "generate_package_json", description: "Generates a complete package.json with scripts, dependencies, and configuration", inputSchema: z.object({ name: z.string().describe("Project name"), type: z.enum(["node-api", "react", "next", "express", "cli", "library"]).describe("Project type"), typescript: z.boolean().optional().default(true), features: z.array(z.string()).optional().describe("Additional features like testing, linting") }) }; export function packageJsonHandler(args: { name: string; type: string; typescript?: boolean; features?: string[] }) { const { name, type, typescript = true, features = [] } = args; const base = { name, version: "1.0.0", description: "", main: typescript ? "dist/index.js" : "src/index.js", types: typescript ? "dist/index.d.ts" : undefined, type: "module", scripts: {} as Record<string, string>, keywords: [], author: "", license: "MIT", dependencies: {} as Record<string, string>, devDependencies: {} as Record<string, string>, }; // Type-specific config const configs: Record<string, { scripts: Record<string, string>; deps: Record<string, string>; devDeps: Record<string, string> }> = { "node-api": { scripts: { dev: "tsx watch src/index.ts", build: "tsc", start: "node dist/index.js", }, deps: { express: "^4.18.2", cors: "^2.8.5", dotenv: "^16.3.1" }, devDeps: { "@types/express": "^4.17.21", "@types/cors": "^2.8.17" } }, express: { scripts: { dev: "tsx watch src/server.ts", build: "tsc", start: "node dist/server.js", }, deps: { express: "^4.18.2", cors: "^2.8.5", helmet: "^7.1.0", "express-rate-limit": "^7.1.5" }, devDeps: { "@types/express": "^4.17.21", "@types/cors": "^2.8.17" } }, react: { scripts: { dev: "vite", build: "tsc && vite build", preview: "vite preview", }, deps: { react: "^18.2.0", "react-dom": "^18.2.0" }, devDeps: { "@vitejs/plugin-react": "^4.2.1", vite: "^5.0.10" } }, next: { scripts: { dev: "next dev", build: "next build", start: "next start", lint: "next lint", }, deps: { next: "^14.1.0", react: "^18.2.0", "react-dom": "^18.2.0" }, devDeps: { "@types/react": "^18.2.48", "@types/react-dom": "^18.2.18" } }, cli: { scripts: { dev: "tsx src/cli.ts", build: "tsc && chmod +x dist/cli.js", start: "node dist/cli.js", }, deps: { commander: "^12.0.0", chalk: "^5.3.0", ora: "^8.0.1" }, devDeps: {} }, library: { scripts: { dev: "tsup src/index.ts --watch", build: "tsup src/index.ts --format cjs,esm --dts", prepublishOnly: "npm run build", }, deps: {}, devDeps: { tsup: "^8.0.1" } } }; const config = configs[type]; base.scripts = { ...base.scripts, ...config.scripts }; base.dependencies = { ...base.dependencies, ...config.deps }; base.devDependencies = { ...base.devDependencies, ...config.devDeps }; // TypeScript if (typescript) { base.devDependencies.typescript = "^5.3.3"; base.devDependencies.tsx = "^4.7.0"; base.devDependencies["@types/node"] = "^20.11.5"; } // Features if (features.includes("testing") || features.includes("test")) { base.scripts.test = "vitest"; base.scripts["test:coverage"] = "vitest --coverage"; base.devDependencies.vitest = "^1.2.1"; base.devDependencies["@vitest/coverage-v8"] = "^1.2.1"; } if (features.includes("linting") || features.includes("lint")) { base.scripts.lint = "eslint src/"; base.scripts["lint:fix"] = "eslint src/ --fix"; base.devDependencies.eslint = "^8.56.0"; base.devDependencies["@typescript-eslint/eslint-plugin"] = "^6.19.1"; base.devDependencies["@typescript-eslint/parser"] = "^6.19.1"; } if (features.includes("prettier") || features.includes("format")) { base.scripts.format = "prettier --write src/"; base.devDependencies.prettier = "^3.2.4"; } // Clean undefined values const clean = JSON.parse(JSON.stringify(base)); return { content: [{ type: "text", text: `# Generated package.json for ${type} project\n\n\`\`\`json\n${JSON.stringify(clean, null, 2)}\n\`\`\`\n\n## Quick Start\n\`\`\`bash\nnpm install\nnpm run dev\n\`\`\`` }] }; } // ============================================ // Dockerfile Generator // ============================================ export const dockerfileSchema = { name: "generate_dockerfile", description: "Generates optimized Dockerfile for different project types", inputSchema: z.object({ type: z.enum(["node", "python", "go", "rust", "static"]).describe("Project type"), port: z.number().optional().default(3000), multiStage: z.boolean().optional().default(true) }) }; export function dockerfileHandler(args: { type: string; port?: number; multiStage?: boolean }) { const { type, port = 3000, multiStage = true } = args; const dockerfiles: Record<string, string> = { node: multiStage ? `# Build stage FROM node:20-alpine AS builder WORKDIR /app COPY package*.json ./ RUN npm ci COPY . . RUN npm run build # Production stage FROM node:20-alpine AS production WORKDIR /app ENV NODE_ENV=production COPY package*.json ./ RUN npm ci --only=production && npm cache clean --force COPY --from=builder /app/dist ./dist # Non-root user for security RUN addgroup -g 1001 -S nodejs && adduser -S nodejs -u 1001 USER nodejs EXPOSE ${port} CMD ["node", "dist/index.js"]` : `FROM node:20-alpine WORKDIR /app COPY package*.json ./ RUN npm ci COPY . . RUN npm run build EXPOSE ${port} CMD ["node", "dist/index.js"]`, python: multiStage ? `# Build stage FROM python:3.12-slim AS builder WORKDIR /app ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 RUN pip install --upgrade pip COPY requirements.txt . RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt # Production stage FROM python:3.12-slim WORKDIR /app ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 COPY --from=builder /wheels /wheels RUN pip install --no-cache /wheels/* COPY . . RUN adduser --system --no-create-home appuser USER appuser EXPOSE ${port} CMD ["python", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "${port}"]` : `FROM python:3.12-slim WORKDIR /app COPY requirements.txt . RUN pip install -r requirements.txt COPY . . EXPOSE ${port} CMD ["python", "main.py"]`, go: `# Build stage FROM golang:1.22-alpine AS builder WORKDIR /app COPY go.mod go.sum ./ RUN go mod download COPY . . RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-w -s" -o main . # Production stage FROM alpine:3.19 RUN apk --no-cache add ca-certificates WORKDIR /app COPY --from=builder /app/main . RUN adduser -D -g '' appuser USER appuser EXPOSE ${port} CMD ["./main"]`, rust: `# Build stage FROM rust:1.75-alpine AS builder WORKDIR /app RUN apk add --no-cache musl-dev COPY Cargo.toml Cargo.lock ./ RUN mkdir src && echo "fn main() {}" > src/main.rs RUN cargo build --release && rm -rf src COPY . . RUN cargo build --release # Production stage FROM alpine:3.19 RUN apk --no-cache add ca-certificates WORKDIR /app COPY --from=builder /app/target/release/app . RUN adduser -D appuser USER appuser EXPOSE ${port} CMD ["./app"]`, static: `FROM nginx:alpine COPY dist/ /usr/share/nginx/html/ COPY nginx.conf /etc/nginx/nginx.conf EXPOSE 80 CMD ["nginx", "-g", "daemon off;"]`, }; const dockerfile = dockerfiles[type]; return { content: [{ type: "text", text: `# Dockerfile for ${type} (port ${port})\n\n\`\`\`dockerfile\n${dockerfile}\n\`\`\`\n\n## Build & Run\n\`\`\`bash\ndocker build -t myapp .\ndocker run -p ${port}:${port} myapp\n\`\`\`` }] }; } // ============================================ // Environment Variables Template // ============================================ export const envTemplateSchema = { name: "generate_env_template", description: "Generates .env.example with common environment variables", inputSchema: z.object({ type: z.enum(["api", "fullstack", "database", "auth", "all"]).describe("Type of environment variables needed"), database: z.enum(["postgres", "mysql", "mongodb", "redis", "none"]).optional() }) }; export function envTemplateHandler(args: { type: string; database?: string }) { const { type, database } = args; const sections: Record<string, string> = { server: `# Server NODE_ENV=development PORT=3000 HOST=localhost`, database_postgres: `# PostgreSQL Database DATABASE_URL=postgresql://user:password@localhost:5432/mydb DB_HOST=localhost DB_PORT=5432 DB_NAME=mydb DB_USER=user DB_PASSWORD=password`, database_mysql: `# MySQL Database DATABASE_URL=mysql://user:password@localhost:3306/mydb DB_HOST=localhost DB_PORT=3306 DB_NAME=mydb DB_USER=user DB_PASSWORD=password`, database_mongodb: `# MongoDB MONGODB_URI=mongodb://localhost:27017/mydb MONGODB_DB=mydb`, database_redis: `# Redis REDIS_URL=redis://localhost:6379 REDIS_HOST=localhost REDIS_PORT=6379 REDIS_PASSWORD=`, auth: `# Authentication JWT_SECRET=your-super-secret-key-change-in-production JWT_EXPIRES_IN=24h REFRESH_TOKEN_SECRET=another-secret-key REFRESH_TOKEN_EXPIRES_IN=7d # OAuth (optional) GOOGLE_CLIENT_ID= GOOGLE_CLIENT_SECRET= GITHUB_CLIENT_ID= GITHUB_CLIENT_SECRET=`, email: `# Email SMTP_HOST=smtp.example.com SMTP_PORT=587 SMTP_USER= SMTP_PASS= EMAIL_FROM=noreply@example.com`, storage: `# File Storage AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= AWS_REGION=us-east-1 AWS_BUCKET=my-bucket # Or Cloudflare R2 R2_ACCOUNT_ID= R2_ACCESS_KEY_ID= R2_SECRET_ACCESS_KEY= R2_BUCKET=`, api_keys: `# External APIs STRIPE_SECRET_KEY=sk_test_... STRIPE_WEBHOOK_SECRET=whsec_... OPENAI_API_KEY=sk-... SENDGRID_API_KEY=SG...`, monitoring: `# Monitoring & Logging SENTRY_DSN= LOG_LEVEL=info ENABLE_METRICS=true`, }; let envContent = "# Environment Variables\n# Copy to .env and fill in your values\n\n"; // Always add server envContent += sections.server + "\n\n"; // Add database if specified if (database && database !== "none") { envContent += (sections[`database_${database}`] || "") + "\n\n"; } // Type-specific sections if (type === "auth" || type === "all" || type === "fullstack") { envContent += sections.auth + "\n\n"; } if (type === "all" || type === "fullstack") { envContent += sections.email + "\n\n"; envContent += sections.storage + "\n\n"; envContent += sections.api_keys + "\n\n"; envContent += sections.monitoring + "\n\n"; } if (type === "api") { envContent += sections.api_keys + "\n\n"; } return { content: [{ type: "text", text: `# Generated .env.example\n\n\`\`\`env\n${envContent.trim()}\n\`\`\`\n\n⚠️ **Never commit .env to git!** Add it to .gitignore.` }] }; } // Export all export const utilityTools = { gitHelperSchema, gitHelperHandler, packageJsonSchema, packageJsonHandler, dockerfileSchema, dockerfileHandler, envTemplateSchema, envTemplateHandler, };

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/millsydotdev/Code-MCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server