Skip to main content
Glama
robots.ts1.17 kB
import type { MetadataRoute } from "next"; import { createBaseUrl } from "@/libs/base-url"; import { env } from "@/env"; /** * Generates the robots.txt configuration for the application. * * For production: * - Allows crawling of public pages * - Includes sitemap reference * - Protects sensitive routes * * For non-production: * - Disallows all crawling to prevent search engine indexing * * @returns {MetadataRoute.Robots} Next.js compatible robots.txt configuration */ export default function robots(): MetadataRoute.Robots { // Check if this is NOT production const isNonProduction = env.NODE_ENV !== "production"; // For non-production environments, block all crawling if (isNonProduction) { return { rules: { userAgent: "*", disallow: ["/"], }, }; } // For production, allow crawling with restrictions return { rules: { userAgent: "*", allow: "/", disallow: [ "/api/", // Protect API routes "/_next/", // Protect Next.js internal routes "/admin/", // Protect admin routes if any ], }, sitemap: `${createBaseUrl()}/sitemap.xml`, }; }

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/lightfastai/lightfast-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.