/**
* Environment configuration utilities for HybridHub
 * Handles command-line arguments, environment variables, .env files, and TOML config files
*/
import { config as loadDotEnv } from "dotenv";
import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";
import { findConfigFile, loadTomlConfig, validateTomlConfig } from "./toml-loader.js";
import type {
TomlConfig,
DatabaseSourceConfig,
StorageSourceConfig,
DatabaseType,
StorageProviderType,
} from "../types/config.js";
// Create __dirname equivalent for ES modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Load .env from the current working directory at import time
loadDotEnv();
/**
 * Parse command-line arguments of the form --key=value; bare flags are stored as "true"
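 *
 * @example
 * // Illustrative sketch (hypothetical entry point and flags):
 * //   node server.js --transport=http --readonly
 * // yields Map { "transport" => "http", "readonly" => "true" }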
*/
function parseArgs(): Map<string, string> {
const args = new Map<string, string>();
for (const arg of process.argv.slice(2)) {
if (arg.startsWith("--")) {
const [key, ...valueParts] = arg.slice(2).split("=");
args.set(key, valueParts.join("=") || "true");
}
}
return args;
}
const cliArgs = parseArgs();
/**
 * Load the first matching .env file from the candidate locations
 * Returns the base name of the file that was loaded, or null if none was found
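 *
 * @example
 * // Illustrative sketch: with a .env.local file in the working directory and
 * // NODE_ENV=development, loadEnvFiles() loads it and returns ".env.local";
 * // it returns null when no candidate file exists.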
*/
export function loadEnvFiles(): string | null {
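  // Heuristic: treat runs under tsx (the TypeScript dev runner) as development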
const isDevelopment = process.env.NODE_ENV === "development" || process.argv[1]?.includes("tsx");
const envFileNames = isDevelopment
? [".env.local", ".env"]
: [".env"];
  // For each candidate file name, check the working directory first, then the package root
  const envPaths: string[] = [];
  for (const fileName of envFileNames) {
    envPaths.push(
      path.join(process.cwd(), fileName),
      path.join(__dirname, "..", "..", fileName)
    );
  }
for (const envPath of envPaths) {
if (fs.existsSync(envPath)) {
loadDotEnv({ path: envPath });
return path.basename(envPath);
}
}
return null;
}
/**
* Resolve transport type (stdio or http)
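 *
 * @example
 * // Priority sketch: --transport=http (CLI) beats HYBRIDHUB_TRANSPORT=stdio (env);
 * // with neither set, the result is { type: "stdio", source: "default" }.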
*/
export function resolveTransport(): { type: "stdio" | "http"; source: string } {
const transportArg = cliArgs.get("transport");
if (transportArg === "http" || transportArg === "stdio") {
return { type: transportArg, source: "CLI argument" };
}
const transportEnv = process.env.HYBRIDHUB_TRANSPORT;
if (transportEnv === "http" || transportEnv === "stdio") {
return { type: transportEnv, source: "environment variable" };
}
return { type: "stdio", source: "default" };
}
/**
* Resolve HTTP port
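 *
 * @example
 * // Illustrative: --port=3000 beats HYBRIDHUB_PORT/PORT; with nothing set,
 * // the result is { port: 8080, source: "default" }.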
*/
export function resolvePort(): { port: number; source: string } {
const portArg = cliArgs.get("port");
if (portArg) {
const port = parseInt(portArg, 10);
if (!isNaN(port)) {
return { port, source: "CLI argument" };
}
}
const portEnv = process.env.HYBRIDHUB_PORT || process.env.PORT;
if (portEnv) {
const port = parseInt(portEnv, 10);
if (!isNaN(port)) {
return { port, source: "environment variable" };
}
}
return { port: 8080, source: "default" };
}
/**
* Resolve API key configuration for HTTP authentication
* Only uses HYBRIDHUB_API_KEY to avoid conflicts with other projects
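 *
 * @example
 * // Illustrative (hypothetical key value): --api-key=abc123 beats
 * // HYBRIDHUB_API_KEY; with neither set, { apiKey: null, source: "not configured" }.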
*/
export function resolveApiKey(): { apiKey: string | null; source: string } {
const apiKeyArg = cliArgs.get("api-key");
if (apiKeyArg) {
return { apiKey: apiKeyArg, source: "CLI argument" };
}
// Only check HYBRIDHUB_API_KEY to avoid picking up API keys from other projects
const apiKeyEnv = process.env.HYBRIDHUB_API_KEY;
if (apiKeyEnv) {
return { apiKey: apiKeyEnv, source: "environment variable" };
}
return { apiKey: null, source: "not configured" };
}
/**
* Build a single database source configuration from environment variables
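 *
 * @example
 * // Illustrative environment (hypothetical values):
 * //   DB_DSN=postgres://app:secret@db.example.com:5432/app_db
 * // yields { id: "default", type: "postgres", dsn: ..., readonly: false };
 * // without a DSN, DB_TYPE, DB_HOST, DB_USER, DB_PASSWORD, and DB_NAME are all
 * // required (SQLite only needs DB_TYPE=sqlite and DB_NAME as the file path).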
*/
function buildDatabaseSourceFromEnv(): DatabaseSourceConfig | null {
// Check for DSN first
const dsn = process.env.DB_DSN || process.env.DSN;
if (dsn) {
let dbType: DatabaseType = "postgres";
try {
const url = new URL(dsn);
const protocol = url.protocol.replace(":", "");
if (protocol === "postgresql" || protocol === "postgres") {
dbType = "postgres";
} else if (protocol === "mysql") {
dbType = "mysql";
} else if (protocol === "mariadb") {
dbType = "mariadb";
} else if (protocol === "sqlserver" || protocol === "mssql") {
dbType = "sqlserver";
} else if (protocol === "sqlite") {
dbType = "sqlite";
}
} catch {
      // DSN could not be parsed as a URL; rely on DB_TYPE or the postgres default
}
return {
id: process.env.DB_ID || "default",
type: (process.env.DB_TYPE as DatabaseType) || dbType,
dsn,
readonly: process.env.DB_READONLY === "true",
max_rows: process.env.DB_MAX_ROWS ? parseInt(process.env.DB_MAX_ROWS, 10) : undefined,
connection_timeout: process.env.DB_TIMEOUT ? parseInt(process.env.DB_TIMEOUT, 10) : undefined,
};
}
// Build from individual params
const dbType = process.env.DB_TYPE as DatabaseType;
const dbHost = process.env.DB_HOST;
const dbUser = process.env.DB_USER;
const dbPassword = process.env.DB_PASSWORD;
const dbName = process.env.DB_NAME || process.env.DB_DATABASE;
// For SQLite, only type and database path are required
if (dbType === "sqlite" && dbName) {
return {
id: process.env.DB_ID || "default",
type: dbType,
database: dbName,
readonly: process.env.DB_READONLY === "true",
max_rows: process.env.DB_MAX_ROWS ? parseInt(process.env.DB_MAX_ROWS, 10) : undefined,
};
}
// For network databases
if (!dbType || !dbHost || !dbUser || !dbPassword || !dbName) {
return null;
}
return {
id: process.env.DB_ID || "default",
type: dbType,
host: dbHost,
port: process.env.DB_PORT ? parseInt(process.env.DB_PORT, 10) : undefined,
database: dbName,
user: dbUser,
password: dbPassword,
instanceName: process.env.DB_INSTANCE,
readonly: process.env.DB_READONLY === "true",
max_rows: process.env.DB_MAX_ROWS ? parseInt(process.env.DB_MAX_ROWS, 10) : undefined,
connection_timeout: process.env.DB_TIMEOUT ? parseInt(process.env.DB_TIMEOUT, 10) : undefined,
ssh_host: process.env.SSH_HOST,
ssh_port: process.env.SSH_PORT ? parseInt(process.env.SSH_PORT, 10) : undefined,
ssh_user: process.env.SSH_USER,
ssh_password: process.env.SSH_PASSWORD,
ssh_key: process.env.SSH_KEY,
ssh_passphrase: process.env.SSH_PASSPHRASE,
};
}
/**
* Build a single storage source configuration from environment variables
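 *
 * @example
 * // Illustrative environment (hypothetical values):
 * //   STORAGE_TYPE=s3 STORAGE_ENDPOINT=https://s3.example.com
 * //   STORAGE_ACCESS_KEY=AKIAEXAMPLE STORAGE_SECRET_KEY=...
 * // yields a source with ssl: true and path_style: false unless
 * // STORAGE_SSL=false or STORAGE_PATH_STYLE=true is set.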
*/
function buildStorageSourceFromEnv(): StorageSourceConfig | null {
const type = process.env.STORAGE_TYPE as StorageProviderType;
const endpoint = process.env.STORAGE_ENDPOINT;
const accessKey = process.env.STORAGE_ACCESS_KEY;
const secretKey = process.env.STORAGE_SECRET_KEY;
if (!type || !endpoint || !accessKey || !secretKey) {
return null;
}
return {
id: process.env.STORAGE_ID || "default",
type,
endpoint,
access_key: accessKey,
secret_key: secretKey,
region: process.env.STORAGE_REGION,
default_bucket: process.env.STORAGE_DEFAULT_BUCKET,
connection_timeout: process.env.STORAGE_TIMEOUT
? parseInt(process.env.STORAGE_TIMEOUT, 10)
: undefined,
security_token: process.env.STORAGE_SECURITY_TOKEN,
ssl: process.env.STORAGE_SSL !== "false",
path_style: process.env.STORAGE_PATH_STYLE === "true",
};
}
/**
* Resolve all source configurations
* Priority: TOML config > environment variables
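 *
 * @example
 * // Illustrative: --config=./hybridhub.toml (hypothetical path) pointing at a valid
 * // TOML file takes precedence; otherwise .env files and DB_* / STORAGE_* variables
 * // are consulted, and null is returned when nothing is configured.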
*/
export async function resolveSourceConfigs(): Promise<{
databases: DatabaseSourceConfig[];
storages: StorageSourceConfig[];
  database_tools?: TomlConfig["database_tools"];
  storage_tools?: TomlConfig["storage_tools"];
source: string;
} | null> {
// 1. Check for TOML config file
const configPath = cliArgs.get("config");
const configFile = findConfigFile(configPath);
if (configFile) {
const config = loadTomlConfig(configFile);
if (config) {
validateTomlConfig(config);
return {
databases: config.databases || [],
storages: config.storages || [],
database_tools: config.database_tools,
storage_tools: config.storage_tools,
source: `TOML config: ${configFile}`,
};
}
}
// 2. Load from .env files
loadEnvFiles();
// 3. Build from environment variables
const databases: DatabaseSourceConfig[] = [];
const storages: StorageSourceConfig[] = [];
const dbSource = buildDatabaseSourceFromEnv();
if (dbSource) {
databases.push(dbSource);
}
const storageSource = buildStorageSourceFromEnv();
if (storageSource) {
storages.push(storageSource);
}
if (databases.length === 0 && storages.length === 0) {
return null;
}
return {
databases,
storages,
source: "environment variables",
};
}
/**
 * Get the database type from the explicit config type, falling back to the DSN protocol
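 *
 * @example
 * // Illustrative: a source with dsn "mysql://user:pw@localhost/app" => "mysql";
 * // an unparseable DSN with no explicit type => "unknown".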
*/
function getDatabaseType(source: DatabaseSourceConfig): string {
if (source.type) {
return source.type;
}
if (source.dsn) {
try {
const url = new URL(source.dsn);
const protocol = url.protocol.replace(":", "");
if (protocol === "postgresql" || protocol === "postgres") return "postgres";
if (protocol === "mysql") return "mysql";
if (protocol === "mariadb") return "mariadb";
if (protocol === "sqlserver" || protocol === "mssql") return "sqlserver";
if (protocol === "sqlite") return "sqlite";
return protocol;
} catch {
return "unknown";
}
}
return "unknown";
}
/**
* Redact sensitive information from database source config for logging
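 *
 * @example
 * // Illustrative: for a source with id "default", a DSN of
 * // postgres://app:secret@db.example.com/app_db is rendered as
 * // "default (postgres): postgres://app:****@db.example.com/app_db".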
*/
export function redactDatabaseSourceConfig(source: DatabaseSourceConfig): string {
const dbType = getDatabaseType(source);
const parts = [`${source.id} (${dbType})`];
if (source.dsn) {
try {
const url = new URL(source.dsn);
url.password = "****";
parts.push(url.toString());
} catch {
parts.push(source.dsn.replace(/\/\/([^:]+):([^@]+)@/, "//$1:****@"));
}
} else if (source.host) {
parts.push(`${source.host}:${source.port || "default"}/${source.database}`);
}
return parts.join(": ");
}
/**
* Redact sensitive information from storage source config for logging
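 *
 * @example
 * // Illustrative: an access key of "AKIAEXAMPLE" (hypothetical) appears as "AKIA****";
 * // the secret key and security token are never included in the output.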
*/
export function redactStorageSourceConfig(source: StorageSourceConfig): string {
  // Show only a short prefix of the access key; never log the secret key or security token
  const maskedAccessKey = source.access_key.substring(0, 4) + "****";
  return `${source.id} (${source.type}): ${source.endpoint} (access key: ${maskedAccessKey})`;
}
/**
* Get summary of all configured sources
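 *
 * @example
 * // Illustrative output for one database and one storage source:
 * //   Database sources (1):
 * //     - default (postgres): db.example.com:5432/app_db
 * //   Storage sources (1):
 * //     - default (s3): https://s3.example.com (access key: AKIA****)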
*/
export function getSourcesSummary(
databases: DatabaseSourceConfig[],
storages: StorageSourceConfig[]
): string {
const lines: string[] = [];
if (databases.length > 0) {
lines.push(`Database sources (${databases.length}):`);
for (const db of databases) {
lines.push(` - ${redactDatabaseSourceConfig(db)}`);
}
}
if (storages.length > 0) {
lines.push(`Storage sources (${storages.length}):`);
for (const storage of storages) {
lines.push(` - ${redactStorageSourceConfig(storage)}`);
}
}
return lines.join("\n");
}