diff --git a/.gitignore b/.gitignore
index 6eeeb56..3e3d452 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,6 +28,19 @@ pids
 *.seed
 *.pid.lock
 
+# Node artifacts
+node_modules/
+cli/node_modules/
+cli/lib/node_modules/
+
+# TypeScript build output
+cli/dist/
+cli/**/*.js
+cli/**/*.js.map
+cli/**/*.d.ts
+cli/**/*.d.ts.map
+!cli/jest.config.js
+
 # Generated config files (these are created by the sources-generator)
 config/pgwatch-postgres/sources.yml
 config/pgwatch-prometheus/sources.yml
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..c112146
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,174 @@
+stages:
+  - test
+
+cli:smoke:test:
+  stage: test
+  image: alpine:3.20
+  variables:
+    GIT_STRATEGY: fetch
+  before_script:
+    - apk add --no-cache bash curl git docker-cli docker-compose
+  script:
+    - bash -n ./postgres_ai
+    - |
+      set -euo pipefail
+      out=$(./postgres_ai help | tr -d "\r")
+      echo "$out" | grep -q "Postgres AI CLI"
+      echo "$out" | grep -q "COMMANDS:"
+  rules:
+    - if: '$CI_COMMIT_BRANCH'
+
+cli:e2e:dind:
+  stage: test
+  image: alpine:3.20
+  services:
+    - name: docker:24-dind
+      command: ["--tls=false"]
+  variables:
+    DOCKER_HOST: tcp://docker:2375
+    DOCKER_TLS_CERTDIR: ""
+    GIT_STRATEGY: fetch
+  before_script:
+    - apk add --no-cache bash curl git coreutils docker-cli docker-compose openssl
+    - docker version
+  script:
+    - set -euo pipefail
+    - bash -n ./postgres_ai
+    - ./postgres_ai check || true
+    - ./postgres_ai quickstart --demo -y
+    - timeout 60 ./postgres_ai status
+    - timeout 10 ./postgres_ai logs grafana || true
+    - ./postgres_ai config
+    - ./postgres_ai update-config
+    - ./postgres_ai list-instances || true
+    - ./postgres_ai add-key "test_key_123"
+    - ./postgres_ai show-key
+    - ./postgres_ai remove-key
+    - ./postgres_ai generate-grafana-password || true
+    - ./postgres_ai show-grafana-credentials || true
+    - ./postgres_ai add-instance "postgresql://postgres:postgres@target-db:5432/target_database" "ci-demo"
+    - ./postgres_ai test-instance "ci-demo" || true
+    - printf "y\n" | ./postgres_ai reset sink-postgres
+    - ./postgres_ai restart
+    - ./postgres_ai stop
+    - ./postgres_ai start
+    - printf "y\n" | ./postgres_ai reset
+    - ./postgres_ai clean
+  after_script:
+    - docker ps -a || true
+    - docker system prune -af || true
+  rules:
+    - if: '$CI_COMMIT_BRANCH'
+cli:node:smoke:
+  stage: test
+  image: node:20-alpine
+  variables:
+    GIT_STRATEGY: fetch
+  before_script:
+    - corepack enable || true
+  script:
+    - node -v && npm -v
+    - npm --prefix cli install --no-audit --no-fund
+    - node ./cli/dist/bin/postgres-ai.js --help
+    - node ./cli/dist/bin/postgres-ai.js mon status --help
+    - node ./cli/dist/bin/postgres-ai.js mon targets list --help
+    - npm install -g ./cli
+    - echo "prefix=$(npm config get prefix)" && echo "PATH=$PATH"
+    - command -v postgres-ai && postgres-ai --help
+    - command -v pgai && pgai --help
+    - rm -f .pgwatch-config
+    - node ./cli/dist/bin/postgres-ai.js add-key "test_key_1234567890"
+    - node ./cli/dist/bin/postgres-ai.js show-key | grep -E "\*{2,}|[0-9]{4}$"
+    - test -f ~/.config/postgresai/config.json
+    - grep -q 'test_key' ~/.config/postgresai/config.json
+    - node ./cli/dist/bin/postgres-ai.js remove-key
+    - if grep -q 'apiKey' ~/.config/postgresai/config.json; then echo 'key not removed' && exit 1; fi
+    - node ./cli/dist/bin/postgres-ai.js mon targets list | head -n 1 || true
+    - node ./cli/dist/bin/postgres-ai.js mon targets add 'postgresql://user:pass@host:5432/db' ci-test || true
+    - node ./cli/dist/bin/postgres-ai.js mon targets remove ci-test || true
+  rules:
+    - if: '$CI_COMMIT_BRANCH'
+
+cli:node:e2e:dind:
+  stage: test
+  image: node:20-alpine
+  services:
+    - name: docker:24-dind
+      command: ["--tls=false"]
+  variables:
+    DOCKER_HOST: tcp://docker:2375
+    DOCKER_TLS_CERTDIR: ""
+    GIT_STRATEGY: fetch
+  before_script:
+    - corepack enable || true
+    - apk add --no-cache bash docker-cli docker-compose openssl postgresql-client
+    - node -v && npm -v && docker version
+    - npm --prefix cli install --no-audit --no-fund
+  script:
+    - ./tests/e2e.cli.sh
+  after_script:
+    - docker ps -a || true
+  rules:
+    - if: '$CI_COMMIT_BRANCH'
+
+cli:node:full:dind:
+  stage: test
+  image: node:20-alpine
+  services:
+    - name: docker:24-dind
+      command: ["--tls=false"]
+  variables:
+    DOCKER_HOST: tcp://docker:2375
+    DOCKER_TLS_CERTDIR: ""
+    GIT_STRATEGY: fetch
+  before_script:
+    - corepack enable || true
+    - apk add --no-cache bash git docker-cli docker-compose openssl postgresql-client
+    - node -v && npm -v && docker version
+    - npm --prefix cli install --no-audit --no-fund
+  script:
+    - echo "=== Testing quickstart (demo mode) ==="
+    - node ./cli/dist/bin/postgres-ai.js mon quickstart --demo
+    - sleep 10
+    - node ./cli/dist/bin/postgres-ai.js mon status
+    - echo ""
+    - echo "=== Testing shell command ==="
+    - echo "SELECT 1;" | node ./cli/dist/bin/postgres-ai.js mon shell target-db || true
+    - echo ""
+    - echo "=== Testing complete workflow ==="
+    - node ./cli/dist/bin/postgres-ai.js mon targets add "postgresql://monitor:monitor_pass@target-db:5432/target_database" demo-test
+    - node ./cli/dist/bin/postgres-ai.js mon targets list
+    - node ./cli/dist/bin/postgres-ai.js mon targets test demo-test || true
+    - node ./cli/dist/bin/postgres-ai.js mon health --wait 120
+    - node ./cli/dist/bin/postgres-ai.js mon show-grafana-credentials
+    - echo ""
+    - echo "=== Cleanup ==="
+    - node ./cli/dist/bin/postgres-ai.js mon stop
+    - node ./cli/dist/bin/postgres-ai.js mon clean || true
+  after_script:
+    - docker ps -a || true
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main" || $CI_COMMIT_BRANCH =~ /^feature\//'
+  allow_failure: false
+
+cli:node:integration:
+  stage: test
+  image: node:20-alpine
+  variables:
+    GIT_STRATEGY: fetch
+  before_script:
+    - corepack enable || true
+    - node -v && npm -v
+    - npm --prefix cli install --no-audit --no-fund
+  script:
+    - |
+      set -euo pipefail
+      : "${PGAI_API_KEY:?PGAI_API_KEY is required for integration tests}"
+      BASE_URL="${PGAI_BASE_URL:-https://v2.postgres.ai/api/general/}"
+      echo "Using BASE_URL=$BASE_URL"
+      # Placeholder: run CLI help until API-backed commands are implemented
+      node ./cli/dist/bin/postgres-ai.js --help
+  rules:
+    - if: '$PGAI_API_KEY'
+
+
diff --git a/Formula/postgresai.rb b/Formula/postgresai.rb
new file mode 100644
index 0000000..126260b
--- /dev/null
+++ b/Formula/postgresai.rb
@@ -0,0 +1,24 @@
+# typed: false
+# frozen_string_literal: true
+
+class Postgresai < Formula
+  desc "postgres_ai CLI (Node.js)"
+  homepage "https://gitlab.com/postgres-ai/postgres_ai"
+  url "https://registry.npmjs.org/postgresai/-/postgresai-0.11.0-alpha.8.tgz"
+  sha256 "" # Will be calculated after publishing to npm
+  license "Apache-2.0"
+
+  depends_on "node"
+
+  def install
+    system "npm", "install", *Language::Node.std_npm_install_args(libexec)
+    bin.install_symlink Dir["#{libexec}/bin/*"]
+  end
+
+  test do
+    assert_match version.to_s, shell_output("#{bin}/postgres-ai --version")
+    assert_match "PostgresAI CLI", shell_output("#{bin}/postgres-ai --help")
+    assert_match version.to_s, shell_output("#{bin}/pgai --version")
+  end
+end
+
diff --git a/README.md b/README.md
index 2e52413..e1c5d67 100644
--- a/README.md
+++ b/README.md
@@ -203,6 +203,24 @@ Technical URLs (for advanced users):
 ./postgres_ai help
 ```
 
+### Node.js CLI (early preview)
+
+```bash
+# run without install
+node ./cli/bin/postgres-ai.js --help
+
+# local dev: install aliases into PATH
+npm --prefix cli install --no-audit --no-fund
+npm link ./cli
+postgres-ai --help
+pgai --help
+
+# or install globally after publish (planned)
+# npm i -g @postgresai/cli
+# postgres-ai --help
+# pgai --help
+```
+
 ## šŸ”‘ PostgresAI access token
 
 Get your access token at [PostgresAI](https://postgres.ai) for automated report uploads and advanced analysis.
diff --git a/cli/README.md b/cli/README.md
new file mode 100644
index 0000000..c1eeb72
--- /dev/null
+++ b/cli/README.md
@@ -0,0 +1,268 @@
+# PostgresAI CLI
+
+Command-line interface for PostgresAI monitoring and database management.
+
+## Installation
+
+### From npm
+
+```bash
+npm install -g postgresai
+```
+
+Or install the latest alpha release explicitly:
+```bash
+npm install -g postgresai@alpha
+```
+
+### From Homebrew (macOS)
+
+```bash
+# Add the PostgresAI tap
+brew tap postgres-ai/tap https://gitlab.com/postgres-ai/homebrew-tap.git
+
+# Install postgresai
+brew install postgresai
+```
+
+## Usage
+
+The CLI provides three command aliases:
+```bash
+postgres-ai --help
+postgresai --help
+pgai --help  # short alias
+```
+
+## Quick start
+
+### Authentication
+
+Authenticate via browser to obtain an API key:
+```bash
+pgai auth
+```
+
+This will:
+- Open your browser for authentication
+- Prompt you to select an organization
+- Automatically save your API key to `~/.config/postgresai/config.json`
+
+### Start monitoring
+
+Start monitoring with the demo database:
+```bash
+postgres-ai mon quickstart --demo
+```
+
+Start monitoring with your own database:
+```bash
+postgres-ai mon quickstart --db-url postgresql://user:pass@host:5432/db
+```
+
+Complete automated setup with API key and database:
+```bash
+postgres-ai mon quickstart --api-key your_key --db-url postgresql://user:pass@host:5432/db -y
+```
+
+This will:
+- Configure API key for automated report uploads (if provided)
+- Add PostgreSQL instance to monitor (if provided)
+- Generate secure Grafana password
+- Start all monitoring services
+- Open Grafana at http://localhost:3000
+
+## Commands
+
+### Monitoring services management (`mon` group)
+
+#### Service lifecycle
+```bash
+# Complete setup with various options
+postgres-ai mon quickstart                                 # Interactive setup for production
+postgres-ai mon quickstart --demo                          # Demo mode with sample database
+postgres-ai mon quickstart --api-key <key>                 # Setup with API key
+postgres-ai mon quickstart --db-url <url>                  # Setup with database URL
+postgres-ai mon quickstart --api-key <key> --db-url <url>  # Complete automated setup
+postgres-ai mon quickstart -y                              # Auto-accept all defaults
+
+# Service management
+postgres-ai mon start                      # Start monitoring services
+postgres-ai mon stop                       # Stop monitoring services
+postgres-ai mon restart [service]          # Restart all or specific monitoring service
+postgres-ai mon status                     # Show monitoring services status
+postgres-ai mon health [--wait <seconds>]  # Check monitoring services health
+```
+
+##### Quickstart options
+- `--demo` - Demo mode with sample database (testing only, cannot use with --api-key)
+- `--api-key <key>` - Postgres AI API key for automated report uploads
+- `--db-url <url>` - PostgreSQL connection URL to monitor (format: `postgresql://user:pass@host:port/db`)
+- `-y, --yes` - Accept all defaults and skip interactive prompts
+
+#### Monitoring target databases (`mon targets` subgroup)
+```bash
+postgres-ai mon targets list                   # List databases to monitor
+postgres-ai mon targets add <conn_str> [name]  # Add database to monitor
+postgres-ai mon targets remove <name>          # Remove monitoring target
+postgres-ai mon targets test <name>            # Test target connectivity
+```
+
+#### Configuration and maintenance
+```bash
+postgres-ai mon config           # Show monitoring configuration
+postgres-ai mon update-config    # Apply configuration changes
+postgres-ai mon update           # Update monitoring stack
+postgres-ai mon reset [service]  # Reset service data
+postgres-ai mon clean            # Cleanup artifacts
+postgres-ai mon check            # System readiness check
+postgres-ai mon shell <service>  # Open shell to monitoring service
+```
+
+### MCP server (`mcp` group)
+
+```bash
+pgai mcp start  # Start MCP stdio server exposing tools
+```
+
+Cursor configuration example (Settings → MCP):
+
+```json
+{
+  "mcpServers": {
+    "PostgresAI": {
+      "command": "pgai",
+      "args": ["mcp", "start"],
+      "env": {
+        "PGAI_API_BASE_URL": "https://postgres.ai/api/general/"
+      }
+    }
+  }
+}
+```
+
+Tools exposed:
+- list_issues: returns the same JSON as `pgai issues list`
+- view_issue: view a single issue with its comments (args: { issue_id, debug? })
+- post_issue_comment: post a comment (args: { issue_id, content, parent_comment_id?, debug? })
+
+### Issues management (`issues` group)
+
+```bash
+pgai issues list                               # List issues (shows: id, title, status, created_at)
+pgai issues view <issue_id>                    # View issue details and comments
+pgai issues post_comment <issue_id> <content>  # Post a comment to an issue
+# Options:
+#   --parent <id>  Parent comment ID (for replies)
+#   --debug        Enable debug output
+#   --json         Output raw JSON (overrides default YAML)
+```
+
+#### Output format for issues commands
+
+By default, issues commands print human-friendly YAML when writing to a terminal. For scripting, you can:
+
+- Use `--json` to force JSON output:
+
+```bash
+pgai issues list --json | jq '.[] | {id, title}'
+```
+
+- Rely on auto-detection: when stdout is not a TTY (e.g., piped or redirected), output is JSON automatically:
+
+```bash
+pgai issues view <issue_id> > issue.json
+```
+
+#### Grafana management
+```bash
+postgres-ai mon generate-grafana-password  # Generate new Grafana password
+postgres-ai mon show-grafana-credentials   # Show Grafana credentials
+```
+
+### Authentication and API key management
+```bash
+postgres-ai auth               # Authenticate via browser (recommended)
+postgres-ai add-key <api_key>  # Manually store API key
+postgres-ai show-key           # Show stored key (masked)
+postgres-ai remove-key         # Remove stored key
+```
+
+## Configuration
+
+The CLI stores configuration in `~/.config/postgresai/config.json` including:
+- API key
+- Base URL
+- Organization ID
+
+### Configuration priority
+
+API key resolution order:
+1. Command line option (`--api-key`)
+2. Environment variable (`PGAI_API_KEY`)
+3. User config file (`~/.config/postgresai/config.json`)
+4. Legacy project config (`.pgwatch-config`)
+
+Base URL resolution order:
+- API base URL (`apiBaseUrl`):
+  1. Command line option (`--api-base-url`)
+  2. Environment variable (`PGAI_API_BASE_URL`)
+  3. User config file `baseUrl` (`~/.config/postgresai/config.json`)
+  4. Default: `https://postgres.ai/api/general/`
+- UI base URL (`uiBaseUrl`):
+  1. Command line option (`--ui-base-url`)
+  2. Environment variable (`PGAI_UI_BASE_URL`)
+  3. Default: `https://console.postgres.ai`
+
+Normalization:
+- A single trailing `/` is removed to ensure consistent path joining.
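+
+For example, with a key already stored in the user config file, each higher-priority source overrides the one below it (illustrative key values only):
+
+```bash
+# Uses the key from ~/.config/postgresai/config.json (priority 3)
+pgai issues list
+
+# An environment variable (priority 2) takes precedence over the config file
+PGAI_API_KEY=env_key_example pgai issues list
+
+# A command-line option (priority 1) wins over both
+pgai --api-key cli_key_example issues list
+
+# Base URL overrides follow the same pattern
+PGAI_API_BASE_URL=https://v2.postgres.ai/api/general/ pgai issues list
+pgai --api-base-url https://v2.postgres.ai/api/general/ issues list
+```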
+ +### Environment variables + +- `PGAI_API_KEY` - API key for PostgresAI services +- `PGAI_API_BASE_URL` - API endpoint for backend RPC (default: `https://postgres.ai/api/general/`) +- `PGAI_UI_BASE_URL` - UI endpoint for browser routes (default: `https://console.postgres.ai`) + +### CLI options + +- `--api-base-url ` - overrides `PGAI_API_BASE_URL` +- `--ui-base-url ` - overrides `PGAI_UI_BASE_URL` + +### Examples + +Linux/macOS (bash/zsh): + +```bash +export PGAI_API_BASE_URL=https://v2.postgres.ai/api/general/ +export PGAI_UI_BASE_URL=https://console-dev.postgres.ai +pgai auth --debug +``` + +Windows PowerShell: + +```powershell +$env:PGAI_API_BASE_URL = "https://v2.postgres.ai/api/general/" +$env:PGAI_UI_BASE_URL = "https://console-dev.postgres.ai" +pgai auth --debug +``` + +Via CLI options (overrides env): + +```bash +pgai auth --debug \ + --api-base-url https://v2.postgres.ai/api/general/ \ + --ui-base-url https://console-dev.postgres.ai +``` + +Notes: +- If `PGAI_UI_BASE_URL` is not set, the default is `https://console.postgres.ai`. + +## Requirements + +- Node.js 18 or higher +- Docker and Docker Compose + +## Learn more + +- Documentation: https://postgres.ai/docs +- Issues: https://gitlab.com/postgres-ai/postgres_ai/-/issues diff --git a/cli/bin/postgres-ai.ts b/cli/bin/postgres-ai.ts new file mode 100644 index 0000000..a408d50 --- /dev/null +++ b/cli/bin/postgres-ai.ts @@ -0,0 +1,1771 @@ +#!/usr/bin/env node + +import { Command } from "commander"; +import * as pkg from "../package.json"; +import * as config from "../lib/config"; +import * as yaml from "js-yaml"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { spawn, spawnSync, exec, execFile } from "child_process"; +import { promisify } from "util"; +import * as readline from "readline"; +import * as http from "https"; +import { URL } from "url"; +import { startMcpServer } from "../lib/mcp-server"; +import { fetchIssues, fetchIssueComments, createIssueComment, fetchIssue } from "../lib/issues"; +import { resolveBaseUrls } from "../lib/util"; + +const execPromise = promisify(exec); +const execFilePromise = promisify(execFile); + +/** + * CLI configuration options + */ +interface CliOptions { + apiKey?: string; + apiBaseUrl?: string; + uiBaseUrl?: string; +} + +/** + * Configuration result + */ +interface ConfigResult { + apiKey: string; +} + +/** + * Instance configuration + */ +interface Instance { + name: string; + conn_str?: string; + preset_metrics?: string; + custom_metrics?: any; + is_enabled?: boolean; + group?: string; + custom_tags?: Record; +} + +/** + * Path resolution result + */ +interface PathResolution { + fs: typeof fs; + path: typeof path; + projectDir: string; + composeFile: string; + instancesFile: string; +} + +/** + * Get configuration from various sources + * @param opts - Command line options + * @returns Configuration object + */ +function getConfig(opts: CliOptions): ConfigResult { + // Priority order: + // 1. Command line option (--api-key) + // 2. Environment variable (PGAI_API_KEY) + // 3. User-level config file (~/.config/postgresai/config.json) + // 4. 
Legacy project-local config (.pgwatch-config) + + let apiKey = opts.apiKey || process.env.PGAI_API_KEY || ""; + + // Try config file if not provided via CLI or env + if (!apiKey) { + const fileConfig = config.readConfig(); + if (!apiKey) apiKey = fileConfig.apiKey || ""; + } + + return { apiKey }; +} + +// Human-friendly output helper: YAML for TTY by default, JSON when --json or non-TTY +function printResult(result: unknown, json?: boolean): void { + if (typeof result === "string") { + process.stdout.write(result); + if (!/\n$/.test(result)) console.log(); + return; + } + if (json || !process.stdout.isTTY) { + console.log(JSON.stringify(result, null, 2)); + } else { + let text = yaml.dump(result as any); + if (Array.isArray(result)) { + text = text.replace(/\n- /g, "\n\n- "); + } + console.log(text); + } +} + +const program = new Command(); + +program + .name("postgres-ai") + .description("PostgresAI CLI") + .version(pkg.version) + .option("--api-key ", "API key (overrides PGAI_API_KEY)") + .option( + "--api-base-url ", + "API base URL for backend RPC (overrides PGAI_API_BASE_URL)" + ) + .option( + "--ui-base-url ", + "UI base URL for browser routes (overrides PGAI_UI_BASE_URL)" + ); + +/** + * Stub function for not implemented commands + */ +const stub = (name: string) => async (): Promise => { + // Temporary stubs until Node parity is implemented + console.error(`${name}: not implemented in Node CLI yet; use bash CLI for now`); + process.exitCode = 2; +}; + +/** + * Resolve project paths + */ +function resolvePaths(): PathResolution { + const startDir = process.cwd(); + let currentDir = startDir; + + while (true) { + const composeFile = path.resolve(currentDir, "docker-compose.yml"); + if (fs.existsSync(composeFile)) { + const instancesFile = path.resolve(currentDir, "instances.yml"); + return { fs, path, projectDir: currentDir, composeFile, instancesFile }; + } + + const parentDir = path.dirname(currentDir); + if (parentDir === currentDir) break; + currentDir = parentDir; + } + + throw new Error( + `docker-compose.yml not found. 
Run monitoring commands from the PostgresAI project directory or one of its subdirectories (starting search from ${startDir}).` + ); +} + +/** + * Check if Docker daemon is running + */ +function isDockerRunning(): boolean { + try { + const result = spawnSync("docker", ["info"], { stdio: "pipe" }); + return result.status === 0; + } catch { + return false; + } +} + +/** + * Get docker compose command + */ +function getComposeCmd(): string[] | null { + const tryCmd = (cmd: string, args: string[]): boolean => + spawnSync(cmd, args, { stdio: "ignore" }).status === 0; + if (tryCmd("docker-compose", ["version"])) return ["docker-compose"]; + if (tryCmd("docker", ["compose", "version"])) return ["docker", "compose"]; + return null; +} + +/** + * Check if monitoring containers are already running + */ +function checkRunningContainers(): { running: boolean; containers: string[] } { + try { + const result = spawnSync( + "docker", + ["ps", "--filter", "name=grafana-with-datasources", "--filter", "name=pgwatch", "--format", "{{.Names}}"], + { stdio: "pipe", encoding: "utf8" } + ); + + if (result.status === 0 && result.stdout) { + const containers = result.stdout.trim().split("\n").filter(Boolean); + return { running: containers.length > 0, containers }; + } + return { running: false, containers: [] }; + } catch { + return { running: false, containers: [] }; + } +} + +/** + * Run docker compose command + */ +async function runCompose(args: string[]): Promise { + let composeFile: string; + let projectDir: string; + try { + ({ composeFile, projectDir } = resolvePaths()); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(message); + process.exitCode = 1; + return 1; + } + + // Check if Docker daemon is running + if (!isDockerRunning()) { + console.error("Docker is not running. 
Please start Docker and try again"); + process.exitCode = 1; + return 1; + } + + const cmd = getComposeCmd(); + if (!cmd) { + console.error("docker compose not found (need docker-compose or docker compose)"); + process.exitCode = 1; + return 1; + } + + // Read Grafana password from .pgwatch-config and pass to Docker Compose + const env = { ...process.env }; + const cfgPath = path.resolve(projectDir, ".pgwatch-config"); + if (fs.existsSync(cfgPath)) { + try { + const stats = fs.statSync(cfgPath); + if (!stats.isDirectory()) { + const content = fs.readFileSync(cfgPath, "utf8"); + const match = content.match(/^grafana_password=([^\r\n]+)/m); + if (match) { + env.GF_SECURITY_ADMIN_PASSWORD = match[1].trim(); + } + } + } catch (err) { + // If we can't read the config, continue without setting the password + } + } + + return new Promise((resolve) => { + const child = spawn(cmd[0], [...cmd.slice(1), "-f", composeFile, ...args], { + stdio: "inherit", + env: env + }); + child.on("close", (code) => resolve(code || 0)); + }); +} + +program.command("help", { isDefault: true }).description("show help").action(() => { + program.outputHelp(); +}); + +// Monitoring services management +const mon = program.command("mon").description("monitoring services management"); + +mon + .command("quickstart") + .description("complete setup (generate config, start monitoring services)") + .option("--demo", "demo mode with sample database", false) + .option("--api-key ", "Postgres AI API key for automated report uploads") + .option("--db-url ", "PostgreSQL connection URL to monitor") + .option("-y, --yes", "accept all defaults and skip interactive prompts", false) + .action(async (opts: { demo: boolean; apiKey?: string; dbUrl?: string; yes: boolean }) => { + console.log("\n================================="); + console.log(" PostgresAI Monitoring Quickstart"); + console.log("=================================\n"); + console.log("This will install, configure, and start the monitoring system\n"); + + // Validate conflicting options + if (opts.demo && opts.dbUrl) { + console.log("⚠ Both --demo and --db-url provided. 
Demo mode includes its own database."); + console.log("⚠ The --db-url will be ignored in demo mode.\n"); + opts.dbUrl = undefined; + } + + if (opts.demo && opts.apiKey) { + console.error("āœ— Cannot use --api-key with --demo mode"); + console.error("āœ— Demo mode is for testing only and does not support API key integration"); + console.error("\nUse demo mode without API key: postgres-ai mon quickstart --demo"); + console.error("Or use production mode with API key: postgres-ai mon quickstart --api-key=your_key"); + process.exitCode = 1; + return; + } + + // Check if containers are already running + const { running, containers } = checkRunningContainers(); + if (running) { + console.log(`⚠ Monitoring services are already running: ${containers.join(", ")}`); + console.log("Use 'postgres-ai mon restart' to restart them\n"); + return; + } + + // Step 1: API key configuration (only in production mode) + if (!opts.demo) { + console.log("Step 1: Postgres AI API Configuration (Optional)"); + console.log("An API key enables automatic upload of PostgreSQL reports to Postgres AI\n"); + + if (opts.apiKey) { + console.log("Using API key provided via --api-key parameter"); + config.writeConfig({ apiKey: opts.apiKey }); + console.log("āœ“ API key saved\n"); + } else if (opts.yes) { + // Auto-yes mode without API key - skip API key setup + console.log("Auto-yes mode: no API key provided, skipping API key setup"); + console.log("⚠ Reports will be generated locally only"); + console.log("You can add an API key later with: postgres-ai add-key \n"); + } else { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + const question = (prompt: string): Promise => + new Promise((resolve) => rl.question(prompt, resolve)); + + try { + const answer = await question("Do you have a Postgres AI API key? (Y/n): "); + const proceedWithApiKey = !answer || answer.toLowerCase() === "y"; + + if (proceedWithApiKey) { + while (true) { + const inputApiKey = await question("Enter your Postgres AI API key: "); + const trimmedKey = inputApiKey.trim(); + + if (trimmedKey) { + config.writeConfig({ apiKey: trimmedKey }); + console.log("āœ“ API key saved\n"); + break; + } + + console.log("⚠ API key cannot be empty"); + const retry = await question("Try again or skip API key setup, retry? 
(Y/n): "); + if (retry.toLowerCase() === "n") { + console.log("⚠ Skipping API key setup - reports will be generated locally only"); + console.log("You can add an API key later with: postgres-ai add-key \n"); + break; + } + } + } else { + console.log("⚠ Skipping API key setup - reports will be generated locally only"); + console.log("You can add an API key later with: postgres-ai add-key \n"); + } + } finally { + rl.close(); + } + } + } else { + console.log("Step 1: Demo mode - API key configuration skipped"); + console.log("Demo mode is for testing only and does not support API key integration\n"); + } + + // Step 2: Add PostgreSQL instance (if not demo mode) + if (!opts.demo) { + console.log("Step 2: Add PostgreSQL Instance to Monitor\n"); + + // Clear instances.yml in production mode (start fresh) + const instancesPath = path.resolve(process.cwd(), "instances.yml"); + const emptyInstancesContent = "# PostgreSQL instances to monitor\n# Add your instances using: postgres-ai mon targets add\n\n"; + fs.writeFileSync(instancesPath, emptyInstancesContent, "utf8"); + + if (opts.dbUrl) { + console.log("Using database URL provided via --db-url parameter"); + console.log(`Adding PostgreSQL instance from: ${opts.dbUrl}\n`); + + const match = opts.dbUrl.match(/^postgresql:\/\/[^@]+@([^:/]+)/); + const autoInstanceName = match ? match[1] : "db-instance"; + + const connStr = opts.dbUrl; + const m = connStr.match(/^postgresql:\/\/([^:]+):([^@]+)@([^:\/]+)(?::(\d+))?\/(.+)$/); + + if (!m) { + console.error("āœ— Invalid connection string format"); + process.exitCode = 1; + return; + } + + const host = m[3]; + const db = m[5]; + const instanceName = `${host}-${db}`.replace(/[^a-zA-Z0-9-]/g, "-"); + + const body = `- name: ${instanceName}\n conn_str: ${connStr}\n preset_metrics: full\n custom_metrics:\n is_enabled: true\n group: default\n custom_tags:\n env: production\n cluster: default\n node_name: ${instanceName}\n sink_type: ~sink_type~\n`; + fs.appendFileSync(instancesPath, body, "utf8"); + console.log(`āœ“ Monitoring target '${instanceName}' added\n`); + + // Test connection + console.log("Testing connection to the added instance..."); + try { + const { Client } = require("pg"); + const client = new Client({ connectionString: connStr }); + await client.connect(); + const result = await client.query("select version();"); + console.log("āœ“ Connection successful"); + console.log(`${result.rows[0].version}\n`); + await client.end(); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āœ— Connection failed: ${message}\n`); + } + } else if (opts.yes) { + // Auto-yes mode without database URL - skip database setup + console.log("Auto-yes mode: no database URL provided, skipping database setup"); + console.log("⚠ No PostgreSQL instance added"); + console.log("You can add one later with: postgres-ai mon targets add\n"); + } else { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + const question = (prompt: string): Promise => + new Promise((resolve) => rl.question(prompt, resolve)); + + try { + console.log("You need to add at least one PostgreSQL instance to monitor"); + const answer = await question("Do you want to add a PostgreSQL instance now? (Y/n): "); + const proceedWithInstance = !answer || answer.toLowerCase() === "y"; + + if (proceedWithInstance) { + console.log("\nYou can provide either:"); + console.log(" 1. 
A full connection string: postgresql://user:pass@host:port/database"); + console.log(" 2. Press Enter to skip for now\n"); + + const connStr = await question("Enter connection string (or press Enter to skip): "); + + if (connStr.trim()) { + const m = connStr.match(/^postgresql:\/\/([^:]+):([^@]+)@([^:\/]+)(?::(\d+))?\/(.+)$/); + if (!m) { + console.error("āœ— Invalid connection string format"); + console.log("⚠ Continuing without adding instance\n"); + } else { + const host = m[3]; + const db = m[5]; + const instanceName = `${host}-${db}`.replace(/[^a-zA-Z0-9-]/g, "-"); + + const body = `- name: ${instanceName}\n conn_str: ${connStr}\n preset_metrics: full\n custom_metrics:\n is_enabled: true\n group: default\n custom_tags:\n env: production\n cluster: default\n node_name: ${instanceName}\n sink_type: ~sink_type~\n`; + fs.appendFileSync(instancesPath, body, "utf8"); + console.log(`āœ“ Monitoring target '${instanceName}' added\n`); + + // Test connection + console.log("Testing connection to the added instance..."); + try { + const { Client } = require("pg"); + const client = new Client({ connectionString: connStr }); + await client.connect(); + const result = await client.query("select version();"); + console.log("āœ“ Connection successful"); + console.log(`${result.rows[0].version}\n`); + await client.end(); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āœ— Connection failed: ${message}\n`); + } + } + } else { + console.log("⚠ No PostgreSQL instance added - you can add one later with: postgres-ai mon targets add\n"); + } + } else { + console.log("⚠ No PostgreSQL instance added - you can add one later with: postgres-ai mon targets add\n"); + } + } finally { + rl.close(); + } + } + } else { + console.log("Step 2: Demo mode enabled - using included demo PostgreSQL database\n"); + } + + // Step 3: Update configuration + console.log(opts.demo ? "Step 3: Updating configuration..." : "Step 3: Updating configuration..."); + const code1 = await runCompose(["run", "--rm", "sources-generator"]); + if (code1 !== 0) { + process.exitCode = code1; + return; + } + console.log("āœ“ Configuration updated\n"); + + // Step 4: Ensure Grafana password is configured + console.log(opts.demo ? "Step 4: Configuring Grafana security..." 
: "Step 4: Configuring Grafana security..."); + const cfgPath = path.resolve(process.cwd(), ".pgwatch-config"); + let grafanaPassword = ""; + + try { + if (fs.existsSync(cfgPath)) { + const stats = fs.statSync(cfgPath); + if (!stats.isDirectory()) { + const content = fs.readFileSync(cfgPath, "utf8"); + const match = content.match(/^grafana_password=([^\r\n]+)/m); + if (match) { + grafanaPassword = match[1].trim(); + } + } + } + + if (!grafanaPassword) { + console.log("Generating secure Grafana password..."); + const { stdout: password } = await execPromise("openssl rand -base64 12 | tr -d '\n'"); + grafanaPassword = password.trim(); + + let configContent = ""; + if (fs.existsSync(cfgPath)) { + const stats = fs.statSync(cfgPath); + if (!stats.isDirectory()) { + configContent = fs.readFileSync(cfgPath, "utf8"); + } + } + + const lines = configContent.split(/\r?\n/).filter((l) => !/^grafana_password=/.test(l)); + lines.push(`grafana_password=${grafanaPassword}`); + fs.writeFileSync(cfgPath, lines.filter(Boolean).join("\n") + "\n", "utf8"); + } + + console.log("āœ“ Grafana password configured\n"); + } catch (error) { + console.log("⚠ Could not generate Grafana password automatically"); + console.log("Using default password: demo\n"); + grafanaPassword = "demo"; + } + + // Step 5: Start services + console.log(opts.demo ? "Step 5: Starting monitoring services..." : "Step 5: Starting monitoring services..."); + const code2 = await runCompose(["up", "-d", "--force-recreate"]); + if (code2 !== 0) { + process.exitCode = code2; + return; + } + console.log("āœ“ Services started\n"); + + // Final summary + console.log("================================="); + console.log(" šŸŽ‰ Quickstart setup completed!"); + console.log("=================================\n"); + + console.log("What's running:"); + if (opts.demo) { + console.log(" āœ… Demo PostgreSQL database (monitoring target)"); + } + console.log(" āœ… PostgreSQL monitoring infrastructure"); + console.log(" āœ… Grafana dashboards (with secure password)"); + console.log(" āœ… Prometheus metrics storage"); + console.log(" āœ… Flask API backend"); + console.log(" āœ… Automated report generation (every 24h)"); + console.log(" āœ… Host stats monitoring (CPU, memory, disk, I/O)\n"); + + if (!opts.demo) { + console.log("Next steps:"); + console.log(" • Add more PostgreSQL instances: postgres-ai mon targets add"); + console.log(" • View configured instances: postgres-ai mon targets list"); + console.log(" • Check service health: postgres-ai mon health\n"); + } else { + console.log("Demo mode next steps:"); + console.log(" • Explore Grafana dashboards at http://localhost:3000"); + console.log(" • Connect to demo database: postgresql://postgres:postgres@localhost:55432/target_database"); + console.log(" • Generate some load on the demo database to see metrics\n"); + } + + console.log("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"); + console.log("šŸš€ MAIN ACCESS POINT - Start here:"); + console.log(" Grafana Dashboard: http://localhost:3000"); + console.log(` Login: monitor / ${grafanaPassword}`); + console.log("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n"); + }); + +mon + .command("start") + .description("start monitoring services") + .action(async () => { + // Check if containers are already running + const { running, containers } = checkRunningContainers(); + if (running) { + console.log(`Monitoring services are already running: ${containers.join(", ")}`); + console.log("Use 'postgres-ai mon restart' to restart them"); + 
return; + } + + const code = await runCompose(["up", "-d"]); + if (code !== 0) process.exitCode = code; + }); + +mon + .command("stop") + .description("stop monitoring services") + .action(async () => { + const code = await runCompose(["down"]); + if (code !== 0) process.exitCode = code; + }); + +mon + .command("restart [service]") + .description("restart all monitoring services or specific service") + .action(async (service?: string) => { + const args = ["restart"]; + if (service) args.push(service); + const code = await runCompose(args); + if (code !== 0) process.exitCode = code; + }); + +mon + .command("status") + .description("show monitoring services status") + .action(async () => { + const code = await runCompose(["ps"]); + if (code !== 0) process.exitCode = code; + }); + +mon + .command("logs [service]") + .option("-f, --follow", "follow logs", false) + .option("--tail ", "number of lines to show from the end of logs", "all") + .description("show logs for all or specific monitoring service") + .action(async (service: string | undefined, opts: { follow: boolean; tail: string }) => { + const args: string[] = ["logs"]; + if (opts.follow) args.push("-f"); + if (opts.tail) args.push("--tail", opts.tail); + if (service) args.push(service); + const code = await runCompose(args); + if (code !== 0) process.exitCode = code; + }); +mon + .command("health") + .description("health check for monitoring services") + .option("--wait ", "wait time in seconds for services to become healthy", parseInt, 0) + .action(async (opts: { wait: number }) => { + const services = [ + { name: "Grafana", container: "grafana-with-datasources" }, + { name: "Prometheus", container: "sink-prometheus" }, + { name: "PGWatch (Postgres)", container: "pgwatch-postgres" }, + { name: "PGWatch (Prometheus)", container: "pgwatch-prometheus" }, + { name: "Target DB", container: "target-db" }, + { name: "Sink Postgres", container: "sink-postgres" }, + ]; + + const waitTime = opts.wait || 0; + const maxAttempts = waitTime > 0 ? Math.ceil(waitTime / 5) : 1; + + console.log("Checking service health...\n"); + + let allHealthy = false; + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + if (attempt > 1) { + console.log(`Retrying (attempt ${attempt}/${maxAttempts})...\n`); + await new Promise(resolve => setTimeout(resolve, 5000)); + } + + allHealthy = true; + for (const service of services) { + try { + const { execSync } = require("child_process"); + const status = execSync(`docker inspect -f '{{.State.Status}}' ${service.container} 2>/dev/null`, { + encoding: 'utf8', + stdio: ['pipe', 'pipe', 'pipe'] + }).trim(); + + if (status === 'running') { + console.log(`āœ“ ${service.name}: healthy`); + } else { + console.log(`āœ— ${service.name}: unhealthy (status: ${status})`); + allHealthy = false; + } + } catch (error) { + console.log(`āœ— ${service.name}: unreachable`); + allHealthy = false; + } + } + + if (allHealthy) { + break; + } + } + + console.log(""); + if (allHealthy) { + console.log("All services are healthy"); + } else { + console.log("Some services are unhealthy"); + process.exitCode = 1; + } + }); +mon + .command("config") + .description("show monitoring services configuration") + .action(async () => { + let projectDir: string; + let composeFile: string; + let instancesFile: string; + try { + ({ projectDir, composeFile, instancesFile } = resolvePaths()); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + console.error(message); + process.exitCode = 1; + return; + } + console.log(`Project Directory: ${projectDir}`); + console.log(`Docker Compose File: ${composeFile}`); + console.log(`Instances File: ${instancesFile}`); + if (fs.existsSync(instancesFile)) { + console.log("\nInstances configuration:\n"); + const text = fs.readFileSync(instancesFile, "utf8"); + process.stdout.write(text); + if (!/\n$/.test(text)) console.log(); + } + }); +mon + .command("update-config") + .description("apply monitoring services configuration (generate sources)") + .action(async () => { + const code = await runCompose(["run", "--rm", "sources-generator"]); + if (code !== 0) process.exitCode = code; + }); +mon + .command("update") + .description("update monitoring stack") + .action(async () => { + console.log("Updating PostgresAI monitoring stack...\n"); + + try { + // Check if we're in a git repo + const gitDir = path.resolve(process.cwd(), ".git"); + if (!fs.existsSync(gitDir)) { + console.error("Not a git repository. Cannot update."); + process.exitCode = 1; + return; + } + + // Fetch latest changes + console.log("Fetching latest changes..."); + await execPromise("git fetch origin"); + + // Check current branch + const { stdout: branch } = await execPromise("git rev-parse --abbrev-ref HEAD"); + const currentBranch = branch.trim(); + console.log(`Current branch: ${currentBranch}`); + + // Pull latest changes + console.log("Pulling latest changes..."); + const { stdout: pullOut } = await execPromise("git pull origin " + currentBranch); + console.log(pullOut); + + // Update Docker images + console.log("\nUpdating Docker images..."); + const code = await runCompose(["pull"]); + + if (code === 0) { + console.log("\nāœ“ Update completed successfully"); + console.log("\nTo apply updates, restart monitoring services:"); + console.log(" postgres-ai mon restart"); + } else { + console.error("\nāœ— Docker image update failed"); + process.exitCode = 1; + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`Update failed: ${message}`); + process.exitCode = 1; + } + }); +mon + .command("reset [service]") + .description("reset all or specific monitoring service") + .action(async (service?: string) => { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); + + const question = (prompt: string): Promise => + new Promise((resolve) => rl.question(prompt, resolve)); + + try { + if (service) { + // Reset specific service + console.log(`\nThis will stop '${service}', remove its volume, and restart it.`); + console.log("All data for this service will be lost!\n"); + + const answer = await question("Continue? (y/N): "); + if (answer.toLowerCase() !== "y") { + console.log("Cancelled"); + rl.close(); + return; + } + + console.log(`\nStopping ${service}...`); + await runCompose(["stop", service]); + + console.log(`Removing volume for ${service}...`); + await runCompose(["rm", "-f", "-v", service]); + + console.log(`Restarting ${service}...`); + const code = await runCompose(["up", "-d", service]); + + if (code === 0) { + console.log(`\nāœ“ Service '${service}' has been reset`); + } else { + console.error(`\nāœ— Failed to restart '${service}'`); + process.exitCode = 1; + } + } else { + // Reset all services + console.log("\nThis will stop all services and remove all data!"); + console.log("Volumes, networks, and containers will be deleted.\n"); + + const answer = await question("Continue? 
(y/N): "); + if (answer.toLowerCase() !== "y") { + console.log("Cancelled"); + rl.close(); + return; + } + + console.log("\nStopping services and removing data..."); + const downCode = await runCompose(["down", "-v"]); + + if (downCode === 0) { + console.log("āœ“ Environment reset completed - all containers and data removed"); + } else { + console.error("āœ— Reset failed"); + process.exitCode = 1; + } + } + + rl.close(); + } catch (error) { + rl.close(); + const message = error instanceof Error ? error.message : String(error); + console.error(`Reset failed: ${message}`); + process.exitCode = 1; + } + }); +mon + .command("clean") + .description("cleanup monitoring services artifacts") + .action(async () => { + console.log("Cleaning up Docker resources...\n"); + + try { + // Remove stopped containers + const { stdout: containers } = await execFilePromise("docker", ["ps", "-aq", "--filter", "status=exited"]); + if (containers.trim()) { + const containerIds = containers.trim().split('\n'); + await execFilePromise("docker", ["rm", ...containerIds]); + console.log("āœ“ Removed stopped containers"); + } else { + console.log("āœ“ No stopped containers to remove"); + } + + // Remove unused volumes + await execFilePromise("docker", ["volume", "prune", "-f"]); + console.log("āœ“ Removed unused volumes"); + + // Remove unused networks + await execFilePromise("docker", ["network", "prune", "-f"]); + console.log("āœ“ Removed unused networks"); + + // Remove dangling images + await execFilePromise("docker", ["image", "prune", "-f"]); + console.log("āœ“ Removed dangling images"); + + console.log("\nCleanup completed"); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`Error during cleanup: ${message}`); + process.exitCode = 1; + } + }); +mon + .command("shell ") + .description("open shell to monitoring service") + .action(async (service: string) => { + const code = await runCompose(["exec", service, "/bin/sh"]); + if (code !== 0) process.exitCode = code; + }); +mon + .command("check") + .description("monitoring services system readiness check") + .action(async () => { + const code = await runCompose(["ps"]); + if (code !== 0) process.exitCode = code; + }); + +// Monitoring targets (databases to monitor) +const targets = mon.command("targets").description("manage databases to monitor"); + +targets + .command("list") + .description("list monitoring target databases") + .action(async () => { + const instancesPath = path.resolve(process.cwd(), "instances.yml"); + if (!fs.existsSync(instancesPath)) { + console.error(`instances.yml not found in ${process.cwd()}`); + process.exitCode = 1; + return; + } + + try { + const content = fs.readFileSync(instancesPath, "utf8"); + const instances = yaml.load(content) as Instance[] | null; + + if (!instances || !Array.isArray(instances) || instances.length === 0) { + console.log("No monitoring targets configured"); + console.log(""); + console.log("To add a monitoring target:"); + console.log(" postgres-ai mon targets add "); + console.log(""); + console.log("Example:"); + console.log(" postgres-ai mon targets add 'postgresql://user:pass@host:5432/db' my-db"); + return; + } + + // Filter out disabled instances (e.g., demo placeholders) + const filtered = instances.filter((inst) => inst.name && inst.is_enabled !== false); + + if (filtered.length === 0) { + console.log("No monitoring targets configured"); + console.log(""); + console.log("To add a monitoring target:"); + console.log(" postgres-ai mon targets add 
"); + console.log(""); + console.log("Example:"); + console.log(" postgres-ai mon targets add 'postgresql://user:pass@host:5432/db' my-db"); + return; + } + + for (const inst of filtered) { + console.log(`Target: ${inst.name}`); + } + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + console.error(`Error parsing instances.yml: ${message}`); + process.exitCode = 1; + } + }); +targets + .command("add [connStr] [name]") + .description("add monitoring target database") + .action(async (connStr?: string, name?: string) => { + const file = path.resolve(process.cwd(), "instances.yml"); + if (!connStr) { + console.error("Connection string required: postgresql://user:pass@host:port/db"); + process.exitCode = 1; + return; + } + const m = connStr.match(/^postgresql:\/\/([^:]+):([^@]+)@([^:\/]+)(?::(\d+))?\/(.+)$/); + if (!m) { + console.error("Invalid connection string format"); + process.exitCode = 1; + return; + } + const host = m[3]; + const db = m[5]; + const instanceName = name && name.trim() ? name.trim() : `${host}-${db}`.replace(/[^a-zA-Z0-9-]/g, "-"); + + // Check if instance already exists + try { + if (fs.existsSync(file)) { + const content = fs.readFileSync(file, "utf8"); + const instances = yaml.load(content) as Instance[] | null || []; + if (Array.isArray(instances)) { + const exists = instances.some((inst) => inst.name === instanceName); + if (exists) { + console.error(`Monitoring target '${instanceName}' already exists`); + process.exitCode = 1; + return; + } + } + } + } catch (err) { + // If YAML parsing fails, fall back to simple check + const content = fs.existsSync(file) ? fs.readFileSync(file, "utf8") : ""; + if (new RegExp(`^- name: ${instanceName}$`, "m").test(content)) { + console.error(`Monitoring target '${instanceName}' already exists`); + process.exitCode = 1; + return; + } + } + + // Add new instance + const body = `- name: ${instanceName}\n conn_str: ${connStr}\n preset_metrics: full\n custom_metrics:\n is_enabled: true\n group: default\n custom_tags:\n env: production\n cluster: default\n node_name: ${instanceName}\n sink_type: ~sink_type~\n`; + const content = fs.existsSync(file) ? fs.readFileSync(file, "utf8") : ""; + fs.appendFileSync(file, (content && !/\n$/.test(content) ? "\n" : "") + body, "utf8"); + console.log(`Monitoring target '${instanceName}' added`); + }); +targets + .command("remove ") + .description("remove monitoring target database") + .action(async (name: string) => { + const file = path.resolve(process.cwd(), "instances.yml"); + if (!fs.existsSync(file)) { + console.error("instances.yml not found"); + process.exitCode = 1; + return; + } + + try { + const content = fs.readFileSync(file, "utf8"); + const instances = yaml.load(content) as Instance[] | null; + + if (!instances || !Array.isArray(instances)) { + console.error("Invalid instances.yml format"); + process.exitCode = 1; + return; + } + + const filtered = instances.filter((inst) => inst.name !== name); + + if (filtered.length === instances.length) { + console.error(`Monitoring target '${name}' not found`); + process.exitCode = 1; + return; + } + + fs.writeFileSync(file, yaml.dump(filtered), "utf8"); + console.log(`Monitoring target '${name}' removed`); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + console.error(`Error processing instances.yml: ${message}`); + process.exitCode = 1; + } + }); +targets + .command("test ") + .description("test monitoring target database connectivity") + .action(async (name: string) => { + const instancesPath = path.resolve(process.cwd(), "instances.yml"); + if (!fs.existsSync(instancesPath)) { + console.error("instances.yml not found"); + process.exitCode = 1; + return; + } + + try { + const content = fs.readFileSync(instancesPath, "utf8"); + const instances = yaml.load(content) as Instance[] | null; + + if (!instances || !Array.isArray(instances)) { + console.error("Invalid instances.yml format"); + process.exitCode = 1; + return; + } + + const instance = instances.find((inst) => inst.name === name); + + if (!instance) { + console.error(`Monitoring target '${name}' not found`); + process.exitCode = 1; + return; + } + + if (!instance.conn_str) { + console.error(`Connection string not found for monitoring target '${name}'`); + process.exitCode = 1; + return; + } + + console.log(`Testing connection to monitoring target '${name}'...`); + + // Use native pg client instead of requiring psql to be installed + const { Client } = require('pg'); + const client = new Client({ connectionString: instance.conn_str }); + + try { + await client.connect(); + const result = await client.query('select version();'); + console.log(`āœ“ Connection successful`); + console.log(result.rows[0].version); + } finally { + await client.end(); + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āœ— Connection failed: ${message}`); + process.exitCode = 1; + } + }); + +// Authentication and API key management +program + .command("auth") + .description("authenticate via browser and obtain API key") + .option("--port ", "local callback server port (default: random)", parseInt) + .option("--debug", "enable debug output") + .action(async (opts: { port?: number; debug?: boolean }) => { + const pkce = require("../lib/pkce"); + const authServer = require("../lib/auth-server"); + + console.log("Starting authentication flow...\n"); + + // Generate PKCE parameters + const params = pkce.generatePKCEParams(); + + const rootOpts = program.opts(); + const cfg = config.readConfig(); + const { apiBaseUrl, uiBaseUrl } = resolveBaseUrls(rootOpts, cfg); + + if (opts.debug) { + console.log(`Debug: Resolved API base URL: ${apiBaseUrl}`); + console.log(`Debug: Resolved UI base URL: ${uiBaseUrl}`); + } + + try { + // Step 1: Start local callback server FIRST to get actual port + console.log("Starting local callback server..."); + const requestedPort = opts.port || 0; // 0 = OS assigns available port + const callbackServer = authServer.createCallbackServer(requestedPort, params.state, 120000); // 2 minute timeout + + // Wait a bit for server to start and get port + await new Promise(resolve => setTimeout(resolve, 100)); + const actualPort = callbackServer.getPort(); + const redirectUri = `http://localhost:${actualPort}/callback`; + + console.log(`Callback server listening on port ${actualPort}`); + + // Step 2: Initialize OAuth session on backend + console.log("Initializing authentication session..."); + const initData = JSON.stringify({ + client_type: "cli", + state: params.state, + code_challenge: params.codeChallenge, + code_challenge_method: params.codeChallengeMethod, + redirect_uri: redirectUri, + }); + + // Build init URL by appending to the API base path (keep /api/general) + const initUrl = new 
URL(`${apiBaseUrl}/rpc/oauth_init`); + + if (opts.debug) { + console.log(`Debug: Trying to POST to: ${initUrl.toString()}`); + console.log(`Debug: Request data: ${initData}`); + } + + const initReq = http.request( + initUrl, + { + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.byteLength(initData), + }, + }, + (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", async () => { + if (res.statusCode !== 200) { + console.error(`Failed to initialize auth session: ${res.statusCode}`); + + // Check if response is HTML (common for 404 pages) + if (data.trim().startsWith(" { + console.log("\n\nAuthentication cancelled by user."); + callbackServer.server.close(); + process.exit(130); // Standard exit code for SIGINT + }; + process.on("SIGINT", cancelHandler); + + try { + const { code } = await callbackServer.promise; + + // Remove the cancel handler after successful auth + process.off("SIGINT", cancelHandler); + + // Step 5: Exchange code for token + console.log("\nExchanging authorization code for API token..."); + const exchangeData = JSON.stringify({ + authorization_code: code, + code_verifier: params.codeVerifier, + state: params.state, + }); + const exchangeUrl = new URL(`${apiBaseUrl}/rpc/oauth_token_exchange`); + const exchangeReq = http.request( + exchangeUrl, + { + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.byteLength(exchangeData), + }, + }, + (exchangeRes) => { + let exchangeBody = ""; + exchangeRes.on("data", (chunk) => (exchangeBody += chunk)); + exchangeRes.on("end", () => { + if (exchangeRes.statusCode !== 200) { + console.error(`Failed to exchange code for token: ${exchangeRes.statusCode}`); + + // Check if response is HTML (common for 404 pages) + if (exchangeBody.trim().startsWith(" { + console.error(`Exchange request failed: ${err.message}`); + process.exit(1); + }); + + exchangeReq.write(exchangeData); + exchangeReq.end(); + + } catch (err) { + // Remove the cancel handler in error case too + process.off("SIGINT", cancelHandler); + + const message = err instanceof Error ? err.message : String(err); + + // Provide more helpful error messages + if (message.includes("timeout")) { + console.error(`\nAuthentication timed out.`); + console.error(`This usually means you closed the browser window without completing authentication.`); + console.error(`Please try again and complete the authentication flow.`); + } else { + console.error(`\nAuthentication failed: ${message}`); + } + + process.exit(1); + } + }); + } + ); + + initReq.on("error", (err: Error) => { + console.error(`Failed to connect to API: ${err.message}`); + callbackServer.server.close(); + process.exit(1); + }); + + initReq.write(initData); + initReq.end(); + + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + console.error(`Authentication error: ${message}`); + process.exit(1); + } + }); + +program + .command("add-key ") + .description("store API key") + .action(async (apiKey: string) => { + config.writeConfig({ apiKey }); + console.log(`API key saved to ${config.getConfigPath()}`); + }); + +program + .command("show-key") + .description("show API key (masked)") + .action(async () => { + const cfg = config.readConfig(); + if (!cfg.apiKey) { + console.log("No API key configured"); + console.log(`\nTo authenticate, run: pgai auth`); + return; + } + const { maskSecret } = require("../lib/util"); + console.log(`Current API key: ${maskSecret(cfg.apiKey)}`); + if (cfg.orgId) { + console.log(`Organization ID: ${cfg.orgId}`); + } + console.log(`Config location: ${config.getConfigPath()}`); + }); + +program + .command("remove-key") + .description("remove API key") + .action(async () => { + // Check both new config and legacy config + const newConfigPath = config.getConfigPath(); + const hasNewConfig = fs.existsSync(newConfigPath); + const legacyPath = path.resolve(process.cwd(), ".pgwatch-config"); + const hasLegacyConfig = fs.existsSync(legacyPath) && fs.statSync(legacyPath).isFile(); + + if (!hasNewConfig && !hasLegacyConfig) { + console.log("No API key configured"); + return; + } + + // Remove from new config + if (hasNewConfig) { + config.deleteConfigKeys(["apiKey", "orgId"]); + } + + // Remove from legacy config + if (hasLegacyConfig) { + try { + const content = fs.readFileSync(legacyPath, "utf8"); + const filtered = content + .split(/\r?\n/) + .filter((l) => !/^api_key=/.test(l)) + .join("\n") + .replace(/\n+$/g, "\n"); + fs.writeFileSync(legacyPath, filtered, "utf8"); + } catch (err) { + // If we can't read/write the legacy config, just skip it + console.warn(`Warning: Could not update legacy config: ${err instanceof Error ? err.message : String(err)}`); + } + } + + console.log("API key removed"); + console.log(`\nTo authenticate again, run: pgai auth`); + }); +mon + .command("generate-grafana-password") + .description("generate Grafana password for monitoring services") + .action(async () => { + const cfgPath = path.resolve(process.cwd(), ".pgwatch-config"); + + try { + // Generate secure password using openssl + const { stdout: password } = await execPromise( + "openssl rand -base64 12 | tr -d '\n'" + ); + const newPassword = password.trim(); + + if (!newPassword) { + console.error("Failed to generate password"); + process.exitCode = 1; + return; + } + + // Read existing config + let configContent = ""; + if (fs.existsSync(cfgPath)) { + const stats = fs.statSync(cfgPath); + if (stats.isDirectory()) { + console.error(".pgwatch-config is a directory, expected a file. Skipping read."); + } else { + configContent = fs.readFileSync(cfgPath, "utf8"); + } + } + + // Update or add grafana_password + const lines = configContent.split(/\r?\n/).filter((l) => !/^grafana_password=/.test(l)); + lines.push(`grafana_password=${newPassword}`); + + // Write back + fs.writeFileSync(cfgPath, lines.filter(Boolean).join("\n") + "\n", "utf8"); + + console.log("āœ“ New Grafana password generated and saved"); + console.log("\nNew credentials:"); + console.log(" URL: http://localhost:3000"); + console.log(" Username: monitor"); + console.log(` Password: ${newPassword}`); + console.log("\nReset Grafana to apply new password:"); + console.log(" postgres-ai mon reset grafana"); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + console.error(`Failed to generate password: ${message}`); + console.error("\nNote: This command requires 'openssl' to be installed"); + process.exitCode = 1; + } + }); +mon + .command("show-grafana-credentials") + .description("show Grafana credentials for monitoring services") + .action(async () => { + const cfgPath = path.resolve(process.cwd(), ".pgwatch-config"); + if (!fs.existsSync(cfgPath)) { + console.error("Configuration file not found. Run 'postgres-ai mon quickstart' first."); + process.exitCode = 1; + return; + } + + const stats = fs.statSync(cfgPath); + if (stats.isDirectory()) { + console.error(".pgwatch-config is a directory, expected a file. Cannot read credentials."); + process.exitCode = 1; + return; + } + + const content = fs.readFileSync(cfgPath, "utf8"); + const lines = content.split(/\r?\n/); + let password = ""; + for (const line of lines) { + const m = line.match(/^grafana_password=(.+)$/); + if (m) { + password = m[1].trim(); + break; + } + } + if (!password) { + console.error("Grafana password not found in configuration"); + process.exitCode = 1; + return; + } + console.log("\nGrafana credentials:"); + console.log(" URL: http://localhost:3000"); + console.log(" Username: monitor"); + console.log(` Password: ${password}`); + console.log(""); + }); + +/** + * Interpret escape sequences in a string (e.g., \n -> newline) + * Note: In regex, to match literal backslash-n, we need \\n in the pattern + * which requires \\\\n in the JavaScript string literal + */ +function interpretEscapes(str: string): string { + // First handle double backslashes by temporarily replacing them + // Then handle other escapes, then restore double backslashes as single + return str + .replace(/\\\\/g, '\x00') // Temporarily mark double backslashes + .replace(/\\n/g, '\n') // Match literal backslash-n (\\\\n in JS string -> \\n in regex -> matches \n) + .replace(/\\t/g, '\t') + .replace(/\\r/g, '\r') + .replace(/\\"/g, '"') + .replace(/\\'/g, "'") + .replace(/\x00/g, '\\'); // Restore double backslashes as single +} + +// Issues management +const issues = program.command("issues").description("issues management"); + +issues + .command("list") + .description("list issues") + .option("--debug", "enable debug output") + .option("--json", "output raw JSON") + .action(async (opts: { debug?: boolean; json?: boolean }) => { + try { + const rootOpts = program.opts(); + const cfg = config.readConfig(); + const { apiKey } = getConfig(rootOpts); + if (!apiKey) { + console.error("API key is required. Run 'pgai auth' first or set --api-key."); + process.exitCode = 1; + return; + } + + const { apiBaseUrl } = resolveBaseUrls(rootOpts, cfg); + + const result = await fetchIssues({ apiKey, apiBaseUrl, debug: !!opts.debug }); + const trimmed = Array.isArray(result) + ? (result as any[]).map((r) => ({ + id: (r as any).id, + title: (r as any).title, + status: (r as any).status, + created_at: (r as any).created_at, + })) + : result; + printResult(trimmed, opts.json); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + console.error(message); + process.exitCode = 1; + } + }); + +issues + .command("view ") + .description("view issue details and comments") + .option("--debug", "enable debug output") + .option("--json", "output raw JSON") + .action(async (issueId: string, opts: { debug?: boolean; json?: boolean }) => { + try { + const rootOpts = program.opts(); + const cfg = config.readConfig(); + const { apiKey } = getConfig(rootOpts); + if (!apiKey) { + console.error("API key is required. Run 'pgai auth' first or set --api-key."); + process.exitCode = 1; + return; + } + + const { apiBaseUrl } = resolveBaseUrls(rootOpts, cfg); + + const issue = await fetchIssue({ apiKey, apiBaseUrl, issueId, debug: !!opts.debug }); + if (!issue) { + console.error("Issue not found"); + process.exitCode = 1; + return; + } + + const comments = await fetchIssueComments({ apiKey, apiBaseUrl, issueId, debug: !!opts.debug }); + const combined = { issue, comments }; + printResult(combined, opts.json); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + console.error(message); + process.exitCode = 1; + } + }); + +issues + .command("post_comment ") + .description("post a new comment to an issue") + .option("--parent ", "parent comment id") + .option("--debug", "enable debug output") + .option("--json", "output raw JSON") + .action(async (issueId: string, content: string, opts: { parent?: string; debug?: boolean; json?: boolean }) => { + try { + // Interpret escape sequences in content (e.g., \n -> newline) + if (opts.debug) { + // eslint-disable-next-line no-console + console.log(`Debug: Original content: ${JSON.stringify(content)}`); + } + content = interpretEscapes(content); + if (opts.debug) { + // eslint-disable-next-line no-console + console.log(`Debug: Interpreted content: ${JSON.stringify(content)}`); + } + + const rootOpts = program.opts(); + const cfg = config.readConfig(); + const { apiKey } = getConfig(rootOpts); + if (!apiKey) { + console.error("API key is required. Run 'pgai auth' first or set --api-key."); + process.exitCode = 1; + return; + } + + const { apiBaseUrl } = resolveBaseUrls(rootOpts, cfg); + + const result = await createIssueComment({ + apiKey, + apiBaseUrl, + issueId, + content, + parentCommentId: opts.parent, + debug: !!opts.debug, + }); + printResult(result, opts.json); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + console.error(message); + process.exitCode = 1; + } + }); + +// MCP server +const mcp = program.command("mcp").description("MCP server integration"); + +mcp + .command("start") + .description("start MCP stdio server") + .option("--debug", "enable debug output") + .action(async (opts: { debug?: boolean }) => { + const rootOpts = program.opts(); + await startMcpServer(rootOpts, { debug: !!opts.debug }); + }); + +mcp + .command("install [client]") + .description("install MCP server configuration for AI coding tool") + .action(async (client?: string) => { + const supportedClients = ["cursor", "claude-code", "windsurf", "codex"]; + + // If no client specified, prompt user to choose + if (!client) { + console.log("Available AI coding tools:"); + console.log(" 1. Cursor"); + console.log(" 2. Claude Code"); + console.log(" 3. Windsurf"); + console.log(" 4. 
Codex"); + console.log(""); + + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + const answer = await new Promise((resolve) => { + rl.question("Select your AI coding tool (1-4): ", resolve); + }); + rl.close(); + + const choices: Record = { + "1": "cursor", + "2": "claude-code", + "3": "windsurf", + "4": "codex" + }; + + client = choices[answer.trim()]; + if (!client) { + console.error("Invalid selection"); + process.exitCode = 1; + return; + } + } + + client = client.toLowerCase(); + + if (!supportedClients.includes(client)) { + console.error(`Unsupported client: ${client}`); + console.error(`Supported clients: ${supportedClients.join(", ")}`); + process.exitCode = 1; + return; + } + + try { + // Get the path to the current pgai executable + let pgaiPath: string; + try { + const execPath = await execPromise("which pgai"); + pgaiPath = execPath.stdout.trim(); + } catch { + // Fallback to just "pgai" if which fails + pgaiPath = "pgai"; + } + + // Claude Code uses its own CLI to manage MCP servers + if (client === "claude-code") { + console.log("Installing PostgresAI MCP server for Claude Code..."); + + try { + const { stdout, stderr } = await execPromise( + `claude mcp add -s user postgresai ${pgaiPath} mcp start` + ); + + if (stdout) console.log(stdout); + if (stderr) console.error(stderr); + + console.log(""); + console.log("Successfully installed PostgresAI MCP server for Claude Code"); + console.log(""); + console.log("Next steps:"); + console.log(" 1. Restart Claude Code to load the new configuration"); + console.log(" 2. The PostgresAI MCP server will be available as 'postgresai'"); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + console.error("Failed to install MCP server using Claude CLI"); + console.error(message); + console.error(""); + console.error("Make sure the 'claude' CLI tool is installed and in your PATH"); + console.error("See: https://docs.anthropic.com/en/docs/build-with-claude/mcp"); + process.exitCode = 1; + } + return; + } + + // For other clients (Cursor, Windsurf, Codex), use JSON config editing + const homeDir = os.homedir(); + let configPath: string; + let configDir: string; + + // Determine config file location based on client + switch (client) { + case "cursor": + configPath = path.join(homeDir, ".cursor", "mcp.json"); + configDir = path.dirname(configPath); + break; + + case "windsurf": + configPath = path.join(homeDir, ".windsurf", "mcp.json"); + configDir = path.dirname(configPath); + break; + + case "codex": + configPath = path.join(homeDir, ".codex", "mcp.json"); + configDir = path.dirname(configPath); + break; + + default: + console.error(`Configuration not implemented for: ${client}`); + process.exitCode = 1; + return; + } + + // Ensure config directory exists + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }); + } + + // Read existing config or create new one + let config: any = { mcpServers: {} }; + if (fs.existsSync(configPath)) { + try { + const content = fs.readFileSync(configPath, "utf8"); + config = JSON.parse(content); + if (!config.mcpServers) { + config.mcpServers = {}; + } + } catch (err) { + console.error(`Warning: Could not parse existing config, creating new one`); + } + } + + // Add or update PostgresAI MCP server configuration + config.mcpServers.postgresai = { + command: pgaiPath, + args: ["mcp", "start"] + }; + + // Write updated config + fs.writeFileSync(configPath, JSON.stringify(config, null, 2), "utf8"); + + 
console.log(`āœ“ PostgresAI MCP server configured for ${client}`); + console.log(` Config file: ${configPath}`); + console.log(""); + console.log("Please restart your AI coding tool to activate the MCP server"); + + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`Failed to install MCP server: ${message}`); + process.exitCode = 1; + } + }); + +program.parseAsync(process.argv); + diff --git a/cli/lib/auth-server.ts b/cli/lib/auth-server.ts new file mode 100644 index 0000000..04442a6 --- /dev/null +++ b/cli/lib/auth-server.ts @@ -0,0 +1,267 @@ +import * as http from "http"; +import { URL } from "url"; + +/** + * OAuth callback result + */ +export interface CallbackResult { + code: string; + state: string; +} + +/** + * Callback server structure + */ +export interface CallbackServer { + server: http.Server; + promise: Promise; + getPort: () => number; +} + +/** + * Simple HTML escape utility + * @param str - String to escape + * @returns Escaped string + */ +function escapeHtml(str: string | null): string { + if (!str) return ""; + return String(str) + .replace(/&/g, "&") + .replace(//g, ">") + .replace(/"/g, """) + .replace(/'/g, "'"); +} + +/** + * Create and start callback server, returning server object and promise + * @param port - Port to listen on (0 for random available port) + * @param expectedState - Expected state parameter for CSRF protection + * @param timeoutMs - Timeout in milliseconds + * @returns Server object with promise and getPort function + */ +export function createCallbackServer( + port: number = 0, + expectedState: string | null = null, + timeoutMs: number = 300000 +): CallbackServer { + let resolved = false; + let server: http.Server | null = null; + let actualPort = port; + let resolveCallback: (value: CallbackResult) => void; + let rejectCallback: (reason: Error) => void; + + const promise = new Promise((resolve, reject) => { + resolveCallback = resolve; + rejectCallback = reject; + }); + + // Timeout handler + const timeout = setTimeout(() => { + if (!resolved) { + resolved = true; + if (server) { + server.close(); + } + rejectCallback(new Error("Authentication timeout. Please try again.")); + } + }, timeoutMs); + + // Request handler + const requestHandler = (req: http.IncomingMessage, res: http.ServerResponse): void => { + if (resolved) { + return; + } + + // Only handle /callback path + if (!req.url || !req.url.startsWith("/callback")) { + res.writeHead(404, { "Content-Type": "text/plain" }); + res.end("Not Found"); + return; + } + + try { + const url = new URL(req.url, `http://localhost:${actualPort}`); + const code = url.searchParams.get("code"); + const state = url.searchParams.get("state"); + const error = url.searchParams.get("error"); + const errorDescription = url.searchParams.get("error_description"); + + // Handle OAuth error + if (error) { + resolved = true; + clearTimeout(timeout); + + res.writeHead(400, { "Content-Type": "text/html" }); + res.end(` + + + + Authentication failed + + + +
+          <h1>Authentication failed</h1>
+          <p>Error: ${escapeHtml(error)}</p>
+          ${errorDescription ? `<p>Description: ${escapeHtml(errorDescription)}</p>` : ""}
+          <p>You can close this window and return to your terminal.</p>
+        `);
+
+        if (server) {
+          server.close();
+        }
+        rejectCallback(new Error(`OAuth error: ${error}${errorDescription ? ` - ${errorDescription}` : ""}`));
+        return;
+      }
+
+      // Validate required parameters
+      if (!code || !state) {
+        res.writeHead(400, { "Content-Type": "text/html" });
+        res.end(`
+          <h1>Authentication failed</h1>
+          <p>Missing required parameters (code or state).</p>
+          <p>You can close this window and return to your terminal.</p>
+        `);
+        return;
+      }
+
+      // Validate state (CSRF protection)
+      if (expectedState && state !== expectedState) {
+        resolved = true;
+        clearTimeout(timeout);
+
+        res.writeHead(400, { "Content-Type": "text/html" });
+        res.end(`
+          <h1>Authentication failed</h1>
+          <p>Invalid state parameter (possible CSRF attack).</p>
+          <p>You can close this window and return to your terminal.</p>
+        `);
+
+        if (server) {
+          server.close();
+        }
+        rejectCallback(new Error("State mismatch (possible CSRF attack)"));
+        return;
+      }
+
+      // Success!
+      resolved = true;
+      clearTimeout(timeout);
+
+      res.writeHead(200, { "Content-Type": "text/html" });
+      res.end(`
+        <h1>Authentication successful</h1>
+        <p>You have successfully authenticated the PostgresAI CLI.</p>
+        <p>You can close this window and return to your terminal.</p>
+ + + `); + + if (server) { + server.close(); + } + resolveCallback({ code, state }); + } catch (err) { + if (!resolved) { + resolved = true; + clearTimeout(timeout); + res.writeHead(500, { "Content-Type": "text/plain" }); + res.end("Internal Server Error"); + if (server) { + server.close(); + } + rejectCallback(err instanceof Error ? err : new Error(String(err))); + } + } + }; + + // Create server + server = http.createServer(requestHandler); + + server.on("error", (err: Error) => { + if (!resolved) { + resolved = true; + clearTimeout(timeout); + rejectCallback(err); + } + }); + + server.listen(port, "127.0.0.1", () => { + const address = server?.address(); + if (address && typeof address === "object") { + actualPort = address.port; + } + }); + + return { + server, + promise, + getPort: () => { + const address = server?.address(); + return address && typeof address === "object" ? address.port : 0; + }, + }; +} + +/** + * Get the actual port the server is listening on + * @param server - HTTP server instance + * @returns Port number + */ +export function getServerPort(server: http.Server): number { + const address = server.address(); + return address && typeof address === "object" ? address.port : 0; +} + diff --git a/cli/lib/config.ts b/cli/lib/config.ts new file mode 100644 index 0000000..7e6f33c --- /dev/null +++ b/cli/lib/config.ts @@ -0,0 +1,161 @@ +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; + +/** + * Configuration object structure + */ +export interface Config { + apiKey: string | null; + baseUrl: string | null; + orgId: number | null; +} + +/** + * Get the user-level config directory path + * @returns Path to ~/.config/postgresai + */ +export function getConfigDir(): string { + const configHome = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), ".config"); + return path.join(configHome, "postgresai"); +} + +/** + * Get the user-level config file path + * @returns Path to ~/.config/postgresai/config.json + */ +export function getConfigPath(): string { + return path.join(getConfigDir(), "config.json"); +} + +/** + * Get the legacy project-local config file path + * @returns Path to .pgwatch-config in current directory + */ +export function getLegacyConfigPath(): string { + return path.resolve(process.cwd(), ".pgwatch-config"); +} + +/** + * Read configuration from file + * Tries user-level config first, then falls back to legacy project-local config + * @returns Configuration object with apiKey, baseUrl, orgId + */ +export function readConfig(): Config { + const config: Config = { + apiKey: null, + baseUrl: null, + orgId: null, + }; + + // Try user-level config first + const userConfigPath = getConfigPath(); + if (fs.existsSync(userConfigPath)) { + try { + const content = fs.readFileSync(userConfigPath, "utf8"); + const parsed = JSON.parse(content); + config.apiKey = parsed.apiKey || null; + config.baseUrl = parsed.baseUrl || null; + config.orgId = parsed.orgId || null; + return config; + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + console.error(`Warning: Failed to read config from ${userConfigPath}: ${message}`); + } + } + + // Fall back to legacy project-local config + const legacyPath = getLegacyConfigPath(); + if (fs.existsSync(legacyPath)) { + try { + const stats = fs.statSync(legacyPath); + if (stats.isFile()) { + const content = fs.readFileSync(legacyPath, "utf8"); + const lines = content.split(/\r?\n/); + for (const line of lines) { + const match = line.match(/^api_key=(.+)$/); + if (match) { + config.apiKey = match[1].trim(); + break; + } + } + } + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + console.error(`Warning: Failed to read legacy config from ${legacyPath}: ${message}`); + } + } + + return config; +} + +/** + * Write configuration to user-level config file + * @param config - Configuration object with apiKey, baseUrl, orgId + */ +export function writeConfig(config: Partial): void { + const configDir = getConfigDir(); + const configPath = getConfigPath(); + + // Ensure config directory exists + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true, mode: 0o700 }); + } + + // Read existing config and merge + let existingConfig: Record = {}; + if (fs.existsSync(configPath)) { + try { + const content = fs.readFileSync(configPath, "utf8"); + existingConfig = JSON.parse(content); + } catch (err) { + // Ignore parse errors, will overwrite + } + } + + const mergedConfig = { + ...existingConfig, + ...config, + }; + + // Write config file with restricted permissions + fs.writeFileSync(configPath, JSON.stringify(mergedConfig, null, 2) + "\n", { + mode: 0o600, + }); +} + +/** + * Delete specific keys from configuration + * @param keys - Array of keys to delete (e.g., ['apiKey']) + */ +export function deleteConfigKeys(keys: string[]): void { + const configPath = getConfigPath(); + if (!fs.existsSync(configPath)) { + return; + } + + try { + const content = fs.readFileSync(configPath, "utf8"); + const config: Record = JSON.parse(content); + + for (const key of keys) { + delete config[key]; + } + + fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + "\n", { + mode: 0o600, + }); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + console.error(`Warning: Failed to update config: ${message}`); + } +} + +/** + * Check if config file exists + * @returns True if config exists + */ +export function configExists(): boolean { + return fs.existsSync(getConfigPath()) || fs.existsSync(getLegacyConfigPath()); +} + diff --git a/cli/lib/issues.ts b/cli/lib/issues.ts new file mode 100644 index 0000000..4231778 --- /dev/null +++ b/cli/lib/issues.ts @@ -0,0 +1,354 @@ +import * as https from "https"; +import { URL } from "url"; +import { maskSecret, normalizeBaseUrl } from "./util"; + +export interface FetchIssuesParams { + apiKey: string; + apiBaseUrl: string; + debug?: boolean; +} + +export async function fetchIssues(params: FetchIssuesParams): Promise { + const { apiKey, apiBaseUrl, debug } = params; + if (!apiKey) { + throw new Error("API key is required"); + } + + const base = normalizeBaseUrl(apiBaseUrl); + const url = new URL(`${base}/issues`); + + const headers: Record = { + "access-token": apiKey, + "Prefer": "return=representation", + "Content-Type": "application/json", + }; + + if (debug) { + const debugHeaders: Record = { ...headers, "access-token": maskSecret(apiKey) }; + // eslint-disable-next-line no-console + console.log(`Debug: Resolved API base URL: ${base}`); + // eslint-disable-next-line no-console + console.log(`Debug: GET URL: ${url.toString()}`); + // eslint-disable-next-line no-console + console.log(`Debug: Auth scheme: access-token`); + // eslint-disable-next-line no-console + console.log(`Debug: Request headers: ${JSON.stringify(debugHeaders)}`); + } + + return new Promise((resolve, reject) => { + const req = https.request( + url, + { + method: "GET", + headers, + }, + (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", () => { + if (debug) { + // eslint-disable-next-line no-console + console.log(`Debug: Response status: ${res.statusCode}`); + // eslint-disable-next-line no-console + console.log(`Debug: Response headers: ${JSON.stringify(res.headers)}`); + } + if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) { + try { + const parsed = JSON.parse(data); + resolve(parsed); + } catch { + resolve(data); + } + } else { + let errMsg = `Failed to fetch issues: HTTP ${res.statusCode}`; + if (data) { + try { + const errObj = JSON.parse(data); + errMsg += `\n${JSON.stringify(errObj, null, 2)}`; + } catch { + errMsg += `\n${data}`; + } + } + reject(new Error(errMsg)); + } + }); + } + ); + + req.on("error", (err: Error) => reject(err)); + req.end(); + }); +} + + +export interface FetchIssueCommentsParams { + apiKey: string; + apiBaseUrl: string; + issueId: string; + debug?: boolean; +} + +export async function fetchIssueComments(params: FetchIssueCommentsParams): Promise { + const { apiKey, apiBaseUrl, issueId, debug } = params; + if (!apiKey) { + throw new Error("API key is required"); + } + if (!issueId) { + throw new Error("issueId is required"); + } + + const base = normalizeBaseUrl(apiBaseUrl); + const url = new URL(`${base}/issue_comments?issue_id=eq.${encodeURIComponent(issueId)}`); + + const headers: Record = { + "access-token": apiKey, + "Prefer": "return=representation", + "Content-Type": "application/json", + }; + + if (debug) { + const debugHeaders: Record = { ...headers, "access-token": maskSecret(apiKey) }; + // eslint-disable-next-line no-console + console.log(`Debug: Resolved API base URL: ${base}`); + // eslint-disable-next-line no-console + console.log(`Debug: GET URL: ${url.toString()}`); + // 
eslint-disable-next-line no-console + console.log(`Debug: Auth scheme: access-token`); + // eslint-disable-next-line no-console + console.log(`Debug: Request headers: ${JSON.stringify(debugHeaders)}`); + } + + return new Promise((resolve, reject) => { + const req = https.request( + url, + { + method: "GET", + headers, + }, + (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", () => { + if (debug) { + // eslint-disable-next-line no-console + console.log(`Debug: Response status: ${res.statusCode}`); + // eslint-disable-next-line no-console + console.log(`Debug: Response headers: ${JSON.stringify(res.headers)}`); + } + if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) { + try { + const parsed = JSON.parse(data); + resolve(parsed); + } catch { + resolve(data); + } + } else { + let errMsg = `Failed to fetch issue comments: HTTP ${res.statusCode}`; + if (data) { + try { + const errObj = JSON.parse(data); + errMsg += `\n${JSON.stringify(errObj, null, 2)}`; + } catch { + errMsg += `\n${data}`; + } + } + reject(new Error(errMsg)); + } + }); + } + ); + + req.on("error", (err: Error) => reject(err)); + req.end(); + }); +} + +export interface FetchIssueParams { + apiKey: string; + apiBaseUrl: string; + issueId: string; + debug?: boolean; +} + +export async function fetchIssue(params: FetchIssueParams): Promise { + const { apiKey, apiBaseUrl, issueId, debug } = params; + if (!apiKey) { + throw new Error("API key is required"); + } + if (!issueId) { + throw new Error("issueId is required"); + } + + const base = normalizeBaseUrl(apiBaseUrl); + const url = new URL(`${base}/issues`); + url.searchParams.set("id", `eq.${issueId}`); + url.searchParams.set("limit", "1"); + + const headers: Record = { + "access-token": apiKey, + "Prefer": "return=representation", + "Content-Type": "application/json", + }; + + if (debug) { + const debugHeaders: Record = { ...headers, "access-token": maskSecret(apiKey) }; + // eslint-disable-next-line no-console + console.log(`Debug: Resolved API base URL: ${base}`); + // eslint-disable-next-line no-console + console.log(`Debug: GET URL: ${url.toString()}`); + // eslint-disable-next-line no-console + console.log(`Debug: Auth scheme: access-token`); + // eslint-disable-next-line no-console + console.log(`Debug: Request headers: ${JSON.stringify(debugHeaders)}`); + } + + return new Promise((resolve, reject) => { + const req = https.request( + url, + { + method: "GET", + headers, + }, + (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", () => { + if (debug) { + // eslint-disable-next-line no-console + console.log(`Debug: Response status: ${res.statusCode}`); + // eslint-disable-next-line no-console + console.log(`Debug: Response headers: ${JSON.stringify(res.headers)}`); + } + if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) { + try { + const parsed = JSON.parse(data); + if (Array.isArray(parsed)) { + resolve(parsed[0] ?? 
null); + } else { + resolve(parsed); + } + } catch { + resolve(data); + } + } else { + let errMsg = `Failed to fetch issue: HTTP ${res.statusCode}`; + if (data) { + try { + const errObj = JSON.parse(data); + errMsg += `\n${JSON.stringify(errObj, null, 2)}`; + } catch { + errMsg += `\n${data}`; + } + } + reject(new Error(errMsg)); + } + }); + } + ); + + req.on("error", (err: Error) => reject(err)); + req.end(); + }); +} + +export interface CreateIssueCommentParams { + apiKey: string; + apiBaseUrl: string; + issueId: string; + content: string; + parentCommentId?: string; + debug?: boolean; +} + +export async function createIssueComment(params: CreateIssueCommentParams): Promise { + const { apiKey, apiBaseUrl, issueId, content, parentCommentId, debug } = params; + if (!apiKey) { + throw new Error("API key is required"); + } + if (!issueId) { + throw new Error("issueId is required"); + } + if (!content) { + throw new Error("content is required"); + } + + const base = normalizeBaseUrl(apiBaseUrl); + const url = new URL(`${base}/rpc/issue_comment_create`); + + const bodyObj: Record = { + issue_id: issueId, + content: content, + }; + if (parentCommentId) { + bodyObj.parent_comment_id = parentCommentId; + } + const body = JSON.stringify(bodyObj); + + const headers: Record = { + "access-token": apiKey, + "Prefer": "return=representation", + "Content-Type": "application/json", + "Content-Length": Buffer.byteLength(body).toString(), + }; + + if (debug) { + const debugHeaders: Record = { ...headers, "access-token": maskSecret(apiKey) }; + // eslint-disable-next-line no-console + console.log(`Debug: Resolved API base URL: ${base}`); + // eslint-disable-next-line no-console + console.log(`Debug: POST URL: ${url.toString()}`); + // eslint-disable-next-line no-console + console.log(`Debug: Auth scheme: access-token`); + // eslint-disable-next-line no-console + console.log(`Debug: Request headers: ${JSON.stringify(debugHeaders)}`); + // eslint-disable-next-line no-console + console.log(`Debug: Request body: ${body}`); + } + + return new Promise((resolve, reject) => { + const req = https.request( + url, + { + method: "POST", + headers, + }, + (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", () => { + if (debug) { + // eslint-disable-next-line no-console + console.log(`Debug: Response status: ${res.statusCode}`); + // eslint-disable-next-line no-console + console.log(`Debug: Response headers: ${JSON.stringify(res.headers)}`); + } + if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) { + try { + const parsed = JSON.parse(data); + resolve(parsed); + } catch { + resolve(data); + } + } else { + let errMsg = `Failed to create issue comment: HTTP ${res.statusCode}`; + if (data) { + try { + const errObj = JSON.parse(data); + errMsg += `\n${JSON.stringify(errObj, null, 2)}`; + } catch { + errMsg += `\n${data}`; + } + } + reject(new Error(errMsg)); + } + }); + } + ); + + req.on("error", (err: Error) => reject(err)); + req.write(body); + req.end(); + }); +} + + diff --git a/cli/lib/mcp-server.ts b/cli/lib/mcp-server.ts new file mode 100644 index 0000000..ede1f17 --- /dev/null +++ b/cli/lib/mcp-server.ts @@ -0,0 +1,164 @@ +import * as pkg from "../package.json"; +import * as config from "./config"; +import { fetchIssues, fetchIssueComments, createIssueComment, fetchIssue } from "./issues"; +import { resolveBaseUrls } from "./util"; + +// MCP SDK imports +import { Server } from "@modelcontextprotocol/sdk/server"; +import * as path from "path"; +// Types schemas 
will be loaded dynamically from the SDK's CJS bundle + +interface RootOptsLike { + apiKey?: string; + apiBaseUrl?: string; +} + +export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?: boolean }): Promise { + // Resolve stdio transport at runtime to avoid subpath export resolution issues + const serverEntry = require.resolve("@modelcontextprotocol/sdk/server"); + const stdioPath = path.join(path.dirname(serverEntry), "stdio.js"); + // eslint-disable-next-line @typescript-eslint/no-var-requires + const { StdioServerTransport } = require(stdioPath); + // Load schemas dynamically to avoid subpath export resolution issues + const typesPath = path.resolve(path.dirname(serverEntry), "../types.js"); + // eslint-disable-next-line @typescript-eslint/no-var-requires + const { CallToolRequestSchema, ListToolsRequestSchema } = require(typesPath); + + const server = new Server( + { name: "postgresai-mcp", version: pkg.version }, + { capabilities: { tools: {} } } + ); + + // Interpret escape sequences (e.g., \n -> newline). Input comes from JSON, but + // we still normalize common escapes for consistency. + const interpretEscapes = (str: string): string => + (str || "") + .replace(/\\n/g, "\n") + .replace(/\\t/g, "\t") + .replace(/\\r/g, "\r") + .replace(/\\"/g, '"') + .replace(/\\'/g, "'"); + + server.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: [ + { + name: "list_issues", + description: "List issues from PostgresAI API (same as CLI 'issues list')", + inputSchema: { + type: "object", + properties: { + debug: { type: "boolean", description: "Enable verbose debug logs" }, + }, + additionalProperties: false, + }, + }, + { + name: "view_issue", + description: "View a specific issue with its comments", + inputSchema: { + type: "object", + properties: { + issue_id: { type: "string", description: "Issue ID (UUID)" }, + debug: { type: "boolean", description: "Enable verbose debug logs" }, + }, + required: ["issue_id"], + additionalProperties: false, + }, + }, + { + name: "post_issue_comment", + description: "Post a new comment to an issue (optionally as a reply)", + inputSchema: { + type: "object", + properties: { + issue_id: { type: "string", description: "Issue ID (UUID)" }, + content: { type: "string", description: "Comment text (supports \\n as newline)" }, + parent_comment_id: { type: "string", description: "Parent comment ID (UUID) for replies" }, + debug: { type: "boolean", description: "Enable verbose debug logs" }, + }, + required: ["issue_id", "content"], + additionalProperties: false, + }, + }, + ], + }; + }); + + server.setRequestHandler(CallToolRequestSchema, async (req: any) => { + const toolName = req.params.name; + const args = (req.params.arguments as Record) || {}; + + const cfg = config.readConfig(); + const apiKey = (rootOpts?.apiKey || process.env.PGAI_API_KEY || cfg.apiKey || "").toString(); + const { apiBaseUrl } = resolveBaseUrls(rootOpts, cfg); + + const debug = Boolean(args.debug ?? extra?.debug); + + if (!apiKey) { + return { + content: [ + { + type: "text", + text: "API key is required. Run 'pgai auth' or set PGAI_API_KEY.", + }, + ], + isError: true, + }; + } + + try { + if (toolName === "list_issues") { + const result = await fetchIssues({ apiKey, apiBaseUrl, debug }); + const trimmed = Array.isArray(result) + ? 
(result as any[]).map((r) => ({ + id: (r as any).id, + title: (r as any).title, + status: (r as any).status, + created_at: (r as any).created_at, + })) + : result; + return { content: [{ type: "text", text: JSON.stringify(trimmed, null, 2) }] }; + } + + if (toolName === "view_issue") { + const issueId = String(args.issue_id || "").trim(); + if (!issueId) { + return { content: [{ type: "text", text: "issue_id is required" }], isError: true }; + } + const issue = await fetchIssue({ apiKey, apiBaseUrl, issueId, debug }); + if (!issue) { + return { content: [{ type: "text", text: "Issue not found" }], isError: true }; + } + const comments = await fetchIssueComments({ apiKey, apiBaseUrl, issueId, debug }); + const combined = { issue, comments }; + return { content: [{ type: "text", text: JSON.stringify(combined, null, 2) }] }; + } + + if (toolName === "post_issue_comment") { + const issueId = String(args.issue_id || "").trim(); + const rawContent = String(args.content || ""); + const parentCommentId = args.parent_comment_id ? String(args.parent_comment_id) : undefined; + if (!issueId) { + return { content: [{ type: "text", text: "issue_id is required" }], isError: true }; + } + if (!rawContent) { + return { content: [{ type: "text", text: "content is required" }], isError: true }; + } + const content = interpretEscapes(rawContent); + const result = await createIssueComment({ apiKey, apiBaseUrl, issueId, content, parentCommentId, debug }); + return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] }; + } + + throw new Error(`Unknown tool: ${toolName}`); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + return { content: [{ type: "text", text: message }], isError: true }; + } + }); + + const transport = new StdioServerTransport(); + await server.connect(transport); +} + + diff --git a/cli/lib/pkce.ts b/cli/lib/pkce.ts new file mode 100644 index 0000000..e9838de --- /dev/null +++ b/cli/lib/pkce.ts @@ -0,0 +1,79 @@ +import * as crypto from "crypto"; + +/** + * PKCE parameters for OAuth 2.0 Authorization Code Flow with PKCE + */ +export interface PKCEParams { + codeVerifier: string; + codeChallenge: string; + codeChallengeMethod: "S256"; + state: string; +} + +/** + * Generate a cryptographically random string for PKCE + * @param length - Length of the string (43-128 characters per RFC 7636) + * @returns Base64URL-encoded random string + */ +function generateRandomString(length: number = 64): string { + const bytes = crypto.randomBytes(length); + return base64URLEncode(bytes); +} + +/** + * Base64URL encode (without padding) + * @param buffer - Buffer to encode + * @returns Base64URL-encoded string + */ +function base64URLEncode(buffer: Buffer): string { + return buffer + .toString("base64") + .replace(/\+/g, "-") + .replace(/\//g, "_") + .replace(/=/g, ""); +} + +/** + * Generate PKCE code verifier + * @returns Random code verifier (43-128 characters) + */ +export function generateCodeVerifier(): string { + return generateRandomString(32); // 32 bytes = 43 chars after base64url encoding +} + +/** + * Generate PKCE code challenge from verifier + * Uses S256 method (SHA256) + * @param verifier - Code verifier string + * @returns Base64URL-encoded SHA256 hash of verifier + */ +export function generateCodeChallenge(verifier: string): string { + const hash = crypto.createHash("sha256").update(verifier).digest(); + return base64URLEncode(hash); +} + +/** + * Generate random state for CSRF protection + * @returns Random state string + */ +export 
function generateState(): string { + return generateRandomString(16); // 16 bytes = 22 chars +} + +/** + * Generate complete PKCE parameters + * @returns Object with verifier, challenge, challengeMethod, and state + */ +export function generatePKCEParams(): PKCEParams { + const verifier = generateCodeVerifier(); + const challenge = generateCodeChallenge(verifier); + const state = generateState(); + + return { + codeVerifier: verifier, + codeChallenge: challenge, + codeChallengeMethod: "S256", + state: state, + }; +} + diff --git a/cli/lib/util.ts b/cli/lib/util.ts new file mode 100644 index 0000000..afa001b --- /dev/null +++ b/cli/lib/util.ts @@ -0,0 +1,60 @@ +export function maskSecret(secret: string): string { + if (!secret) return ""; + if (secret.length <= 8) return "****"; + if (secret.length <= 16) return `${secret.slice(0, 4)}${"*".repeat(secret.length - 8)}${secret.slice(-4)}`; + return `${secret.slice(0, Math.min(12, secret.length - 8))}${"*".repeat(Math.max(4, secret.length - 16))}${secret.slice(-4)}`; +} + + +export interface RootOptsLike { + apiBaseUrl?: string; + uiBaseUrl?: string; +} + +export interface ConfigLike { + baseUrl?: string | null; +} + +export interface ResolvedBaseUrls { + apiBaseUrl: string; + uiBaseUrl: string; +} + +/** + * Normalize a base URL by trimming a single trailing slash and validating. + * @throws Error if the URL is invalid + */ +export function normalizeBaseUrl(value: string): string { + const trimmed = (value || "").replace(/\/$/, ""); + try { + // Validate + // eslint-disable-next-line no-new + new URL(trimmed); + } catch { + throw new Error(`Invalid base URL: ${value}`); + } + return trimmed; +} + +/** + * Resolve API and UI base URLs using precedence and normalize them. + * Precedence (API): opts.apiBaseUrl → env.PGAI_API_BASE_URL → cfg.baseUrl → default + * Precedence (UI): opts.uiBaseUrl → env.PGAI_UI_BASE_URL → default + */ +export function resolveBaseUrls( + opts?: RootOptsLike, + cfg?: ConfigLike, + defaults: { apiBaseUrl?: string; uiBaseUrl?: string } = {} +): ResolvedBaseUrls { + const defApi = defaults.apiBaseUrl || "https://postgres.ai/api/general/"; + const defUi = defaults.uiBaseUrl || "https://console.postgres.ai"; + + const apiCandidate = (opts?.apiBaseUrl || process.env.PGAI_API_BASE_URL || cfg?.baseUrl || defApi) as string; + const uiCandidate = (opts?.uiBaseUrl || process.env.PGAI_UI_BASE_URL || defUi) as string; + + return { + apiBaseUrl: normalizeBaseUrl(apiCandidate), + uiBaseUrl: normalizeBaseUrl(uiCandidate), + }; +} + diff --git a/cli/package-lock.json b/cli/package-lock.json new file mode 100644 index 0000000..7466c86 --- /dev/null +++ b/cli/package-lock.json @@ -0,0 +1,1218 @@ +{ + "name": "postgresai", + "version": "0.12.0-beta.6", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "postgresai", + "version": "0.12.0-beta.6", + "license": "Apache-2.0", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.20.2", + "commander": "^12.1.0", + "js-yaml": "^4.1.0", + "pg": "^8.16.3" + }, + "bin": { + "pgai": "dist/bin/postgres-ai.js", + "postgres-ai": "dist/bin/postgres-ai.js", + "postgresai": "dist/bin/postgres-ai.js" + }, + "devDependencies": { + "@types/js-yaml": "^4.0.9", + "@types/node": "^18.19.0", + "@types/pg": "^8.15.6", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.20.2.tgz", + "integrity": 
"sha512-6rqTdFt67AAAzln3NOKsXRmv5ZzPkgbfaebKBqUbts7vK1GZudqnrun5a8d3M/h955cam9RHZ6Jb4Y1XhnmFPg==", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@types/js-yaml": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", + "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/pg": { + "version": "8.15.6", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.6.tgz", + "integrity": "sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/commander": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + 
"integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", + "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": 
"^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + 
"gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", + "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/pg": { + "version": "8.16.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", + "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.9.1", + "pg-pool": "^3.10.1", + "pg-protocol": "^1.10.3", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.2.7" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz", + "integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz", + "integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz", + "integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz", + 
"integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz", + "integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.7.0", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", + "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": 
"sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": 
"sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "peerDependencies": { + "zod": "^3.24.1" 
+ } + } + } +} diff --git a/cli/package.json b/cli/package.json new file mode 100644 index 0000000..261555a --- /dev/null +++ b/cli/package.json @@ -0,0 +1,45 @@ +{ + "name": "postgresai", + "version": "0.12.0-beta.6", + "description": "postgres_ai CLI (Node.js)", + "license": "Apache-2.0", + "private": false, + "repository": { + "type": "git", + "url": "git+https://gitlab.com/postgres-ai/postgres_ai.git" + }, + "homepage": "https://gitlab.com/postgres-ai/postgres_ai", + "bugs": { + "url": "https://gitlab.com/postgres-ai/postgres_ai/-/issues" + }, + "bin": { + "postgres-ai": "./dist/bin/postgres-ai.js", + "postgresai": "./dist/bin/postgres-ai.js", + "pgai": "./dist/bin/postgres-ai.js" + }, + "type": "commonjs", + "engines": { + "node": ">=18" + }, + "scripts": { + "build": "tsc", + "prepare": "npm run build", + "start": "node ./dist/bin/postgres-ai.js --help", + "dev": "tsc --watch" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "^1.20.2", + "commander": "^12.1.0", + "js-yaml": "^4.1.0", + "pg": "^8.16.3" + }, + "devDependencies": { + "@types/js-yaml": "^4.0.9", + "@types/node": "^18.19.0", + "@types/pg": "^8.15.6", + "typescript": "^5.3.3" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/cli/tsconfig.json b/cli/tsconfig.json new file mode 100644 index 0000000..8f969b6 --- /dev/null +++ b/cli/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "node16", + "lib": ["ES2020"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "moduleResolution": "node16", + "types": ["node"] + }, + "include": [ + "bin/**/*", + "lib/**/*" + ], + "exclude": [ + "node_modules", + "dist" + ] +} + diff --git a/docs/brew-installation.md b/docs/brew-installation.md new file mode 100644 index 0000000..19ab612 --- /dev/null +++ b/docs/brew-installation.md @@ -0,0 +1,103 @@ +# Homebrew Installation for PostgresAI CLI + +This document describes how to set up and distribute the PostgresAI CLI via Homebrew. + +## For Users + +### Installation + +Once the Homebrew tap is set up, users can install with: + +```bash +# Add the PostgresAI tap +brew tap postgres-ai/tap https://gitlab.com/postgres-ai/homebrew-tap.git + +# Install postgresai +brew install postgresai + +# Verify installation +pgai --version +``` + +### Updating + +```bash +brew update +brew upgrade postgresai +``` + +### Uninstalling + +```bash +brew uninstall postgresai +brew untap postgres-ai/tap +``` + +## For Maintainers + +### Creating the Homebrew Tap Repository + +1. Create a new GitLab repository named `homebrew-tap` at: + `https://gitlab.com/postgres-ai/homebrew-tap` + +2. Add the formula file `Formula/postgresai.rb` to the repository + +3. Update the formula SHA256 after each npm publish: + ```bash + # Download the tarball + curl -L https://registry.npmjs.org/postgresai/-/postgresai-VERSION.tgz -o postgresai.tgz + + # Calculate SHA256 + shasum -a 256 postgresai.tgz + + # Update the sha256 field in the formula + ``` + +### Updating the Formula + +After publishing a new version to npm: + +1. Update the `url` with the new version number +2. Calculate and update the `sha256` hash +3. Test the formula locally: + ```bash + brew install --build-from-source Formula/postgresai.rb + brew test postgresai + ``` +4. 
Commit and push to the homebrew-tap repository + +### Testing Locally + +Before pushing to the tap: + +```bash +# Install from local formula +brew install --build-from-source Formula/postgresai.rb + +# Run tests +brew test postgresai + +# Audit the formula +brew audit --strict postgresai + +# Uninstall +brew uninstall postgresai +``` + +## Alternative: Homebrew Core + +To submit to the main Homebrew repository (more visibility but stricter requirements): + +1. Formula must meet Homebrew's acceptance criteria +2. Project should be notable/popular +3. Follow instructions at: https://docs.brew.sh/Adding-Software-to-Homebrew + +## Automation + +Consider setting up CI/CD to automatically: +1. Calculate SHA256 from the npm tarball +2. Update the formula +3. Commit to homebrew-tap repository + +This can be done in GitLab CI after a successful npm publish (see the sketch after this diff). +
diff --git a/instances.yml b/instances.yml index c905455..9c91ce7 100644 --- a/instances.yml +++ b/instances.yml @@ -2,7 +2,7 @@ conn_str: postgresql://monitor:monitor_pass@target-db:5432/target_database preset_metrics: full custom_metrics: - is_enabled: true + is_enabled: false group: default custom_tags: env: demo
diff --git a/tests/e2e.cli.sh b/tests/e2e.cli.sh new file mode 100755 index 0000000..64d3587 --- /dev/null +++ b/tests/e2e.cli.sh @@ -0,0 +1,51 @@ +#!/bin/bash +# E2E tests for postgres_ai CLI (Node.js) +# Usage: ./tests/e2e.cli.sh + +set -e + +CLI_CMD="node ./cli/dist/bin/postgres-ai.js" +MON_CMD="$CLI_CMD mon" + +echo "=== Testing service commands ===" +$MON_CMD check || true +$MON_CMD config || true +$MON_CMD update-config +$MON_CMD start +sleep 10 +$MON_CMD status +$MON_CMD logs --tail 5 grafana || true +$MON_CMD health --wait 60 || true + +echo "" +echo "=== Testing instance commands ===" +$MON_CMD targets list +$MON_CMD targets add "postgresql://monitor:monitor_pass@target-db:5432/target_database" ci-test +$MON_CMD targets list | grep -q ci-test +sleep 5 +$MON_CMD targets test ci-test || true +$MON_CMD targets remove ci-test + +echo "" +echo "=== Testing API key commands ===" +$CLI_CMD add-key "test_api_key_12345" +$CLI_CMD show-key | grep -q "test_api" +$CLI_CMD remove-key + +echo "" +echo "=== Testing Grafana commands ===" +$MON_CMD show-grafana-credentials || true +$MON_CMD generate-grafana-password || true +$MON_CMD show-grafana-credentials || true + +echo "" +echo "=== Testing service management ===" +$MON_CMD restart grafana +sleep 3 +$MON_CMD status +$MON_CMD stop +$MON_CMD clean || true + +echo "" +echo "✓ All E2E tests passed"
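
The `## Automation` section of `docs/brew-installation.md` above maps naturally onto a GitLab CI job: download the published npm tarball, recompute its SHA256, rewrite the formula, and push the change to the `homebrew-tap` repository. The snippet below is a minimal sketch, not part of this change, and it rests on several assumptions: the `publish-brew-formula` job name and `deploy` stage are made up, `HOMEBREW_TAP_TOKEN` is presumed to be a CI/CD variable holding a write-enabled token for the tap repository, and the formula's `url` is presumed to reference the npm tarball.

```yaml
# Sketch only (not part of this change). Assumptions:
#  - HOMEBREW_TAP_TOKEN is a CI/CD variable with write access to
#    gitlab.com/postgres-ai/homebrew-tap (default branch assumed to be "main")
#  - a "deploy" stage exists and runs after the npm publish job
#  - Formula/postgresai.rb's url field references the npm tarball
publish-brew-formula:
  stage: deploy
  image: alpine:3.20
  rules:
    - if: '$CI_COMMIT_TAG'
  before_script:
    - apk add --no-cache bash curl git jq
  script:
    # Read the version that was just published to npm from cli/package.json
    - VERSION=$(jq -r .version cli/package.json)
    - curl -fsSL "https://registry.npmjs.org/postgresai/-/postgresai-${VERSION}.tgz" -o postgresai.tgz
    - SHA256=$(sha256sum postgresai.tgz | cut -d' ' -f1)
    - git clone "https://oauth2:${HOMEBREW_TAP_TOKEN}@gitlab.com/postgres-ai/homebrew-tap.git"
    - cd homebrew-tap
    # Rewrite the url version and sha256 fields of the formula in place
    - sed -i "s|postgresai-[0-9][^\"]*\.tgz|postgresai-${VERSION}.tgz|" Formula/postgresai.rb
    - sed -i "s|sha256 \".*\"|sha256 \"${SHA256}\"|" Formula/postgresai.rb
    - git -c user.name=postgres-ai-ci -c user.email=ci@postgres.ai commit -am "postgresai ${VERSION}"
    - git push origin HEAD:main
```

Tying the job to `$CI_COMMIT_TAG` keeps it out of ordinary branch pipelines; making it depend on a successful npm publish job (for example via `needs:`) is the other natural trigger.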