diff --git a/.env.example b/.env.example index 23e8941..893ed93 100644 --- a/.env.example +++ b/.env.example @@ -67,7 +67,8 @@ NEXT_PUBLIC_GATEWAY_HOST= NEXT_PUBLIC_GATEWAY_PORT=18789 NEXT_PUBLIC_GATEWAY_PROTOCOL= NEXT_PUBLIC_GATEWAY_URL= -# NEXT_PUBLIC_GATEWAY_TOKEN= # Optional, set if gateway requires auth token +# Do not expose gateway tokens via NEXT_PUBLIC_* variables. +# Keep gateway auth secrets server-side only (OPENCLAW_GATEWAY_TOKEN / GATEWAY_TOKEN). # Gateway client id used in websocket handshake (role=operator UI client). NEXT_PUBLIC_GATEWAY_CLIENT_ID=openclaw-control-ui diff --git a/.github/workflows/quality-gate.yml b/.github/workflows/quality-gate.yml index d4bdf97..8a6c400 100644 --- a/.github/workflows/quality-gate.yml +++ b/.github/workflows/quality-gate.yml @@ -24,7 +24,7 @@ jobs: - name: Setup Node uses: actions/setup-node@v4 with: - node-version: 20 + node-version-file: '.nvmrc' cache: 'pnpm' - name: Install dependencies diff --git a/.gitignore b/.gitignore index dd46eb2..1e0d2b5 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,12 @@ aegis/ # Playwright test-results/ playwright-report/ +.tmp/ +.playwright-mcp/ + +# Local QA screenshots +/e2e-debug-*.png +/e2e-channels-*.png # Claude Code context files CLAUDE.md diff --git a/.node-version b/.node-version new file mode 100644 index 0000000..2bd5a0a --- /dev/null +++ b/.node-version @@ -0,0 +1 @@ +22 diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..2bd5a0a --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +22 diff --git a/Dockerfile b/Dockerfile index 792ef4a..3a48b7e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM node:20-slim AS base +FROM node:22.22.0-slim AS base RUN corepack enable && corepack prepare pnpm@latest --activate WORKDIR /app @@ -20,7 +20,14 @@ COPY --from=deps /app/node_modules ./node_modules COPY . . 
RUN pnpm build -FROM node:20-slim AS runtime +FROM node:22.22.0-slim AS runtime + +ARG MC_VERSION=dev +LABEL org.opencontainers.image.source="https://github.com/openclaw/mission-control" +LABEL org.opencontainers.image.description="Mission Control - operations dashboard" +LABEL org.opencontainers.image.licenses="MIT" +LABEL org.opencontainers.image.version="${MC_VERSION}" + WORKDIR /app ENV NODE_ENV=production RUN addgroup --system --gid 1001 nodejs && adduser --system --uid 1001 nextjs @@ -30,11 +37,11 @@ COPY --from=build /app/.next/static ./.next/static COPY --from=build /app/src/lib/schema.sql ./src/lib/schema.sql # Create data directory with correct ownership for SQLite RUN mkdir -p .data && chown nextjs:nodejs .data -RUN apt-get update && apt-get install -y curl --no-install-recommends && rm -rf /var/lib/apt/lists/* +RUN echo 'const http=require("http");const r=http.get("http://localhost:"+(process.env.PORT||3000)+"/api/status?action=health",s=>{process.exit(s.statusCode===200?0:1)});r.on("error",()=>process.exit(1));r.setTimeout(4000,()=>{r.destroy();process.exit(1)})' > /app/healthcheck.js USER nextjs ENV PORT=3000 EXPOSE 3000 ENV HOSTNAME=0.0.0.0 HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ - CMD curl -f http://localhost:${PORT:-3000}/login || exit 1 + CMD ["node", "/app/healthcheck.js"] CMD ["node", "server.js"] diff --git a/README.md b/README.md index abaa5c5..0c17b85 100644 --- a/README.md +++ b/README.md @@ -24,20 +24,47 @@ Manage agent fleets, track tasks, monitor costs, and orchestrate workflows — a Running AI agents at scale means juggling sessions, tasks, costs, and reliability across multiple models and channels. 
Mission Control gives you: -- **28 panels** — Tasks, agents, logs, tokens, memory, cron, alerts, webhooks, pipelines, and more +- **32 panels** — Tasks, agents, skills, logs, tokens, memory, security, cron, alerts, webhooks, pipelines, and more - **Real-time everything** — WebSocket + SSE push updates, smart polling that pauses when you're away - **Zero external dependencies** — SQLite database, single `pnpm start` to run, no Redis/Postgres/Docker required - **Role-based access** — Viewer, operator, and admin roles with session + API key auth -- **Quality gates** — Built-in review system that blocks task completion without sign-off +- **Quality gates** — Built-in Aegis review system that blocks task completion without sign-off +- **Recurring tasks** — Natural language scheduling ("every morning at 9am") with cron-based template spawning +- **Claude Code bridge** — Read-only integration surfaces Claude Code team tasks and configs on the dashboard +- **Skills Hub** — Browse, install, and security-scan agent skills from ClawdHub and skills.sh registries - **Multi-gateway** — Connect to multiple agent gateways simultaneously (OpenClaw, and more coming soon) ## Quick Start -> **Requires [pnpm](https://pnpm.io/installation)** — Mission Control uses pnpm for dependency management. Install it with `npm install -g pnpm` or `corepack enable`. +### One-Command Install (Docker) ```bash git clone https://github.com/builderz-labs/mission-control.git cd mission-control +bash install.sh --docker +``` + +The installer auto-generates secure credentials, starts the container, and runs an OpenClaw fleet health check. Open `http://localhost:3000` and log in with the printed credentials. + +### One-Command Install (Local) + +```bash +git clone https://github.com/builderz-labs/mission-control.git +cd mission-control +bash install.sh --local +``` + +Requires Node.js 22.x (LTS) and pnpm (auto-installed via corepack if missing). 
+ +### Manual Setup + +> **Requires [pnpm](https://pnpm.io/installation)** and **Node.js 22.x (LTS)**. +> Mission Control is validated against Node 22 across local dev, CI, Docker, and standalone deploys. Use `nvm use 22` (or your version manager equivalent) before installing or starting the app. + +```bash +git clone https://github.com/builderz-labs/mission-control.git +cd mission-control +nvm use 22 pnpm install cp .env.example .env # edit with your values pnpm dev # http://localhost:3000 @@ -46,6 +73,25 @@ pnpm dev # http://localhost:3000 Initial login is seeded from `AUTH_USER` / `AUTH_PASS` on first run. If `AUTH_PASS` contains `#`, quote it (e.g. `AUTH_PASS="my#password"`) or use `AUTH_PASS_B64`. +### Docker Hardening (Production) + +For production deployments, use the hardened compose overlay: + +```bash +docker compose -f docker-compose.yml -f docker-compose.hardened.yml up -d +``` + +This adds read-only filesystem, capability dropping, log rotation, HSTS, and network isolation. See [Security Hardening](docs/SECURITY-HARDENING.md) for the full checklist. + +### Station Doctor + +Run diagnostics on your installation: + +```bash +bash scripts/station-doctor.sh +bash scripts/security-audit.sh +``` + ## Project Status ### What Works @@ -65,7 +111,22 @@ If `AUTH_PASS` contains `#`, quote it (e.g. 
`AUTH_PASS="my#password"`) or use `A - Ed25519 device identity for secure gateway handshake - Agent SOUL system with workspace file sync and templates - Agent inter-agent messaging and comms -- Update available banner with GitHub release check +- Skills Hub with ClawdHub and skills.sh registry integration (search, install, security scan) +- Bidirectional skill sync — disk ↔ DB with SHA-256 change detection +- Local agent discovery from `~/.agents/`, `~/.codex/agents/`, `~/.claude/agents/` +- Natural language recurring tasks — schedule parser converts "every 2 hours" to cron, spawns dated child tasks +- Claude Code task bridge — read-only scanner surfaces team tasks and configs from `~/.claude/tasks/` and `~/.claude/teams/` +- Skill security scanner (prompt injection, credential leaks, data exfiltration, obfuscated content) +- Update available banner with GitHub release check and one-click self-update +- Framework adapter layer for multi-agent registration (OpenClaw, CrewAI, LangGraph, AutoGen, Claude SDK, generic) +- Multi-project task organization with per-project ticket prefixes +- Per-agent rate limiting with `x-agent-name` identity-based quotas +- Agent self-registration endpoint for autonomous agent onboarding +- Security audit panel with posture scoring, secret detection, trust scoring, and MCP call auditing +- Four-layer agent eval framework (output, trace, component, drift detection) +- Agent optimization endpoint with token efficiency, tool patterns, and fleet benchmarks +- Hook profiles (minimal/standard/strict) for tunable security strictness +- Guided onboarding wizard with credential setup, agent discovery, and security scan ### Known Limitations @@ -81,10 +142,10 @@ If `AUTH_PASS` contains `#`, quote it (e.g. `AUTH_PASS="my#password"`) or use `A ## Features ### Agent Management -Monitor agent status, spawn new sessions, view heartbeats, and manage the full agent lifecycle from registration to retirement. 
+Monitor agent status, configure models, view heartbeats, and manage the full agent lifecycle from registration to retirement. Agent detail modal with compact overview, inline model selector, and editable sub-agent configuration. ### Task Board -Kanban board with six columns (inbox → backlog → todo → in-progress → review → done), drag-and-drop, priority levels, assignments, and threaded comments. +Kanban board with six columns (inbox → assigned → in progress → review → quality review → done), drag-and-drop, priority levels, assignments, threaded comments, and inline sub-agent spawning. ### Real-time Monitoring Live activity feed, session inspector, and log viewer with filtering. WebSocket connection to OpenClaw gateway for instant event delivery. @@ -93,7 +154,10 @@ Live activity feed, session inspector, and log viewer with filtering. WebSocket Token usage dashboard with per-model breakdowns, trend charts, and cost analysis powered by Recharts. ### Background Automation -Scheduled tasks for database backups, stale record cleanup, and agent heartbeat monitoring. Configurable via UI or API. +Scheduled tasks for database backups, stale record cleanup, agent heartbeat monitoring, and recurring task spawning. Configurable via UI or API. + +### Natural Language Recurring Tasks +Create recurring tasks with natural language like "every morning at 9am" or "every 2 hours". The built-in schedule parser (zero dependencies) converts expressions to cron and stores them in task metadata. A template-clone pattern keeps the original task as a template and spawns dated child tasks (e.g., "Daily Report - Mar 07") on schedule. Each spawned task gets its own Aegis quality gate. ### Direct CLI Integration Connect Claude Code, Codex, or any CLI tool directly to Mission Control without requiring a gateway. Register connections, send heartbeats with inline token reporting, and auto-register agents. 
@@ -101,28 +165,52 @@ Connect Claude Code, Codex, or any CLI tool directly to Mission Control without ### Claude Code Session Tracking Automatically discovers and tracks local Claude Code sessions by scanning `~/.claude/projects/`. Extracts token usage, model info, message counts, cost estimates, and active status from JSONL transcripts. Scans every 60 seconds via the background scheduler. +### Claude Code Task Bridge +Read-only integration that surfaces Claude Code team tasks and team configs on the Mission Control dashboard. Scans `~/.claude/tasks/<team>/<task>.json` for structured task data (subject, status, owner, blockers) and `~/.claude/teams/<team>/config.json` for team metadata (members, lead agent, model assignments). Visible in both the Task Board (collapsible section) and Cron Management (teams overview) panels. + ### GitHub Issues Sync Inbound sync from GitHub repositories with label and assignee mapping. Synced issues appear on the task board alongside agent-created tasks. +### Skills Hub +Browse, install, and manage agent skills from local directories and external registries (ClawdHub, skills.sh). Bidirectional sync detects manual additions on disk and pushes UI edits back to `SKILL.md` files. Built-in security scanner checks for prompt injection, credential leaks, data exfiltration, obfuscated content, and dangerous shell commands before installation. Supports 5 skill roots: `~/.agents/skills`, `~/.codex/skills`, project-local `.agents/skills` and `.codex/skills`, and `~/.openclaw/skills` for gateway mode. + +### Local Agent Discovery +Automatically discovers agent definitions from `~/.agents/`, `~/.codex/agents/`, and `~/.claude/agents/` directories. Detection looks for marker files (AGENT.md, soul.md, identity.md, config.json). Discovered agents sync bidirectionally — edit in the UI and changes write back to disk. + ### Agent SOUL System Define agent personality, capabilities, and behavioral guidelines via SOUL markdown files. 
Edit in the UI or directly in workspace `soul.md` files — changes sync bidirectionally between disk and database. ### Agent Messaging -Inter-agent communication via the comms API. Agents can send messages to each other, enabling coordinated multi-agent workflows. +Session-threaded inter-agent communication via the comms API (`a2a:*`, `coord:*`, `session:*`) with coordinator inbox support and runtime tool-call visibility in the `agent-comms` feed. + +### Onboarding Wizard +Guided first-run setup wizard that walks new users through five steps: Welcome (system capabilities detection), Credentials (verify AUTH_PASS and API_KEY strength), Agent Setup (gateway connection or local Claude Code discovery), Security Scan (automated configuration audit with pass/fail checks), and Get Started (quick links to key panels). Automatically appears on first login and can be re-launched from Settings. Progress is persisted per-user so you can resume where you left off. + +### Security Audit & Agent Trust +Dedicated security audit panel with real-time posture scoring (0-100), secret detection across agent messages, MCP tool call auditing, injection attempt tracking, and per-agent trust scores. Hook profiles (minimal/standard/strict) let operators tune security strictness per deployment. Auth failures, rate limit hits, and injection attempts are logged automatically as security events. + +### Agent Eval Framework +Four-layer evaluation stack for agent quality: output evals (task completion scoring against golden datasets), trace evals (convergence scoring — >3.0 indicates looping), component evals (tool reliability with p50/p95/p99 latency from MCP call logs), and drift detection (10% threshold vs 4-week rolling baseline). Manage golden datasets and trigger eval runs via API or UI. + +### Agent Optimization +API endpoint agents can call for self-improvement recommendations. 
Analyzes token efficiency (tokens/task vs fleet average), tool usage patterns (success/failure rates, redundant calls), and generates prioritized recommendations. Fleet benchmarks provide percentile rankings across all agents. ### Integrations Outbound webhooks with delivery history, configurable alert rules with cooldowns, and multi-gateway connection management. Optional 1Password CLI integration for secret management. ### Workspace Management -Workspaces (tenant instances) are created and managed through the **Super Admin** panel, accessible from the sidebar under **Admin > Super Admin**. From there, admins can: +Workspaces (tenant instances) are managed via the `/api/super/*` API endpoints. Admins can: - **Create** new client instances (slug, display name, Linux user, gateway port, plan tier) - **Monitor** provisioning jobs and their step-by-step progress - **Decommission** tenants with optional cleanup of state directories and Linux users -Each workspace gets its own isolated environment with a dedicated OpenClaw gateway, state directory, and workspace root. See the [Super Admin API](#api-overview) endpoints under `/api/super/*` for programmatic access. +Each workspace gets its own isolated environment with a dedicated OpenClaw gateway, state directory, and workspace root. ### Update Checker -Automatic GitHub release check notifies you when a new version is available, displayed as a banner in the dashboard. +Automatic GitHub release check notifies you when a new version is available, displayed as a banner in the dashboard. Admins can trigger a one-click update directly from the banner — the server runs `git pull`, `pnpm install`, and `pnpm build`, then prompts for a restart. Dirty working trees are rejected, and all updates are logged to the audit trail. + +### Framework Adapters +Built-in adapter layer for multi-agent registration across frameworks. Supported adapters: OpenClaw, CrewAI, LangGraph, AutoGen, Claude SDK, and a generic fallback. 
Each adapter normalizes agent registration, heartbeats, and task reporting to a common interface. ## Architecture @@ -133,22 +221,35 @@ mission-control/ │ ├── app/ │ │ ├── page.tsx # SPA shell — routes all panels │ │ ├── login/page.tsx # Login page -│ │ └── api/ # 66 REST API routes +│ │ └── api/ # 101 REST API routes │ ├── components/ │ │ ├── layout/ # NavRail, HeaderBar, LiveFeed │ │ ├── dashboard/ # Overview dashboard -│ │ ├── panels/ # 28 feature panels +│ │ ├── panels/ # 32 feature panels │ │ └── chat/ # Agent chat UI │ ├── lib/ │ │ ├── auth.ts # Session + API key auth, RBAC │ │ ├── db.ts # SQLite (better-sqlite3, WAL mode) │ │ ├── claude-sessions.ts # Local Claude Code session scanner -│ │ ├── migrations.ts # 21 schema migrations +│ │ ├── claude-tasks.ts # Claude Code team task/config scanner +│ │ ├── schedule-parser.ts # Natural language → cron expression parser +│ │ ├── recurring-tasks.ts # Recurring task template spawner +│ │ ├── migrations.ts # 39 schema migrations │ │ ├── scheduler.ts # Background task scheduler │ │ ├── webhooks.ts # Outbound webhook delivery │ │ ├── websocket.ts # Gateway WebSocket client │ │ ├── device-identity.ts # Ed25519 device identity for gateway auth -│ │ └── agent-sync.ts # OpenClaw config → MC database sync +│ │ ├── agent-sync.ts # OpenClaw config → MC database sync +│ │ ├── skill-sync.ts # Bidirectional disk ↔ DB skill sync +│ │ ├── skill-registry.ts # ClawdHub + skills.sh registry client & security scanner +│ │ ├── local-agent-sync.ts # Local agent discovery from ~/.agents, ~/.codex, ~/.claude +│ │ ├── secret-scanner.ts # Regex-based secret detection (AWS, GitHub, Stripe, JWT, PEM, DB URIs) +│ │ ├── security-events.ts # Security event logger + agent trust scoring +│ │ ├── mcp-audit.ts # MCP tool call auditing +│ │ ├── agent-evals.ts # Four-layer agent eval framework +│ │ ├── agent-optimizer.ts # Agent optimization engine +│ │ ├── hook-profiles.ts # Security strictness profiles (minimal/standard/strict) +│ │ └── adapters/ # 
Framework adapters (openclaw, crewai, langgraph, autogen, claude-sdk, generic) │ └── store/index.ts # Zustand state management └── .data/ # Runtime data (SQLite DB, token logs) ``` @@ -166,7 +267,7 @@ mission-control/ | Real-time | WebSocket + Server-Sent Events | | Auth | scrypt hashing, session tokens, RBAC | | Validation | Zod 4 | -| Testing | Vitest + Playwright (148 E2E tests) | +| Testing | Vitest (282 unit) + Playwright (295 E2E) | ## Authentication @@ -211,7 +312,9 @@ All endpoints require authentication unless noted. Full reference below. | `POST` | `/api/agents` | operator | Register/update agent | | `GET` | `/api/agents/[id]` | viewer | Agent details | | `GET` | `/api/agents/[id]/attribution` | viewer | Self-scope attribution/audit/cost report (`?privileged=1` admin override) | -| `POST` | `/api/agents/sync` | operator | Sync agents from openclaw.json | +| `POST` | `/api/agents/sync` | operator | Sync agents from openclaw.json or local disk (`?source=local`) | +| `POST` | `/api/agents/register` | viewer | Agent self-registration (idempotent, rate-limited) | +| `GET/POST` | `/api/adapters` | viewer/operator | List adapters / Framework-agnostic agent action dispatch | | `GET/PUT` | `/api/agents/[id]/soul` | operator | Agent SOUL content (reads from workspace, writes to both) | | `GET/POST` | `/api/agents/comms` | operator | Agent inter-agent communication | | `POST` | `/api/agents/message` | operator | Send message to agent | @@ -235,6 +338,19 @@ All endpoints require authentication unless noted. Full reference below. - `hours`: integer window `1..720` (default `24`) - `section`: comma-separated subset of `identity,audit,mutations,cost` (default all) +
+Security & Evals + +| Method | Path | Role | Description | +|--------|------|------|-------------| +| `GET` | `/api/security-audit` | admin | Security posture, events, trust scores, MCP audit (`?timeframe=day`) | +| `GET` | `/api/security-scan` | admin | Static security configuration scan | +| `GET` | `/api/agents/optimize` | operator | Agent optimization recommendations (`?agent=&hours=24`) | +| `GET` | `/api/agents/evals` | operator | Agent eval results (`?agent=`, `?action=history&weeks=4`) | +| `POST` | `/api/agents/evals` | operator | Trigger eval run (`action: 'run'`) or manage golden datasets (`action: 'golden-set'`) | + +
+
Monitoring @@ -259,6 +375,7 @@ All endpoints require authentication unless noted. Full reference below. | `GET/PUT` | `/api/settings` | admin | App settings | | `GET/PUT` | `/api/gateway-config` | admin | OpenClaw gateway config | | `GET/POST` | `/api/cron` | admin | Cron management | +| `GET/POST` | `/api/onboarding` | viewer | Onboarding wizard state and step progression |
@@ -297,7 +414,7 @@ All endpoints require authentication unless noted. Full reference below.
-Super Admin (Workspace/Tenant Management) +Workspace/Tenant Management | Method | Path | Role | Description | |--------|------|------|-------------| @@ -310,6 +427,23 @@ All endpoints require authentication unless noted. Full reference below.
+
+Skills + +| Method | Path | Role | Description | +|--------|------|------|-------------| +| `GET` | `/api/skills` | viewer | List skills (DB-backed with filesystem fallback) | +| `GET` | `/api/skills?mode=content&source=…&name=…` | viewer | Read SKILL.md content with inline security report | +| `GET` | `/api/skills?mode=check&source=…&name=…` | viewer | On-demand security scan | +| `POST` | `/api/skills` | operator | Create skill | +| `PUT` | `/api/skills` | operator | Update skill content | +| `DELETE` | `/api/skills` | operator | Delete skill | +| `GET` | `/api/skills/registry?source=…&q=…` | viewer | Search external registry (ClawdHub, skills.sh) | +| `POST` | `/api/skills/registry` | admin | Install skill from registry | +| `PUT` | `/api/skills/registry` | viewer | Security-check content without installing | + +
+
Direct CLI @@ -351,6 +485,8 @@ All endpoints require authentication unless noted. Full reference below. |--------|------|------|-------------| | `GET` | `/api/claude/sessions` | viewer | List discovered sessions (filter: `?active=1`, `?project=`) | | `POST` | `/api/claude/sessions` | operator | Trigger manual session scan | +| `GET` | `/api/claude-tasks` | viewer | List Claude Code team tasks and configs (`?force=true` to bypass cache) | +| `GET` | `/api/schedule-parse` | viewer | Parse natural language schedule (`?input=every+2+hours`) |
@@ -402,15 +538,11 @@ See [`.env.example`](.env.example) for the complete list. Key variables: ### Workspace Creation Flow -To add a new workspace/client instance in the UI: +To add a new workspace/client instance, use the `/api/super/tenants` endpoint or the Workspaces panel (if enabled): -1. Open `Workspaces` from the left navigation. -2. Expand `Show Create Client Instance`. -3. Fill tenant/workspace fields (`slug`, `display_name`, optional ports/gateway owner). -4. Click `Create + Queue`. -5. Approve/run the generated provisioning job in the same panel. - -`Workspaces` and `Super Admin` currently point to the same provisioning control plane. +1. Provide tenant/workspace fields (`slug`, `display_name`, optional ports/gateway owner). +2. The system queues a bootstrap provisioning job. +3. Approve/run the provisioning job via `/api/super/provision-jobs/[id]/action`. ### Projects and Ticket Prefixes @@ -529,8 +661,16 @@ See [open issues](https://github.com/builderz-labs/mission-control/issues) for p **Up next:** +- [x] Workspace isolation for multi-team usage ([#75](https://github.com/builderz-labs/mission-control/issues/75)) +- [x] Framework adapter layer — multi-agent registration across OpenClaw, CrewAI, LangGraph, AutoGen, Claude SDK, and generic +- [x] Self-update mechanism — admin-only one-click update with audit logging +- [x] Multi-project task organization with per-project ticket prefixes +- [x] Skills Hub — browse, install, and security-scan skills from ClawdHub and skills.sh registries +- [x] Bidirectional skill sync — disk ↔ DB with SHA-256 change detection (60s scheduler) +- [x] Local agent discovery — auto-detect agents from `~/.agents/`, `~/.codex/agents/`, `~/.claude/agents/` +- [x] Natural language recurring tasks with cron-based template spawning +- [x] Claude Code task bridge — read-only team task and config integration - [ ] Agent-agnostic gateway support — connect any orchestration framework (OpenClaw, ZeroClaw, OpenFang, NeoBot, IronClaw, 
etc.), not just OpenClaw -- [ ] Workspace isolation for multi-team usage ([#75](https://github.com/builderz-labs/mission-control/issues/75)) - [ ] **[Flight Deck](https://github.com/splitlabs/flight-deck)** — native desktop companion app (Tauri v2) with real PTY terminal grid, stall inbox with native OS notifications, and system tray HUD. Currently in private beta. - [ ] First-class per-agent cost breakdowns — dedicated panel with per-agent token usage and spend (currently derivable from per-session data) - [ ] OAuth approval UI improvements @@ -544,6 +684,16 @@ Contributions are welcome. See [CONTRIBUTING.md](CONTRIBUTING.md) for setup inst To report a vulnerability, see [SECURITY.md](SECURITY.md). +## ❤️ Support the Project + +If you find this project useful, consider supporting my open-source work. + +[![Buy Me A Coffee](https://img.shields.io/badge/Buy%20Me%20a%20Coffee-support-orange?logo=buymeacoffee)](https://buymeacoffee.com/nyk_builderz) + +**Solana donations** + +`BYLu8XD8hGDUtdRBWpGWu5HKoiPrWqCxYFSh4oxXuvPg` + ## License [MIT](LICENSE) © 2026 [Builderz Labs](https://github.com/builderz-labs) diff --git a/SECURITY.md b/SECURITY.md index 75c8ce3..4f61543 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -27,5 +27,42 @@ Mission Control handles authentication credentials and API keys. When deploying: - Always set strong values for `AUTH_PASS` and `API_KEY`. - Use `MC_ALLOWED_HOSTS` to restrict network access in production. - Keep `.env` files out of version control (already in `.gitignore`). -- Enable `MC_COOKIE_SECURE=true` when serving over HTTPS. +- Enable `MC_COOKIE_SECURE=1` when serving over HTTPS. - Review the [Environment Variables](README.md#environment-variables) section for all security-relevant configuration. + +## Hardening Checklist + +Run `bash scripts/security-audit.sh` to check your deployment automatically. 
+ +### Credentials +- [ ] `AUTH_PASS` is a strong, unique password (12+ characters) +- [ ] `API_KEY` is a random hex string (not the default) +- [ ] `AUTH_SECRET` is a random string +- [ ] `.env` file permissions are `600` (owner read/write only) + +### Network +- [ ] `MC_ALLOWED_HOSTS` is configured (not `MC_ALLOW_ANY_HOST=1`) +- [ ] Dashboard is behind a reverse proxy with TLS (Caddy, nginx, Tailscale) +- [ ] `MC_ENABLE_HSTS=1` is set for HTTPS deployments +- [ ] `MC_COOKIE_SECURE=1` is set for HTTPS deployments +- [ ] `MC_COOKIE_SAMESITE=strict` + +### Docker (if applicable) +- [ ] Use the hardened compose overlay: `docker compose -f docker-compose.yml -f docker-compose.hardened.yml up` +- [ ] Container runs as non-root user (default: `nextjs`, UID 1001) +- [ ] Read-only filesystem with tmpfs for temp dirs +- [ ] All Linux capabilities dropped except `NET_BIND_SERVICE` +- [ ] `no-new-privileges` security option enabled +- [ ] Log rotation configured (max-size, max-file) + +### OpenClaw Gateway +- [ ] Gateway bound to localhost (`OPENCLAW_GATEWAY_HOST=127.0.0.1`) +- [ ] Gateway token configured (`OPENCLAW_GATEWAY_TOKEN`) +- [ ] Gateway token NOT exposed via `NEXT_PUBLIC_*` variables + +### Monitoring +- [ ] Rate limiting is active (`MC_DISABLE_RATE_LIMIT` is NOT set) +- [ ] Audit logging is enabled with appropriate retention +- [ ] Regular database backups configured + +See [docs/SECURITY-HARDENING.md](docs/SECURITY-HARDENING.md) for the full hardening guide. diff --git a/SKILL.md b/SKILL.md new file mode 100644 index 0000000..c3b19e3 --- /dev/null +++ b/SKILL.md @@ -0,0 +1,278 @@ +--- +name: mission-control +description: "Interact with Mission Control — AI agent orchestration dashboard. Use when registering agents, managing tasks, syncing skills, or querying agent/task status via MC APIs." 
+--- + +# Mission Control Agent Skill + +Mission Control (MC) is an AI agent orchestration dashboard with real-time SSE/WebSocket, a skill registry, framework adapters, and RBAC. This skill teaches agents how to interact with MC APIs programmatically. + +## Quick Start + +**Base URL:** `http://localhost:3000` (default Next.js dev) or your deployed host. + +**Auth header:** `x-api-key: ` + +**Register + heartbeat in two calls:** + +```bash +# 1. Register +curl -X POST http://localhost:3000/api/adapters \ + -H "Content-Type: application/json" \ + -H "x-api-key: $MC_API_KEY" \ + -d '{ + "framework": "generic", + "action": "register", + "payload": { "agentId": "my-agent-01", "name": "My Agent" } + }' + +# 2. Heartbeat (repeat every 5 minutes) +curl -X POST http://localhost:3000/api/adapters \ + -H "Content-Type: application/json" \ + -H "x-api-key: $MC_API_KEY" \ + -d '{ + "framework": "generic", + "action": "heartbeat", + "payload": { "agentId": "my-agent-01", "status": "online" } + }' +``` + +## Authentication + +MC supports two auth methods: + +| Method | Header | Use Case | +|--------|--------|----------| +| API Key | `x-api-key: ` or `Authorization: Bearer ` | Agents, scripts, CI/CD | +| Session cookie | `Cookie: mc-session=` | Browser UI | + +**Roles (hierarchical):** `viewer` < `operator` < `admin` + +- **viewer** — Read-only access (GET endpoints) +- **operator** — Create/update agents, tasks, skills, use adapters +- **admin** — Full access including user management + +API key auth grants `admin` role by default. The key is set via `API_KEY` env var or the `security.api_key` DB setting. + +Agents can identify themselves with the optional `X-Agent-Name` header for attribution in audit logs. + +## Agent Lifecycle + +``` +register → heartbeat (5m interval) → fetch assignments → report task status → disconnect +``` + +All lifecycle actions go through the adapter protocol (`POST /api/adapters`). + +### 1. 
Register + +```json +{ + "framework": "generic", + "action": "register", + "payload": { + "agentId": "my-agent-01", + "name": "My Agent", + "metadata": { "version": "1.0", "capabilities": ["code", "review"] } + } +} +``` + +### 2. Heartbeat + +Send every ~5 minutes to stay marked as online. + +```json +{ + "framework": "generic", + "action": "heartbeat", + "payload": { + "agentId": "my-agent-01", + "status": "online", + "metrics": { "tasks_completed": 5, "uptime_seconds": 3600 } + } +} +``` + +### 3. Fetch Assignments + +Returns up to 5 pending tasks sorted by priority (critical → low), then due date. + +```json +{ + "framework": "generic", + "action": "assignments", + "payload": { "agentId": "my-agent-01" } +} +``` + +**Response:** + +```json +{ + "assignments": [ + { "taskId": "42", "description": "Fix login bug\nUsers cannot log in with SSO", "priority": 1 } + ], + "framework": "generic" +} +``` + +### 4. Report Task Progress + +```json +{ + "framework": "generic", + "action": "report", + "payload": { + "taskId": "42", + "agentId": "my-agent-01", + "progress": 75, + "status": "in_progress", + "output": "Fixed SSO handler, running tests..." + } +} +``` + +`status` values: `in_progress`, `done`, `failed`, `blocked` + +### 5. Disconnect + +```json +{ + "framework": "generic", + "action": "disconnect", + "payload": { "agentId": "my-agent-01" } +} +``` + +## Core API Reference + +### Agents — `/api/agents` + +| Method | Min Role | Description | +|--------|----------|-------------| +| GET | viewer | List agents. Query: `?status=online&role=dev&limit=50&offset=0` | +| POST | operator | Create agent. Body: `{ name, role, status?, config?, template?, session_key?, soul_content? }` | +| PUT | operator | Update agent. Body: `{ name, status?, role?, config?, session_key?, soul_content?, last_activity? 
}` | + +**GET response shape:** + +```json +{ + "agents": [{ + "id": 1, "name": "scout", "role": "researcher", "status": "online", + "config": {}, "taskStats": { "total": 10, "assigned": 2, "in_progress": 1, "completed": 7 } + }], + "total": 1, "page": 1, "limit": 50 +} +``` + +### Tasks — `/api/tasks` + +| Method | Min Role | Description | +|--------|----------|-------------| +| GET | viewer | List tasks. Query: `?status=in_progress&assigned_to=scout&priority=high&project_id=1&limit=50&offset=0` | +| POST | operator | Create task. Body: `{ title, description?, status?, priority?, assigned_to?, project_id?, tags?, metadata?, due_date?, estimated_hours? }` | +| PUT | operator | Bulk status update. Body: `{ tasks: [{ id, status }] }` | + +**Priority values:** `critical`, `high`, `medium`, `low` + +**Status values:** `inbox`, `assigned`, `in_progress`, `review`, `done`, `failed`, `blocked`, `cancelled` + +Note: Moving a task to `done` via PUT requires an Aegis quality review approval. + +**POST response:** + +```json +{ + "task": { + "id": 42, "title": "Fix login bug", "status": "assigned", + "priority": "high", "assigned_to": "scout", "ticket_ref": "GEN-001", + "tags": ["bug"], "metadata": {} + } +} +``` + +### Skills — `/api/skills` + +| Method | Min Role | Description | +|--------|----------|-------------| +| GET | viewer | List all skills across roots | +| GET `?mode=content&source=...&name=...` | viewer | Read a skill's SKILL.md content | +| GET `?mode=check&source=...&name=...` | viewer | Run security check on a skill | +| POST | operator | Create/upsert skill. Body: `{ source, name, content }` | +| PUT | operator | Update skill content. 
Body: `{ source, name, content }` |
+| DELETE `?source=...&name=...` | operator | Delete a skill |
+
+**Skill sources:** `user-agents`, `user-codex`, `project-agents`, `project-codex`, `openclaw`
+
+### Status — `/api/status`
+
+| Action | Min Role | Description |
+|--------|----------|-------------|
+| GET `?action=overview` | viewer | System status (uptime, memory, disk, sessions) |
+| GET `?action=dashboard` | viewer | Aggregated dashboard data with DB stats |
+| GET `?action=gateway` | viewer | Gateway process status and port check |
+| GET `?action=models` | viewer | Available AI models (catalog + local Ollama) |
+| GET `?action=health` | viewer | Health checks (gateway, disk, memory) |
+| GET `?action=capabilities` | viewer | Feature flags: gateway reachable, Claude home, subscriptions |
+
+### Adapters — `/api/adapters`
+
+| Method | Min Role | Description |
+|--------|----------|-------------|
+| GET | viewer | List available framework adapter names |
+| POST | operator | Execute adapter action (see Agent Lifecycle above) |
+
+## Framework Adapter Protocol
+
+All agent lifecycle operations use a single endpoint:
+
+```
+POST /api/adapters
+Content-Type: application/json
+x-api-key: <your-api-key>
+
+{
+  "framework": "<framework-name>",
+  "action": "<action>",
+  "payload": { ... }
+}
+```
+
+**Available frameworks:** `generic`, `openclaw`, `crewai`, `langgraph`, `autogen`, `claude-sdk`
+
+**Available actions:** `register`, `heartbeat`, `report`, `assignments`, `disconnect`
+
+All adapters implement the same `FrameworkAdapter` interface — choose the one matching your agent framework, or use `generic` as a universal fallback. 
+ +**Payload shapes by action:** + +| Action | Required Fields | Optional Fields | +|--------|----------------|-----------------| +| `register` | `agentId`, `name` | `metadata` | +| `heartbeat` | `agentId` | `status`, `metrics` | +| `report` | `taskId`, `agentId` | `progress`, `status`, `output` | +| `assignments` | `agentId` | — | +| `disconnect` | `agentId` | — | + +## Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `API_KEY` | — | API key for agent/script authentication | +| `OPENCLAW_GATEWAY_HOST` | `127.0.0.1` | Gateway host address | +| `OPENCLAW_GATEWAY_PORT` | `18789` | Gateway port | +| `MISSION_CONTROL_DB_PATH` | `.data/mission-control.db` | SQLite database path | +| `OPENCLAW_STATE_DIR` | `~/.openclaw` | OpenClaw state directory | +| `OPENCLAW_CONFIG_PATH` | `/openclaw.json` | Gateway config file path | +| `MC_CLAUDE_HOME` | `~/.claude` | Claude home directory | + +## Real-Time Events + +MC broadcasts events via SSE (`/api/events`) and WebSocket. Key event types: + +- `agent.created`, `agent.updated`, `agent.status_changed` +- `task.created`, `task.updated`, `task.status_changed` + +Subscribe to SSE for live dashboard updates when building integrations. 
diff --git a/docker-compose.hardened.yml b/docker-compose.hardened.yml new file mode 100644 index 0000000..6cd7310 --- /dev/null +++ b/docker-compose.hardened.yml @@ -0,0 +1,21 @@ +# Production hardening overlay +# Usage: docker compose -f docker-compose.yml -f docker-compose.hardened.yml up -d +services: + mission-control: + logging: + driver: json-file + options: + max-size: "10m" + max-file: "3" + environment: + - MC_ALLOWED_HOSTS=localhost,127.0.0.1 + - MC_COOKIE_SECURE=1 + - MC_COOKIE_SAMESITE=strict + - MC_ENABLE_HSTS=1 + networks: + mc-internal: + +networks: + mc-internal: + driver: bridge + internal: true diff --git a/docker-compose.yml b/docker-compose.yml index 7ae529a..fb53bc9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,7 +11,29 @@ services: required: false volumes: - mc-data:/app/.data + read_only: true + tmpfs: + - /tmp + - /app/.next/cache + cap_drop: + - ALL + cap_add: + - NET_BIND_SERVICE + security_opt: + - no-new-privileges:true + pids_limit: 256 + deploy: + resources: + limits: + memory: 512M + cpus: '1.0' + networks: + - mc-net restart: unless-stopped volumes: mc-data: + +networks: + mc-net: + driver: bridge diff --git a/docs/LANDING-PAGE-HANDOFF.md b/docs/LANDING-PAGE-HANDOFF.md new file mode 100644 index 0000000..f8a0a1f --- /dev/null +++ b/docs/LANDING-PAGE-HANDOFF.md @@ -0,0 +1,248 @@ +# Mission Control — Landing Page Handoff + +> Last updated: 2026-03-07 | Version: 1.3.0 | Branch: `fix/refactor` (bb5029e) + +This document contains all copy, stats, features, and structure needed to build or update the Mission Control landing page. Everything below reflects the current state of the shipped product. + +--- + +## Hero Section + +**Headline:** +The Open-Source Dashboard for AI Agent Orchestration + +**Subheadline:** +Manage agent fleets, track tasks, monitor costs, and orchestrate workflows — all from a single pane of glass. Zero external dependencies. One `pnpm start` to run. 
+ +**CTA:** `Get Started` -> GitHub repo | `Live Demo` -> demo instance (if available) + +**Badges:** +- MIT License +- Next.js 16 +- React 19 +- TypeScript 5.7 +- SQLite (WAL mode) +- 165 unit tests (Vitest) +- 295 E2E tests (Playwright) + +**Hero image:** `docs/mission-control.jpg` (current dashboard screenshot — should be refreshed with latest UI) + +--- + +## Key Stats (above the fold) + +| Stat | Value | +|------|-------| +| Panels | 31 feature panels | +| API routes | 98 REST endpoints | +| Schema migrations | 36 | +| Test coverage | 165 unit + 295 E2E | +| Total commits | 239+ | +| External dependencies required | 0 (SQLite only, no Redis/Postgres/Docker) | +| Auth methods | 3 (session, API key, Google OAuth) | +| Framework adapters | 6 (OpenClaw, CrewAI, LangGraph, AutoGen, Claude SDK, Generic) | + +--- + +## Feature Grid + +### 1. Task Board (Kanban) +Six-column kanban (Inbox > Assigned > In Progress > Review > Quality Review > Done) with drag-and-drop, priority levels, assignments, threaded comments, and inline sub-agent spawning. Multi-project support with per-project ticket prefixes (e.g. `PA-001`). + +### 2. Agent Management +Full lifecycle — register, heartbeat, wake, retire. Redesigned agent detail modal with compact overview, inline model selector, editable sub-agent configuration, and SOUL personality system. Local agent discovery from `~/.agents/`, `~/.codex/agents/`, `~/.claude/agents/`. + +### 3. Real-Time Monitoring +Live activity feed, session inspector, and log viewer with filtering. WebSocket + SSE push updates with smart polling that pauses when you're away. Gateway connection state with live dot indicators. + +### 4. Cost Tracking +Token usage dashboard with per-model breakdowns, trend charts, and cost analysis. Per-agent cost panels with session-level granularity. + +### 5. Quality Gates (Aegis) +Built-in review system that blocks task completion without sign-off. 
Automated Aegis quality review — scheduler polls review tasks and approves/rejects based on configurable criteria. + +### 6. Recurring Tasks +Natural language scheduling — "every morning at 9am", "every 2 hours". Zero-dependency schedule parser converts to cron. Template-clone pattern spawns dated child tasks (e.g. "Daily Report — Mar 07"). + +### 7. Task Dispatch +Scheduler polls assigned tasks and runs agents via CLI. Dispatched tasks link to agent sessions for full traceability. + +### 8. Skills Hub +Browse, install, and manage agent skills from local directories and external registries (ClawdHub, skills.sh). Built-in security scanner checks for prompt injection, credential leaks, data exfiltration, and obfuscated content. Bidirectional disk-DB sync with SHA-256 change detection. + +### 9. Claude Code Integration +- **Session tracking** — auto-discovers sessions from `~/.claude/projects/`, extracts tokens, model info, costs +- **Task bridge** — read-only integration surfaces Claude Code team tasks and configs +- **Direct CLI** — connect Claude Code, Codex, or any CLI directly without a gateway + +### 10. Memory Knowledge Graph +Visual knowledge graph for agent memory in gateway mode. Interactive node-edge visualization of agent memory relationships. + +### 11. Agent Messaging (Comms) +Session-threaded inter-agent communication via comms API (`a2a:*`, `coord:*`, `session:*`). Coordinator inbox support with runtime tool-call visibility. + +### 12. Multi-Gateway +Connect to multiple agent gateways simultaneously. OS-level gateway discovery (systemd, Tailscale Serve). Auto-connect with health probes. + +### 13. Framework Adapters +Built-in adapter layer for multi-agent registration: OpenClaw, CrewAI, LangGraph, AutoGen, Claude SDK, and generic fallback. Each normalizes registration, heartbeats, and task reporting. + +### 14. 
Background Automation +Scheduled tasks for DB backups, stale record cleanup, agent heartbeat monitoring, recurring task spawning, and automated quality reviews. + +### 15. Webhooks & Alerts +Outbound webhooks with delivery history, retry with exponential backoff, circuit breaker, and HMAC-SHA256 signature verification. Configurable alert rules with cooldowns. + +### 16. GitHub Sync +Bidirectional GitHub Issues sync with label and assignee mapping. Full parity sync implementation. + +### 17. Security +- Ed25519 device identity for gateway handshake +- scrypt password hashing +- RBAC (viewer, operator, admin) +- CSRF origin checks +- CSP headers +- Rate limiting with trusted proxy support +- Per-agent rate limiting with `x-agent-name` identity-based quotas +- Skill security scanner + +### 18. Self-Update +GitHub release check with banner notification. One-click admin update (git pull, pnpm install, pnpm build). Dirty working trees rejected. All updates audit-logged. + +### 19. Audit Trail +Complete action type coverage with grouped filters. Full audit history for compliance and debugging. + +### 20. Pipelines & Workflows +Pipeline orchestration with workflow templates. Start, monitor, and manage multi-step agent workflows. + +--- + +## "How It Works" Section + +``` +1. Clone & Start git clone ... && pnpm install && pnpm dev +2. Agents Register Via gateway, CLI, or self-registration endpoint +3. Tasks Flow Kanban board with automatic dispatch and quality gates +4. 
Monitor & Scale Real-time dashboards, cost tracking, recurring automation +``` + +--- + +## Tech Stack Section + +| Layer | Technology | +|-------|------------| +| Framework | Next.js 16 (App Router) | +| UI | React 19, Tailwind CSS 3.4 | +| Language | TypeScript 5.7 | +| Database | SQLite via better-sqlite3 (WAL mode) | +| State | Zustand 5 | +| Charts | Recharts 3 | +| Real-time | WebSocket + Server-Sent Events | +| Auth | scrypt hashing, session tokens, RBAC | +| Validation | Zod 4 | +| Testing | Vitest + Playwright | + +--- + +## Auth & Access Section + +**Three auth methods:** +1. Session cookie — username/password login (7-day expiry) +2. API key — `x-api-key` header for headless/agent access +3. Google Sign-In — OAuth with admin approval workflow + +**Three roles:** +| Role | Access | +|------|--------| +| Viewer | Read-only dashboard access | +| Operator | Read + write (tasks, agents, chat, spawn) | +| Admin | Full access (users, settings, system ops, webhooks) | + +--- + +## Architecture Diagram (simplified) + +``` +mission-control/ + src/ + app/api/ 98 REST API routes + components/ + panels/ 31 feature panels + dashboard/ Overview dashboard + chat/ Agent chat workspace + layout/ NavRail, HeaderBar, LiveFeed + lib/ + auth.ts Session + API key + Google OAuth + db.ts SQLite (WAL mode, 36 migrations) + scheduler.ts Background automation + websocket.ts Gateway WebSocket client + adapters/ 6 framework adapters + .data/ Runtime SQLite DB + token logs +``` + +--- + +## Quick Start Section + +```bash +git clone https://github.com/builderz-labs/mission-control.git +cd mission-control +pnpm install +cp .env.example .env # edit with your values +pnpm dev # http://localhost:3000 +``` + +Initial login seeded from `AUTH_USER` / `AUTH_PASS` on first run. 
+ +--- + +## Social Proof / Traction + +- 239+ commits of active development +- Open-source MIT license +- Used in production for multi-agent orchestration +- Supports 6 agent frameworks out of the box +- Zero-config SQLite — no Docker, Redis, or Postgres required + +--- + +## Roadmap / Coming Soon + +- Agent-agnostic gateway support (OpenClaw, ZeroClaw, OpenFang, NeoBot, IronClaw, etc.) +- **Flight Deck** — native desktop companion app (Tauri v2) with real PTY terminal grid and system tray HUD (private beta) +- First-class per-agent cost breakdowns panel +- OAuth approval UI improvements +- API token rotation UI + +--- + +## Recent Changelog (latest 20 notable changes) + +1. **Memory knowledge graph** — interactive visualization for agent memory in gateway mode +2. **Agent detail modal redesign** — minimal header, compact overview, inline model selector +3. **Spawn/task unification** — spawn moved inline to task board, sub-agent config to agent detail +4. **Agent comms hardening** — session-threaded messaging with runtime tool visibility +5. **Audit trail** — complete action type coverage with grouped filters +6. **OS-level gateway discovery** — detect gateways via systemd and Tailscale Serve +7. **GitHub sync** — full parity sync with loading state fixes +8. **Automated Aegis quality review** — scheduler-driven approve/reject +9. **Task dispatch** — scheduler polls and runs agents via CLI with session linking +10. **Natural language recurring tasks** — zero-dep schedule parser + template spawning +11. **Claude Code task bridge** — read-only team task and config integration +12. **Agent card redesign** — gateway badge tooltips, ws:// localhost support +13. **Skills Hub** — registry integration, bidirectional sync, security scanner +14. **Per-agent rate limiting** — identity-based quotas via `x-agent-name` +15. **Agent self-registration** — autonomous onboarding endpoint +16. **Framework adapters** — OpenClaw, CrewAI, LangGraph, AutoGen, Claude SDK, generic +17. 
**Self-update mechanism** — one-click update with audit logging +18. **Local agent discovery** — auto-detect from ~/.agents, ~/.codex, ~/.claude +19. **Chat workspace** — embedded chat with local session continuation +20. **Ed25519 device identity** — secure gateway challenge-response handshake + +--- + +## Footer + +MIT License | 2026 Builderz Labs +GitHub: github.com/builderz-labs/mission-control diff --git a/docs/SECURITY-HARDENING.md b/docs/SECURITY-HARDENING.md new file mode 100644 index 0000000..a6c941f --- /dev/null +++ b/docs/SECURITY-HARDENING.md @@ -0,0 +1,277 @@ +# Security Hardening Guide + +Comprehensive security hardening guide for Mission Control and OpenClaw Gateway deployments. + +## Quick Assessment + +Run the automated security audit: + +```bash +bash scripts/security-audit.sh # Check .env and configuration +bash scripts/station-doctor.sh # Check runtime health +``` + +Or use the diagnostics API (admin only): + +```bash +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/diagnostics +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/security-audit?timeframe=day +``` + +The `posture.score` field (0-100) gives a quick posture assessment. The **Security Audit Panel** (`/security` in the dashboard) provides a full real-time view with timeline charts, agent trust scores, and eval results. + +--- + +## Mission Control Hardening + +### 1. Credentials + +**Generate strong credentials** using the included script: + +```bash +bash scripts/generate-env.sh # Generates .env with random secrets +chmod 600 .env # Lock down permissions +``` + +The installer (`install.sh`) does this automatically. If you set up manually, ensure: + +- `AUTH_PASS` is 12+ characters, not a dictionary word +- `API_KEY` is 32+ hex characters +- `AUTH_SECRET` is a unique random string +- `.env` file permissions are `600` + +### 2. 
Network Access Control + +Mission Control uses a host allowlist in production: + +```env +# Only allow connections from these hosts (comma-separated) +MC_ALLOWED_HOSTS=localhost,127.0.0.1 + +# For Tailscale: MC_ALLOWED_HOSTS=localhost,127.0.0.1,100.* +# For a domain: MC_ALLOWED_HOSTS=mc.example.com,localhost + +# NEVER set this in production: +# MC_ALLOW_ANY_HOST=1 +``` + +Deploy behind a reverse proxy with TLS (Caddy, nginx, Tailscale Funnel) for any network-accessible deployment. + +### 3. HTTPS & Cookies + +For HTTPS deployments: + +```env +MC_COOKIE_SECURE=1 # Cookies only sent over HTTPS +MC_COOKIE_SAMESITE=strict # CSRF protection +MC_ENABLE_HSTS=1 # HTTP Strict Transport Security +``` + +### 4. Rate Limiting + +Rate limiting is enabled by default: + +| Endpoint Type | Limit | +|--------------|-------| +| Login | 5 attempts/min (always active) | +| Mutations | 60 requests/min | +| Reads | 120 requests/min | +| Heavy operations | 10 requests/min | +| Agent heartbeat | 30/min per agent | +| Agent task polling | 20/min per agent | + +Never set `MC_DISABLE_RATE_LIMIT=1` in production. + +### 5. Docker Hardening + +Use the production compose overlay: + +```bash +docker compose -f docker-compose.yml -f docker-compose.hardened.yml up -d +``` + +This enables: +- **Read-only filesystem** with tmpfs for `/tmp` and `/app/.next/cache` +- **Capability dropping** — all Linux capabilities dropped, only `NET_BIND_SERVICE` retained +- **No new privileges** — prevents privilege escalation +- **PID limit** — prevents fork bombs +- **Memory/CPU limits** — prevents resource exhaustion +- **Log rotation** — prevents disk filling from verbose logging +- **HSTS, secure cookies** — forced via environment + +### 6. 
Security Headers + +Mission Control sets these headers automatically: + +| Header | Value | +|--------|-------| +| `Content-Security-Policy` | `default-src 'self'; script-src 'self' 'unsafe-inline' 'nonce-...'` | +| `X-Frame-Options` | `DENY` | +| `X-Content-Type-Options` | `nosniff` | +| `Referrer-Policy` | `strict-origin-when-cross-origin` | +| `Permissions-Policy` | `camera=(), microphone=(), geolocation=()` | +| `X-Request-Id` | Unique per-request UUID for log correlation | +| `Strict-Transport-Security` | Set when `MC_ENABLE_HSTS=1` | + +### 7. Audit Logging + +All security-relevant events are logged to the audit trail: + +- Login attempts (success and failure) +- Task mutations +- User management actions +- Settings changes +- Update operations + +Additionally, the **security event system** automatically logs: + +- Auth failures (invalid passwords, expired tokens, access denials) +- Rate limit hits (429 responses with IP/agent correlation) +- Injection attempts (prompt injection, command injection, exfiltration) +- Secret exposures (AWS keys, GitHub tokens, Stripe keys, JWTs, private keys detected in agent messages) +- MCP tool calls (agent, tool, duration, success/failure) + +These events feed into the **Security Audit Panel** (`/security`) which provides: + +- **Posture score** (0-100) with level badges (hardened/secure/needs-attention/at-risk) +- **Agent trust scores** — weighted calculation based on auth failures, injection attempts, and task success rates +- **MCP call audit** — tool-use frequency, success/failure rates per agent +- **Timeline visualization** — event density over selected timeframe + +Configure retention: `MC_RETAIN_AUDIT_DAYS=365` (default: 1 year). + +### 8. 
Hook Profiles + +Security strictness is tunable via hook profiles in Settings > Security Profiles: + +| Profile | Secret Scanning | MCP Auditing | Block on Secrets | Rate Limit Multiplier | +|---------|----------------|--------------|------------------|----------------------| +| **minimal** | Off | Off | No | 2x (relaxed) | +| **standard** (default) | On | On | No | 1x | +| **strict** | On | On | Yes (blocks messages) | 0.5x (tighter) | + +Set via the Settings panel or the `hook_profile` key in the settings API. + +### 9. Agent Eval Framework + +The four-layer eval stack helps detect degrading agent quality: + +- **Output evals** — score task completion against golden datasets +- **Trace evals** — convergence scoring (>3.0 indicates looping behavior) +- **Component evals** — tool reliability from MCP call logs (p50/p95/p99 latency) +- **Drift detection** — 10% threshold vs 4-week rolling baseline triggers alerts + +Access via `/api/agents/evals` or the Security Audit Panel's eval section. + +### 10. Data Retention + +```env +MC_RETAIN_ACTIVITIES_DAYS=90 # Activity feed +MC_RETAIN_AUDIT_DAYS=365 # Security audit trail +MC_RETAIN_LOGS_DAYS=30 # Application logs +MC_RETAIN_NOTIFICATIONS_DAYS=60 # Notifications +MC_RETAIN_PIPELINE_RUNS_DAYS=90 # Pipeline logs +MC_RETAIN_TOKEN_USAGE_DAYS=90 # Token/cost records +MC_RETAIN_GATEWAY_SESSIONS_DAYS=90 # Gateway session history +``` + +--- + +## OpenClaw Gateway Hardening + +Mission Control acts as the mothership for your OpenClaw fleet. The installer automatically checks and repairs common OpenClaw configuration issues. + +### 1. Network Security + +- **Never expose the gateway publicly.** It runs on port 18789 by default. +- **Bind to localhost:** Set `gateway.bind: "loopback"` in `openclaw.json`. +- **Use SSH tunneling or Tailscale** for remote access. +- **Docker users:** Be aware that Docker can bypass UFW rules. Use `DOCKER-USER` chain rules. + +### 2. 
Authentication + +- **Always enable gateway auth** with a strong random token. +- Generate: `openclaw doctor --generate-gateway-token` +- Store in `OPENCLAW_GATEWAY_TOKEN` env var (never in `NEXT_PUBLIC_*` variables). +- Rotate regularly. + +### 3. Hardened Gateway Configuration + +```json +{ + "gateway": { + "mode": "local", + "bind": "loopback", + "auth": { + "mode": "token", + "token": "replace-with-long-random-token" + } + }, + "session": { + "dmScope": "per-channel-peer" + }, + "tools": { + "profile": "messaging", + "deny": ["group:automation", "group:runtime", "group:fs", "sessions_spawn", "sessions_send"], + "fs": { "workspaceOnly": true }, + "exec": { "security": "deny", "ask": "always" } + }, + "elevated": { "enabled": false } +} +``` + +### 4. File Permissions + +```bash +chmod 700 ~/.openclaw +chmod 600 ~/.openclaw/openclaw.json +chmod 600 ~/.openclaw/credentials/* +``` + +### 5. Tool Security + +- Apply the principle of least privilege — only grant tools the agent needs. +- Audit third-party skills before installing (Mission Control's Skills Hub runs automatic security scans). +- Run agents processing untrusted content in a sandbox with a minimal toolset. + +### 6. Monitoring + +- Enable comprehensive logging: `logging.redactSensitive: "tools"` +- Store logs separately where agents cannot modify them. +- Use Mission Control's diagnostics API to monitor gateway health. +- Have an incident response plan: stop gateway, revoke API keys, review audit logs. + +### 7. Known CVEs + +Keep OpenClaw updated. 
Notable past vulnerabilities: + +| CVE | Severity | Description | Fixed In | +|-----|----------|-------------|----------| +| CVE-2026-25253 | Critical | RCE via Control UI token hijack | v2026.1.29 | +| CVE-2026-26327 | High | Auth bypass via gateway spoofing | v2026.2.25 | +| CVE-2026-26322 | High | SSRF | v2026.2.25 | +| CVE-2026-26329 | High | Path traversal | v2026.2.25 | +| CVE-2026-26319 | Medium | Missing webhook auth | v2026.2.25 | + +--- + +## Deployment Architecture + +For production, the recommended architecture is: + +``` +Internet + | +[Reverse Proxy (Caddy/nginx) + TLS] + | +[Mission Control :3000] ---- [SQLite .data/] + | +[OpenClaw Gateway :18789 (localhost only)] + | +[Agent Workspaces] +``` + +- Reverse proxy handles TLS termination, rate limiting, and access control +- Mission Control listens on localhost or a private network +- OpenClaw Gateway is bound to loopback only +- Agent workspaces are isolated per-agent directories diff --git a/docs/deployment.md b/docs/deployment.md index feb3a59..aaa5772 100644 --- a/docs/deployment.md +++ b/docs/deployment.md @@ -48,6 +48,32 @@ PORT=3000 pnpm start **Important:** The production build bundles platform-specific native binaries. You must run `pnpm install` and `pnpm build` on the same OS and architecture as the target server. A build created on macOS will not work on Linux. +## Production (Standalone) + +Use this for bare-metal deployments that run Next's standalone server directly. +This path is preferred over ad hoc `node .next/standalone/server.js` because it +syncs `.next/static` and `public/` into the standalone bundle before launch. 
+ +```bash +pnpm install --frozen-lockfile +pnpm build +pnpm start:standalone +``` + +For a full in-place update on the target host: + +```bash +BRANCH=fix/refactor PORT=3000 pnpm deploy:standalone +``` + +What `deploy:standalone` does: +- fetches and fast-forwards the requested branch +- reinstalls dependencies with the lockfile +- rebuilds from a clean `.next/` +- stops the old process bound to the target port +- starts the standalone server through `scripts/start-standalone.sh` +- verifies that the rendered login page references a CSS asset and that the CSS is served as `text/css` + ## Production (Docker) ```bash diff --git a/docs/plans/2026-03-10-onboarding-walkthrough-hardening.md b/docs/plans/2026-03-10-onboarding-walkthrough-hardening.md new file mode 100644 index 0000000..2924963 --- /dev/null +++ b/docs/plans/2026-03-10-onboarding-walkthrough-hardening.md @@ -0,0 +1,31 @@ +# Onboarding + Walkthrough hardening plan + +Base branch: `fix/refactor` +Working branch: `fix/refactor-onboarding-walkthrough` + +## Goals +- Verify current onboarding and walkthrough flows are functional. +- Fix edge cases for first-run, skip, replay, and recovery states. +- Improve UX discoverability of walkthrough entry points. +- Add regression tests to keep flows stable. + +## Phase 1: audit and test map +1. Identify current onboarding/walkthrough code paths. +2. Document triggers, persistence flags, and routing. +3. Add failing tests for first-run, skip, replay, and already-seen states. + +## Phase 2: implementation hardening +1. Fix state transitions and persistence updates. +2. Ensure walkthrough can be reopened from primary UI. +3. Add visible hint/help entry to improve discoverability. +4. Handle corrupted or partial onboarding state safely. + +## Phase 3: verification +1. Run targeted tests for onboarding/walkthrough. +2. Run full project checks. +3. Validate end-to-end flow manually in local dev. 
+ +## Deliverables +- Code changes in onboarding/walkthrough modules +- Automated tests covering key onboarding paths +- Updated docs/help text for walkthrough discoverability diff --git a/install.sh b/install.sh new file mode 100755 index 0000000..5c3ac06 --- /dev/null +++ b/install.sh @@ -0,0 +1,429 @@ +#!/usr/bin/env bash +# Mission Control — One-Command Installer +# The mothership for your OpenClaw fleet. +# +# Usage: +# curl -fsSL https://raw.githubusercontent.com/builderz-labs/mission-control/main/install.sh | bash +# # or +# bash install.sh [--docker|--local] [--port PORT] [--data-dir DIR] +# +# Installs Mission Control and optionally repairs/configures OpenClaw. + +set -euo pipefail + +# ── Defaults ────────────────────────────────────────────────────────────────── +MC_PORT="${MC_PORT:-3000}" +MC_DATA_DIR="" +DEPLOY_MODE="" +SKIP_OPENCLAW=false +REPO_URL="https://github.com/builderz-labs/mission-control.git" +INSTALL_DIR="${MC_INSTALL_DIR:-$(pwd)/mission-control}" + +# ── Parse arguments ─────────────────────────────────────────────────────────── +while [[ $# -gt 0 ]]; do + case "$1" in + --docker) DEPLOY_MODE="docker"; shift ;; + --local) DEPLOY_MODE="local"; shift ;; + --port) MC_PORT="$2"; shift 2 ;; + --data-dir) MC_DATA_DIR="$2"; shift 2 ;; + --skip-openclaw) SKIP_OPENCLAW=true; shift ;; + --dir) INSTALL_DIR="$2"; shift 2 ;; + -h|--help) + echo "Usage: install.sh [--docker|--local] [--port PORT] [--data-dir DIR] [--dir INSTALL_DIR] [--skip-openclaw]" + exit 0 ;; + *) echo "Unknown option: $1"; exit 1 ;; + esac +done + +# ── Helpers ─────────────────────────────────────────────────────────────────── +info() { echo -e "\033[1;34m[MC]\033[0m $*"; } +ok() { echo -e "\033[1;32m[OK]\033[0m $*"; } +warn() { echo -e "\033[1;33m[!!]\033[0m $*"; } +err() { echo -e "\033[1;31m[ERR]\033[0m $*" >&2; } +die() { err "$*"; exit 1; } + +command_exists() { command -v "$1" &>/dev/null; } + +detect_os() { + local os arch + os="$(uname -s)" + arch="$(uname -m)" + + case 
"$os" in + Linux) OS="linux" ;; + Darwin) OS="darwin" ;; + *) die "Unsupported OS: $os" ;; + esac + + case "$arch" in + x86_64|amd64) ARCH="x64" ;; + aarch64|arm64) ARCH="arm64" ;; + *) die "Unsupported architecture: $arch" ;; + esac + + ok "Detected $OS/$ARCH" +} + +check_prerequisites() { + local has_docker=false has_node=false + + if command_exists docker && docker info &>/dev/null 2>&1; then + has_docker=true + ok "Docker available ($(docker --version | head -1))" + fi + + if command_exists node; then + local node_major + node_major=$(node -v | sed 's/v//' | cut -d. -f1) + if [[ "$node_major" -ge 20 ]]; then + has_node=true + ok "Node.js $(node -v) available" + else + warn "Node.js $(node -v) found but v20+ required" + fi + fi + + if ! $has_docker && ! $has_node; then + die "Either Docker or Node.js 20+ is required. Install one and retry." + fi + + # Auto-select deploy mode if not specified + if [[ -z "$DEPLOY_MODE" ]]; then + if $has_docker; then + DEPLOY_MODE="docker" + info "Auto-selected Docker deployment (use --local to override)" + else + DEPLOY_MODE="local" + info "Auto-selected local deployment (Docker not available)" + fi + fi + + # Validate chosen mode + if [[ "$DEPLOY_MODE" == "docker" ]] && ! $has_docker; then + die "Docker deployment requested but Docker is not available" + fi + if [[ "$DEPLOY_MODE" == "local" ]] && ! $has_node; then + die "Local deployment requested but Node.js 20+ is not available" + fi + if [[ "$DEPLOY_MODE" == "local" ]] && ! command_exists pnpm; then + info "Installing pnpm via corepack..." + corepack enable && corepack prepare pnpm@latest --activate + ok "pnpm installed" + fi +} + +# ── Clone or update repo ───────────────────────────────────────────────────── +fetch_source() { + if [[ -d "$INSTALL_DIR/.git" ]]; then + info "Updating existing installation at $INSTALL_DIR..." 
+ cd "$INSTALL_DIR" + git fetch --tags + local latest_tag + latest_tag=$(git describe --tags --abbrev=0 origin/main 2>/dev/null || echo "") + if [[ -n "$latest_tag" ]]; then + git checkout "$latest_tag" + ok "Checked out $latest_tag" + else + git pull origin main + ok "Updated to latest main" + fi + else + info "Cloning Mission Control..." + if command_exists git; then + git clone --depth 1 "$REPO_URL" "$INSTALL_DIR" + cd "$INSTALL_DIR" + ok "Cloned to $INSTALL_DIR" + else + die "git is required to clone the repository" + fi + fi +} + +# ── Generate .env ───────────────────────────────────────────────────────────── +setup_env() { + if [[ -f "$INSTALL_DIR/.env" ]]; then + info "Existing .env found — keeping current configuration" + return + fi + + info "Generating secure .env configuration..." + bash "$INSTALL_DIR/scripts/generate-env.sh" "$INSTALL_DIR/.env" + + # Set the port if non-default + if [[ "$MC_PORT" != "3000" ]]; then + if [[ "$(uname)" == "Darwin" ]]; then + sed -i '' "s|^# PORT=3000|PORT=$MC_PORT|" "$INSTALL_DIR/.env" + else + sed -i "s|^# PORT=3000|PORT=$MC_PORT|" "$INSTALL_DIR/.env" + fi + fi + + ok "Secure .env generated" +} + +# ── Docker deployment ───────────────────────────────────────────────────────── +deploy_docker() { + info "Starting Docker deployment..." + + export MC_PORT + docker compose up -d --build + + # Wait for healthy + info "Waiting for Mission Control to become healthy..." 
+ local retries=30 + while [[ $retries -gt 0 ]]; do + if docker compose ps --format json 2>/dev/null | grep -q '"Health":"healthy"'; then + break + fi + # Fallback: try HTTP check + if curl -sf "http://localhost:$MC_PORT/login" &>/dev/null; then + break + fi + sleep 2 + ((retries--)) + done + + if [[ $retries -eq 0 ]]; then + warn "Timeout waiting for health check — container may still be starting" + docker compose logs --tail 20 + else + ok "Mission Control is running in Docker" + fi +} + +# ── Local deployment ────────────────────────────────────────────────────────── +deploy_local() { + info "Starting local deployment..." + + cd "$INSTALL_DIR" + pnpm install --frozen-lockfile 2>/dev/null || pnpm install + ok "Dependencies installed" + + info "Building Mission Control..." + pnpm build + ok "Build complete" + + # Create systemd service on Linux if systemctl is available + if [[ "$OS" == "linux" ]] && command_exists systemctl; then + setup_systemd + fi + + info "Starting Mission Control..." + PORT="$MC_PORT" nohup pnpm start > "$INSTALL_DIR/.data/mc.log" 2>&1 & + local pid=$! + echo "$pid" > "$INSTALL_DIR/.data/mc.pid" + + sleep 3 + if kill -0 "$pid" 2>/dev/null; then + ok "Mission Control running (PID $pid)" + else + err "Failed to start. Check logs: $INSTALL_DIR/.data/mc.log" + exit 1 + fi +} + +# ── Systemd service ────────────────────────────────────────────────────────── +setup_systemd() { + local service_file="/etc/systemd/system/mission-control.service" + if [[ -f "$service_file" ]]; then + info "Systemd service already exists" + return + fi + + info "Creating systemd service..." + local user + user="$(whoami)" + local node_path + node_path="$(which node)" + + cat > /tmp/mission-control.service </dev/null)" || continue + if ! 
kill -0 "$pid" 2>/dev/null; then + rm -f "$pidfile" + ((stale_count++)) + fi + done + if [[ $stale_count -gt 0 ]]; then + ok "Cleaned $stale_count stale PID file(s)" + fi + + # Check logs directory size + local logs_dir="$oc_home/logs" + if [[ -d "$logs_dir" ]]; then + local logs_size + if [[ "$(uname)" == "Darwin" ]]; then + logs_size="$(du -sh "$logs_dir" 2>/dev/null | cut -f1)" + else + logs_size="$(du -sh "$logs_dir" 2>/dev/null | cut -f1)" + fi + info "Logs directory: $logs_size ($logs_dir)" + + # Clean old logs (> 30 days) + local old_logs + old_logs=$(find "$logs_dir" -name "*.log" -mtime +30 2>/dev/null | wc -l | tr -d ' ') + if [[ "$old_logs" -gt 0 ]]; then + find "$logs_dir" -name "*.log" -mtime +30 -delete 2>/dev/null || true + ok "Cleaned $old_logs log file(s) older than 30 days" + fi + fi + + # Check workspace directory + local workspace="$oc_home/workspace" + if [[ -d "$workspace" ]]; then + local agent_count + agent_count=$(find "$workspace" -maxdepth 1 -type d 2>/dev/null | wc -l | tr -d ' ') + ((agent_count--)) # subtract the workspace dir itself + info "Workspace: $agent_count agent workspace(s) in $workspace" + fi + else + info "OpenClaw home not found at $oc_home" + info "Set OPENCLAW_HOME in .env to point to your OpenClaw state directory" + fi + + # Check gateway port + local gw_host="${OPENCLAW_GATEWAY_HOST:-127.0.0.1}" + local gw_port="${OPENCLAW_GATEWAY_PORT:-18789}" + if nc -z "$gw_host" "$gw_port" 2>/dev/null || (echo > "/dev/tcp/$gw_host/$gw_port") 2>/dev/null; then + ok "Gateway reachable at $gw_host:$gw_port" + else + info "Gateway not reachable at $gw_host:$gw_port (start it with: openclaw gateway start)" + fi +} + +# ── Main ────────────────────────────────────────────────────────────────────── +main() { + echo "" + echo " ╔══════════════════════════════════════╗" + echo " ║ Mission Control Installer ║" + echo " ║ The mothership for your fleet ║" + echo " ╚══════════════════════════════════════╝" + echo "" + + detect_os + 
check_prerequisites + + # If running from within an existing clone, use current dir + if [[ -f "$(pwd)/package.json" ]] && grep -q '"mission-control"' "$(pwd)/package.json" 2>/dev/null; then + INSTALL_DIR="$(pwd)" + info "Running from existing clone at $INSTALL_DIR" + else + fetch_source + fi + + # Ensure data directory exists + mkdir -p "$INSTALL_DIR/.data" + + setup_env + + case "$DEPLOY_MODE" in + docker) deploy_docker ;; + local) deploy_local ;; + *) die "Unknown deploy mode: $DEPLOY_MODE" ;; + esac + + check_openclaw + + # ── Print summary ── + echo "" + echo " ╔══════════════════════════════════════╗" + echo " ║ Installation Complete ║" + echo " ╚══════════════════════════════════════╝" + echo "" + info "Dashboard: http://localhost:$MC_PORT" + info "Mode: $DEPLOY_MODE" + info "Data: $INSTALL_DIR/.data/" + echo "" + info "Credentials are in: $INSTALL_DIR/.env" + echo "" + + if [[ "$DEPLOY_MODE" == "docker" ]]; then + info "Manage:" + info " docker compose logs -f # view logs" + info " docker compose restart # restart" + info " docker compose down # stop" + else + info "Manage:" + info " cat $INSTALL_DIR/.data/mc.log # view logs" + info " kill \$(cat $INSTALL_DIR/.data/mc.pid) # stop" + fi + + echo "" +} + +main "$@" diff --git a/next.config.js b/next.config.js index a93ab1a..9522c9c 100644 --- a/next.config.js +++ b/next.config.js @@ -1,6 +1,9 @@ /** @type {import('next').NextConfig} */ const nextConfig = { output: 'standalone', + outputFileTracingExcludes: { + '/*': ['./.data/**/*'], + }, turbopack: {}, // Transpile ESM-only packages so they resolve correctly in all environments transpilePackages: ['react-markdown', 'remark-gfm'], @@ -11,12 +14,13 @@ const nextConfig = { const csp = [ `default-src 'self'`, - `script-src 'self' 'unsafe-inline'${googleEnabled ? ' https://accounts.google.com' : ''}`, + `script-src 'self' 'unsafe-inline' blob:${googleEnabled ? 
' https://accounts.google.com' : ''}`, `style-src 'self' 'unsafe-inline'`, - `connect-src 'self' ws: wss: http://127.0.0.1:* http://localhost:*`, + `connect-src 'self' ws: wss: http://127.0.0.1:* http://localhost:* https://cdn.jsdelivr.net`, `img-src 'self' data: blob:${googleEnabled ? ' https://*.googleusercontent.com https://lh3.googleusercontent.com' : ''}`, `font-src 'self' data:`, `frame-src 'self'${googleEnabled ? ' https://accounts.google.com' : ''}`, + `worker-src 'self' blob:`, ].join('; ') return [ diff --git a/openclaw_hardening_guide.md b/openclaw_hardening_guide.md new file mode 100644 index 0000000..5dfbcef --- /dev/null +++ b/openclaw_hardening_guide.md @@ -0,0 +1,124 @@ +# OpenClaw Gateway Security and Hardening Best Practices + +This document consolidates security and hardening best practices for the OpenClaw Gateway, drawing from official documentation and recent security advisories. + +## 1. Core Security Model & Deployment Considerations + +OpenClaw is designed primarily for a **personal assistant deployment model**, assuming one trusted operator per gateway. It is **not intended for multi-tenant environments** with untrusted or adversarial users. For such scenarios, run separate gateway instances for each trust boundary. + +## 2. 
Hardened Baseline Configuration + +For a secure starting point, consider the following configuration, which keeps the Gateway local, isolates DMs, and disables potentially dangerous tools by default: + +```json +{ + "gateway": { + "mode": "local", + "bind": "loopback", + "auth": { + "mode": "token", + "token": "replace-with-long-random-token" + } + }, + "session": { + "dmScope": "per-channel-peer" + }, + "tools": { + "profile": "messaging", + "deny": ["group:automation", "group:runtime", "group:fs", "sessions_spawn", "sessions_send"], + "fs": { + "workspaceOnly": true + }, + "exec": { + "security": "deny", + "ask": "always" + } + }, + "elevated": { + "enabled": false + }, + "channels": { + "whatsapp": { + "dmPolicy": "pairing", + "groups": { + "*": { + "requireMention": true + } + } + } + } +} +``` + +## 3. Key Hardening Recommendations + +### 3.1. Network Security + +* **Do Not Expose Publicly:** Never expose the OpenClaw gateway directly to the public internet. It typically runs on port 18789. Publicly exposed gateways are easily discoverable. +* **Bind to Localhost:** Configure the gateway to listen only for connections from the local machine by binding it to `127.0.0.1` (localhost) or `loopback` in your `openclaw.json`. +* **Firewall Rules:** Implement strict firewall rules to block all unnecessary inbound and outbound connections, allowing only essential traffic. +* **Secure Remote Access:** For remote access, use secure methods like SSH tunneling or a VPN (e.g., Tailscale) instead of direct exposure. +* **Docker Considerations:** If using Docker, be aware that it can bypass UFW rules. Configure rules in the `DOCKER-USER` chain to control exposure. + +### 3.2. Authentication and Access Control + +* **Enable Gateway Authentication:** Always enable gateway authentication and use a strong, randomly generated authentication token. Generate a token with `openclaw doctor --generate-gateway-token`. 
+* **Manage Access Tokens:** Treat your gateway authentication token like a password. Rotate it regularly and store it securely (e.g., as an environment variable, not in plaintext config files).
+* **Restrict Chat and Messaging:** If integrating with chat platforms, use allowlists to specify which user IDs can interact with your agent.
+* **Direct Messages (DMs) and Groups:**
+  * For DMs, use the default `pairing` policy (`dmPolicy: "pairing"`) to require approval for unknown senders.
+  * For group chats, require the bot to be explicitly mentioned to respond (`requireMention: true`).
+  * Isolate DM sessions using `session.dmScope: "per-channel-peer"` to prevent context leakage.
+
+### 3.3. Isolation and Sandboxing
+
+* **Run in a Docker Container:** The recommended approach is to run OpenClaw within a Docker container for process isolation, filesystem restrictions, and network controls.
+* **Harden Docker Configuration:**
+  * Do not mount your home directory or the Docker socket.
+  * Use read-only filesystems where possible.
+  * Drop unnecessary Linux capabilities.
+  * Run the container as a non-root user.
+* **Enable Sandbox Mode:** For tasks that execute code, enable OpenClaw's sandbox mode to prevent malicious or compromised prompts from accessing your system or network. Configure this in `agents.defaults.sandbox`.
+
+### 3.4. Credential and Secret Management
+
+* **Avoid Plaintext Storage:** Never store API keys, tokens, or other sensitive information in plaintext configuration files.
+* **Use Secure Storage Mechanisms:** Load credentials from environment variables or use dedicated secrets management solutions (e.g., HashiCorp Vault, AWS Secrets Manager).
+
+### 3.5. File System Permissions
+
+* Ensure your configuration and state files are private.
+* `~/.openclaw/openclaw.json` should have permissions `600` (user read/write only).
+* The `~/.openclaw` directory should have permissions `700` (user access only). 
+* `~/.openclaw/credentials/` and its contents should also be `600`.
+
+### 3.6. Tool and Skill Security
+
+* **Principle of Least Privilege:** Only grant the agent the permissions and tools it absolutely needs.
+* **Audit Third-Party Skills:** Be extremely cautious with third-party skills, as they can contain malicious code. Research has shown a significant number of skills on marketplaces may be malicious.
+
+### 3.7. Prompt Injection Mitigation
+
+* Lock down who can message the bot using DM pairing and allowlists.
+* Require mentions in group chats.
+* Run agents that process untrusted content in a sandbox with a minimal toolset.
+* Use the latest, most powerful models, as they are generally more resistant to prompt injection.
+
+### 3.8. Monitoring and Incident Response
+
+* **Enable Logging:** Turn on comprehensive logging for all agent activities (command executions, API calls, file access). Store logs in a secure, separate location where the agent cannot modify them.
+* **Log Redaction:** Keep log redaction enabled (`logging.redactSensitive: "tools"`) to prevent sensitive information from leaking into logs.
+* **Incident Response Plan:** Have a plan for suspected compromises, including stopping the gateway and revoking API keys.
+
+## 4. Staying Updated and Aware of Vulnerabilities
+
+The OpenClaw project is under active development, and new vulnerabilities are discovered.
+
+* **Keep Software Updated:** Regularly update OpenClaw and its dependencies to ensure you have the latest security patches.
+* **Be Aware of Recent Threats:** Stay informed about new vulnerabilities. Notable past vulnerabilities include:
+  * **ClawJacked (High Severity):** Allowed malicious websites to hijack locally running OpenClaw instances via WebSocket connections and password brute-forcing. Patched in v2026.2.25. 
+ * **Remote Code Execution (Critical - CVE-2026-25253):** A malicious link could trick the Control UI into sending an auth token to an attacker-controlled server, leading to RCE. Patched in v2026.1.29. + * **Authentication Bypass (High Severity - CVE-2026-26327):** Allowed attackers on the same local network to intercept credentials by spoofing a legitimate gateway. + * **Other Vulnerabilities:** Server-Side Request Forgery (SSRF - CVE-2026-26322), missing webhook authentication (CVE-2026-26319), and path traversal (CVE-2026-26329). + +By diligently applying these practices, you can significantly enhance the security posture of your OpenClaw Gateway deployment. diff --git a/package.json b/package.json index 91ec7a0..ab1527c 100644 --- a/package.json +++ b/package.json @@ -1,28 +1,33 @@ { "name": "mission-control", - "version": "1.3.0", + "version": "2.0.0", "description": "OpenClaw Mission Control — open-source agent orchestration dashboard", "scripts": { - "dev": "next dev --hostname 127.0.0.1 --port ${PORT:-3000}", - "build": "next build", - "start": "next start --hostname 0.0.0.0 --port ${PORT:-3000}", - "lint": "eslint .", - "typecheck": "tsc --noEmit", - "test": "vitest run", + "verify:node": "node scripts/check-node-version.mjs", + "dev": "pnpm run verify:node && next dev --hostname 127.0.0.1 --port ${PORT:-3000}", + "build": "pnpm run verify:node && next build", + "start": "pnpm run verify:node && next start --hostname 0.0.0.0 --port ${PORT:-3000}", + "start:standalone": "pnpm run verify:node && bash scripts/start-standalone.sh", + "deploy:standalone": "pnpm run verify:node && bash scripts/deploy-standalone.sh", + "lint": "pnpm run verify:node && eslint .", + "typecheck": "pnpm run verify:node && tsc --noEmit", + "test": "pnpm run verify:node && vitest run", "test:watch": "vitest", "test:ui": "vitest --ui", - "test:e2e": "playwright test", - "test:e2e:openclaw:local": "E2E_GATEWAY_EXPECTED=0 playwright test -c playwright.openclaw.local.config.ts", - 
"test:e2e:openclaw:gateway": "E2E_GATEWAY_EXPECTED=1 playwright test -c playwright.openclaw.gateway.config.ts", + "test:e2e": "pnpm run verify:node && playwright test", + "test:e2e:openclaw:local": "pnpm run verify:node && E2E_GATEWAY_EXPECTED=0 playwright test -c playwright.openclaw.local.config.ts", + "test:e2e:openclaw:gateway": "pnpm run verify:node && E2E_GATEWAY_EXPECTED=1 playwright test -c playwright.openclaw.gateway.config.ts", "test:e2e:openclaw": "pnpm test:e2e:openclaw:local && pnpm test:e2e:openclaw:gateway", "test:all": "pnpm lint && pnpm typecheck && pnpm test && pnpm build && pnpm test:e2e", "quality:gate": "pnpm test:all" }, "dependencies": { + "@radix-ui/react-slot": "^1.2.4", "@scalar/api-reference-react": "^0.8.66", "@xyflow/react": "^12.10.0", "autoprefixer": "^10.4.20", "better-sqlite3": "^12.6.2", + "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "eslint": "^9.18.0", "eslint-config-next": "^16.1.6", @@ -34,6 +39,7 @@ "react-dom": "^19.0.1", "react-markdown": "^10.1.0", "reactflow": "^11.11.4", + "reagraph": "^4.30.8", "recharts": "^3.7.0", "remark-gfm": "^4.0.1", "tailwind-merge": "^3.4.0", @@ -60,7 +66,7 @@ "vitest": "^2.1.5" }, "engines": { - "node": ">=20" + "node": ">=22 <23" }, "keywords": [ "openclaw", diff --git a/playwright.config.ts b/playwright.config.ts index 854a182..56630dc 100644 --- a/playwright.config.ts +++ b/playwright.config.ts @@ -18,14 +18,13 @@ export default defineConfig({ { name: 'chromium', use: { ...devices['Desktop Chrome'] } } ], webServer: { - command: 'node .next/standalone/server.js', + command: 'node scripts/e2e-openclaw/start-e2e-server.mjs --mode=local', url: 'http://127.0.0.1:3005', reuseExistingServer: true, timeout: 120_000, env: { ...process.env, - HOSTNAME: process.env.HOSTNAME || '127.0.0.1', - PORT: process.env.PORT || '3005', + MISSION_CONTROL_TEST_MODE: process.env.MISSION_CONTROL_TEST_MODE || '1', MC_DISABLE_RATE_LIMIT: process.env.MC_DISABLE_RATE_LIMIT || '1', 
MC_WORKLOAD_QUEUE_DEPTH_THROTTLE: process.env.MC_WORKLOAD_QUEUE_DEPTH_THROTTLE || '1000', MC_WORKLOAD_QUEUE_DEPTH_SHED: process.env.MC_WORKLOAD_QUEUE_DEPTH_SHED || '2000', @@ -34,7 +33,6 @@ export default defineConfig({ API_KEY: process.env.API_KEY || 'test-api-key-e2e-12345', AUTH_USER: process.env.AUTH_USER || 'testadmin', AUTH_PASS: process.env.AUTH_PASS || 'testpass1234!', - OPENCLAW_MEMORY_DIR: process.env.OPENCLAW_MEMORY_DIR || '.data/e2e-memory', }, } }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e230b81..6cb9bca 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + '@radix-ui/react-slot': + specifier: ^1.2.4 + version: 1.2.4(@types/react@19.2.13)(react@19.2.4) '@scalar/api-reference-react': specifier: ^0.8.66 version: 0.8.66(react@19.2.4)(tailwindcss@3.4.19(yaml@2.8.2))(typescript@5.9.3) @@ -20,6 +23,9 @@ importers: better-sqlite3: specifier: ^12.6.2 version: 12.6.2 + class-variance-authority: + specifier: ^0.7.1 + version: 0.7.1 clsx: specifier: ^2.1.1 version: 2.1.1 @@ -53,6 +59,9 @@ importers: reactflow: specifier: ^11.11.4 version: 11.11.4(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + reagraph: + specifier: ^4.30.8 + version: 4.30.8(@types/react@19.2.13)(@types/three@0.183.1)(graphology-types@0.24.8)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) recharts: specifier: ^3.7.0 version: 3.7.0(@types/react@19.2.13)(react-dom@19.2.4(react@19.2.4))(react-is@17.0.2)(react@19.2.4)(redux@5.0.1) @@ -306,6 +315,9 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} + '@dimforge/rapier3d-compat@0.12.0': + resolution: {integrity: sha512-uekIGetywIgopfD97oDL5PfeezkFpNhwlzlaEYNOA0N6ghdsOvh/HYjSMek5Q2O1PYvRSDFcqFVJl4r4ZBwOow==} + '@emnapi/core@1.8.1': resolution: {integrity: 
sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==} @@ -736,6 +748,14 @@ packages: '@marijn/find-cluster-break@1.0.2': resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==} + '@mediapipe/tasks-vision@0.10.17': + resolution: {integrity: sha512-CZWV/q6TTe8ta61cZXjfnnHsfWIdFhms03M9T7Cnd5y2mdpylJM0rF1qRq+wsQVRMLz1OYPVEBU9ph2Bx8cxrg==} + + '@monogrid/gainmap-js@3.4.0': + resolution: {integrity: sha512-2Z0FATFHaoYJ8b+Y4y4Hgfn3FRFwuU5zRrk+9dFWp4uGAdHGqVEdP7HP+gLA3X469KXHmfupJaUbKo1b/aDKIg==} + peerDependencies: + three: '>= 0.159.0' + '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} @@ -828,6 +848,88 @@ packages: engines: {node: '>=18'} hasBin: true + '@radix-ui/react-compose-refs@1.1.2': + resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-slot@1.2.4': + resolution: {integrity: sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@react-spring/animated@10.0.3': + resolution: {integrity: sha512-7MrxADV3vaUADn2V9iYhaIL6iOWRx9nCJjYrsk2AHD2kwPr6fg7Pt0v+deX5RnCDmCKNnD6W5fasiyM8D+wzJQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@react-spring/core@10.0.3': + resolution: {integrity: sha512-D4DwNO68oohDf/0HG2G0Uragzb9IA1oXblxrd6MZAcBcUQG2EHUWXewjdECMPLNmQvlYVyyBRH6gPxXM5DX7DQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@react-spring/rafz@10.0.3': + 
resolution: {integrity: sha512-Ri2/xqt8OnQ2iFKkxKMSF4Nqv0LSWnxXT4jXFzBDsHgeeH/cHxTLupAWUwmV9hAGgmEhBmh5aONtj3J6R/18wg==} + + '@react-spring/shared@10.0.3': + resolution: {integrity: sha512-geCal66nrkaQzUVhPkGomylo+Jpd5VPK8tPMEDevQEfNSWAQP15swHm+MCRG4wVQrQlTi9lOzKzpRoTL3CA84Q==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@react-spring/three@10.0.3': + resolution: {integrity: sha512-hZP7ChF/EwnWn+H2xuzAsRRfQdhquoBTI1HKgO6X9V8tcVCuR69qJmsA9N00CA4Nzx0bo/zwBtqONmi55Ffm5w==} + peerDependencies: + '@react-three/fiber': '>=6.0' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + three: '>=0.126' + + '@react-spring/types@10.0.3': + resolution: {integrity: sha512-H5Ixkd2OuSIgHtxuHLTt7aJYfhMXKXT/rK32HPD/kSrOB6q6ooeiWAXkBy7L8F3ZxdkBb9ini9zP9UwnEFzWgQ==} + + '@react-three/drei@10.7.7': + resolution: {integrity: sha512-ff+J5iloR0k4tC++QtD/j9u3w5fzfgFAWDtAGQah9pF2B1YgOq/5JxqY0/aVoQG5r3xSZz0cv5tk2YuBob4xEQ==} + peerDependencies: + '@react-three/fiber': ^9.0.0 + react: ^19 + react-dom: ^19 + three: '>=0.159' + peerDependenciesMeta: + react-dom: + optional: true + + '@react-three/fiber@9.5.0': + resolution: {integrity: sha512-FiUzfYW4wB1+PpmsE47UM+mCads7j2+giRBltfwH7SNhah95rqJs3ltEs9V3pP8rYdS0QlNne+9Aj8dS/SiaIA==} + peerDependencies: + expo: '>=43.0' + expo-asset: '>=8.4' + expo-file-system: '>=11.0' + expo-gl: '>=11.0' + react: '>=19 <19.3' + react-dom: '>=19 <19.3' + react-native: '>=0.78' + three: '>=0.156' + peerDependenciesMeta: + expo: + optional: true + expo-asset: + optional: true + expo-file-system: + optional: true + expo-gl: + optional: true + react-dom: + optional: true + react-native: + optional: true + '@reactflow/background@11.3.14': resolution: {integrity: sha512-Gewd7blEVT5Lh6jqrvOgd4G6Qk17eGKQfsDXgyRSqM+CTwDqRldG2LsWN4sNeno6sbqVIC2fZ+rAUBFA9ZEUDA==} peerDependencies: @@ -1175,6 +1277,9 @@ packages: '@types/react-dom': optional: true + '@tweenjs/tween.js@23.1.3': + resolution: {integrity: 
sha512-vJmvvwFxYuGnF2axRtPYocag6Clbb5YS7kLL+SO/TeVFzHqDIWrNKYtcsPMibjDx9O+bu+psAy9NKfWklassUA==} + '@tybys/wasm-util@0.10.1': resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} @@ -1292,6 +1397,9 @@ packages: '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} + '@types/draco3d@1.4.10': + resolution: {integrity: sha512-AX22jp8Y7wwaBgAixaSvkoG4M/+PlAcm3Qs4OW8yT9DM4xUpWKeFhLueTAyZF39pviAdcDdeJoACapiAceqNcw==} + '@types/estree-jsx@1.0.5': resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} @@ -1322,14 +1430,28 @@ packages: '@types/node@22.19.9': resolution: {integrity: sha512-PD03/U8g1F9T9MI+1OBisaIARhSzeidsUjQaf51fOxrfjeiKN9bLVO06lHuHYjxdnqLWJijJHfqXPSJri2EM2A==} + '@types/offscreencanvas@2019.7.3': + resolution: {integrity: sha512-ieXiYmgSRXUDeOntE1InxjWyvEelZGP63M+cGuquuRLuIKKT1osnkXjxev9B7d1nXSug5vpunx+gNlbVxMlC9A==} + '@types/react-dom@19.2.3': resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} peerDependencies: '@types/react': ^19.2.0 + '@types/react-reconciler@0.28.9': + resolution: {integrity: sha512-HHM3nxyUZ3zAylX8ZEyrDNd2XZOnQ0D5XfunJF5FLQnZbHHYq4UWvW1QfelQNXv1ICNkwYhfxjwfnqivYB6bFg==} + peerDependencies: + '@types/react': '*' + '@types/react@19.2.13': resolution: {integrity: sha512-KkiJeU6VbYbUOp5ITMIc7kBfqlYkKA5KhEHVrGMmUUMt7NeaZg65ojdPk+FtNrBAOXNVM5QM72jnADjM+XVRAQ==} + '@types/stats.js@0.17.4': + resolution: {integrity: sha512-jIBvWWShCvlBqBNIZt0KAshWpvSjhkwkEu4ZUcASoAvhmrgAUI2t1dXrjSL4xXVLB4FznPrIsX3nKXFl/Dt4vA==} + + '@types/three@0.183.1': + resolution: {integrity: sha512-f2Pu5Hrepfgavttdye3PsH5RWyY/AvdZQwIVhrc4uNtvF7nOWJacQKcoVJn0S4f0yYbmAE6AR+ve7xDcuYtMGw==} + '@types/unist@2.0.11': resolution: {integrity: 
sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} @@ -1345,6 +1467,9 @@ packages: '@types/web-bluetooth@0.0.21': resolution: {integrity: sha512-oIQLCGWtcFZy2JW77j9k8nHzAOpqMHLQejDA48XXMWH6tjCQHz5RCFz1bzsmROyL6PUm+LLnUiI4BCn221inxA==} + '@types/webxr@0.5.24': + resolution: {integrity: sha512-h8fgEd/DpoS9CBrjEQXR+dIDraopAEfu4wYVNY2tEPwk60stPWhvZMf4Foo5FakuQ7HFZoa8WceaWFervK2Ovg==} + '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} @@ -1518,6 +1643,14 @@ packages: cpu: [x64] os: [win32] + '@use-gesture/core@10.3.1': + resolution: {integrity: sha512-WcINiDt8WjqBdUXye25anHiNxPc0VOrlT8F6LLkU6cycrOGUDyY/yyFmsg3k8i5OLvv25llc0QC45GhR/C8llw==} + + '@use-gesture/react@10.3.1': + resolution: {integrity: sha512-Yy19y6O2GJq8f7CHf7L0nxL8bf4PZCPaVOCgJrusOeFHY1LvHgYXnmnXg6N5iwAnbgbZCDjo60SiM6IPJi9C5g==} + peerDependencies: + react: '>= 16.8.0' + '@vercel/oidc@3.1.0': resolution: {integrity: sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w==} engines: {node: '>= 20'} @@ -1653,6 +1786,9 @@ packages: peerDependencies: vue: ^3.5.0 + '@webgpu/types@0.1.69': + resolution: {integrity: sha512-RPmm6kgRbI8e98zSD3RVACvnuktIja5+yLgDAkTmxLr90BEwdTXRQWNLF3ETTTyH/8mKhznZuN5AveXYFEsMGQ==} + '@xyflow/react@12.10.0': resolution: {integrity: sha512-eOtz3whDMWrB4KWVatIBrKuxECHqip6PfA8fTpaS2RUGVpiEAe+nqDKsLqkViVWxDGreq0lWX71Xth/SPAzXiw==} peerDependencies: @@ -1662,6 +1798,9 @@ packages: '@xyflow/system@0.0.74': resolution: {integrity: sha512-7v7B/PkiVrkdZzSbL+inGAo6tkR/WQHHG0/jhSvLQToCsfa8YubOGmBYd1s08tpKpihdHDZFwzQZeR69QSBb4Q==} + '@yomguithereal/helpers@1.1.1': + resolution: {integrity: sha512-UYvAq/XCA7xoh1juWDYsq3W0WywOB+pz8cgVnE1b45ZfdMhBvHDrgmSFG3jXeZSr2tMTYLGHFHON+ekG05Jebg==} + acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} 
peerDependencies: @@ -1823,6 +1962,9 @@ packages: resolution: {integrity: sha512-8VYKM3MjCa9WcaSAI3hzwhmyHVlH8tiGFwf0RlTsZPWJ1I5MkzjiudCo4KC4DxOaL/53A5B1sI/IbldNFDbsKA==} engines: {node: 20.x || 22.x || 23.x || 24.x || 25.x} + bidi-js@1.0.3: + resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==} + binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} @@ -1851,6 +1993,9 @@ packages: buffer@5.7.1: resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + buffer@6.0.3: + resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -1875,6 +2020,12 @@ packages: resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} engines: {node: '>= 6'} + camera-controls@3.1.2: + resolution: {integrity: sha512-xkxfpG2ECZ6Ww5/9+kf4mfg1VEYAoe9aDSY+IwF0UEs7qEzwy0aVRfs2grImIECs/PoBtWFrh7RXsQkwG922JA==} + engines: {node: '>=22.0.0', npm: '>=10.5.1'} + peerDependencies: + three: '>=0.126.1' + caniuse-lite@1.0.30001769: resolution: {integrity: sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg==} @@ -1916,9 +2067,15 @@ packages: chownr@1.1.4: resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} + class-variance-authority@0.7.1: + resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==} + classcat@5.0.5: resolution: {integrity: 
sha512-JhZUT7JFcQy/EzW605k/ktHtncoo9vnyW/2GspNYwFlN1C/WmjuV/xtS04e9SOkL2sTdw0VAZ2UGCcQ9lR6p6w==} + classnames@2.5.1: + resolution: {integrity: sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==} + client-only@0.0.1: resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} @@ -1956,6 +2113,11 @@ packages: crelt@1.0.6: resolution: {integrity: sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==} + cross-env@7.0.3: + resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} + engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} + hasBin: true + cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} @@ -1995,6 +2157,9 @@ packages: resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} engines: {node: '>=12'} + d3-binarytree@1.0.2: + resolution: {integrity: sha512-cElUNH+sHu95L04m92pG73t2MEJXKu+GeKUN1TJkFsu93E5W8E9Sc3kHEGJKgenGvj19m6upSn2EunvMgMD2Yw==} + d3-color@3.1.0: resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} engines: {node: '>=12'} @@ -2011,18 +2176,33 @@ packages: resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} engines: {node: '>=12'} + d3-force-3d@3.0.6: + resolution: {integrity: sha512-4tsKHUPLOVkyfEffZo1v6sFHvGFwAIIjt/W8IThbp08DYAsXZck+2pSHEG5W1+gQgEvFLdZkYvmJAbRM2EzMnA==} + engines: {node: '>=12'} + d3-format@3.1.2: resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==} engines: {node: '>=12'} + d3-hierarchy@3.1.2: + resolution: {integrity: 
sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==} + engines: {node: '>=12'} + d3-interpolate@3.0.1: resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} engines: {node: '>=12'} + d3-octree@1.1.0: + resolution: {integrity: sha512-F8gPlqpP+HwRPMO/8uOu5wjH110+6q4cgJvgJT6vlpy3BEaDIKlTZrgHKZSp/i1InRpVfh4puY/kvL6MxK930A==} + d3-path@3.1.0: resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} engines: {node: '>=12'} + d3-quadtree@3.0.1: + resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==} + engines: {node: '>=12'} + d3-scale@4.0.2: resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} engines: {node: '>=12'} @@ -2135,6 +2315,9 @@ packages: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} + detect-gpu@5.0.70: + resolution: {integrity: sha512-bqerEP1Ese6nt3rFkwPnGbsUF9a4q+gMmpTVVOEzoCyeCc+y7/RvJnQZJx1JwhgQI5Ntg0Kgat8Uu7XpBqnz1w==} + detect-libc@2.1.2: resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} @@ -2158,6 +2341,9 @@ packages: dom-accessibility-api@0.6.3: resolution: {integrity: sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==} + draco3d@1.5.7: + resolution: {integrity: sha512-m6WCKt/erDXcw+70IJXnG7M3awwQPAsZvJGX5zY7beBqpELw6RDGkYVU0W43AFxye4pDZ5i2Lbyc/NNGqwjUVQ==} + dunder-proto@1.0.1: resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} engines: {node: '>= 0.4'} @@ -2165,6 +2351,9 @@ packages: electron-to-chromium@1.5.286: resolution: {integrity: 
sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==} + ellipsize@0.6.2: + resolution: {integrity: sha512-zB4m5iEETalVrrP8RzcF0Qzqyw3MkUQ4R43NiczRAp0Hpp0+0bRdwKnoaFXyJoVJCipm2/3xc7Hkg0OOAorUPw==} + emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} @@ -2362,6 +2551,10 @@ packages: eventemitter3@5.0.4: resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==} + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + eventsource-parser@3.0.6: resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} engines: {node: '>=18.0.0'} @@ -2415,6 +2608,12 @@ packages: picomatch: optional: true + fflate@0.6.10: + resolution: {integrity: sha512-IQrh3lEPM93wVCEczc9SaAOvkmcoQn/G8Bo1e8ZPlY3X3bnAxWaBdvTdvM1hP62iZp0BXWDy4vTAy4fF0+Dlpg==} + + fflate@0.8.2: + resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} + file-entry-cache@8.0.0: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} @@ -2534,10 +2733,56 @@ packages: globrex@0.1.2: resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} + glsl-noise@0.0.0: + resolution: {integrity: sha512-b/ZCF6amfAUb7dJM/MxRs7AetQEahYzJ8PtgfrmEdtw6uyGOr+ZSGtgjFm6mfsBkxJ4d2W7kg+Nlqzqvn3Bc0w==} + gopd@1.2.0: resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} engines: {node: '>= 0.4'} + graphology-indices@0.17.0: + resolution: {integrity: 
sha512-A7RXuKQvdqSWOpn7ZVQo4S33O0vCfPBnUSf7FwE0zNCasqwZVUaCXePuWo5HBpWw68KJcwObZDHpFk6HKH6MYQ==} + peerDependencies: + graphology-types: '>=0.20.0' + + graphology-layout-forceatlas2@0.10.1: + resolution: {integrity: sha512-ogzBeF1FvWzjkikrIFwxhlZXvD2+wlY54lqhsrWprcdPjopM2J9HoMweUmIgwaTvY4bUYVimpSsOdvDv1gPRFQ==} + peerDependencies: + graphology-types: '>=0.19.0' + + graphology-layout-noverlap@0.4.2: + resolution: {integrity: sha512-13WwZSx96zim6l1dfZONcqLh3oqyRcjIBsqz2c2iJ3ohgs3605IDWjldH41Gnhh462xGB1j6VGmuGhZ2FKISXA==} + peerDependencies: + graphology-types: '>=0.19.0' + + graphology-layout@0.6.1: + resolution: {integrity: sha512-m9aMvbd0uDPffUCFPng5ibRkb2pmfNvdKjQWeZrf71RS1aOoat5874+DcyNfMeCT4aQguKC7Lj9eCbqZj/h8Ag==} + peerDependencies: + graphology-types: '>=0.19.0' + + graphology-metrics@2.4.0: + resolution: {integrity: sha512-7WOfOP+mFLCaTJx55Qg4eY+211vr1/b3D/R3biz3SXGhAaCVcWYkfabnmO4O4WBNWANEHtVnFrGgJ0kj6MM6xw==} + peerDependencies: + graphology-types: '>=0.20.0' + + graphology-shortest-path@2.1.0: + resolution: {integrity: sha512-KbT9CTkP/u72vGEJzyRr24xFC7usI9Es3LMmCPHGwQ1KTsoZjxwA9lMKxfU0syvT/w+7fZUdB/Hu2wWYcJBm6Q==} + peerDependencies: + graphology-types: '>=0.20.0' + + graphology-types@0.24.8: + resolution: {integrity: sha512-hDRKYXa8TsoZHjgEaysSRyPdT6uB78Ci8WnjgbStlQysz7xR52PInxNsmnB7IBOM1BhikxkNyCVEFgmPKnpx3Q==} + + graphology-utils@2.5.2: + resolution: {integrity: sha512-ckHg8MXrXJkOARk56ZaSCM1g1Wihe2d6iTmz1enGOz4W/l831MBCKSayeFQfowgF8wd+PQ4rlch/56Vs/VZLDQ==} + peerDependencies: + graphology-types: '>=0.23.0' + + graphology@0.26.0: + resolution: {integrity: sha512-8SSImzgUUYC89Z042s+0r/vMibY7GX/Emz4LDO5e7jYXhuoWfHISPFJYjpRLUSJGq6UQ6xlenvX1p/hJdfXuXg==} + peerDependencies: + graphology-types: '>=0.24.0' + guess-json-indent@3.0.1: resolution: {integrity: sha512-LWZ3Vr8BG7DHE3TzPYFqkhjNRw4vYgFSsv2nfMuHklAlOfiy54/EwiDQuQfFVLxENCVv20wpbjfTayooQHrEhQ==} engines: {node: '>=18.18.0'} @@ -2639,6 +2884,12 @@ packages: highlightjs-curl@1.3.0: resolution: 
{integrity: sha512-50UEfZq1KR0Lfk2Tr6xb/MUIZH3h10oNC0OTy9g7WELcs5Fgy/mKN1vEhuKTkKbdo8vr5F9GXstu2eLhApfQ3A==} + hls.js@1.6.15: + resolution: {integrity: sha512-E3a5VwgXimGHwpRGV+WxRTKeSp2DW5DI5MWv34ulL3t5UNmyJWCQ1KmLEHbYzcfThfXG8amBL+fCYPneGHC4VA==} + + hold-event@1.1.2: + resolution: {integrity: sha512-Bx0A6OBY70cs23orUWk0DuBAAeJjEbmyg8Gnye9+M8+XeWy2CcmRyfiJhTnQQz9s25r9SYjici3URy176MFs5A==} + hookable@6.0.1: resolution: {integrity: sha512-uKGyY8BuzN/a5gvzvA+3FVWo0+wUjgtfSdnmjtrOVwQCZPHpHDH2WRO3VZSOeluYrHoDCiXFffZXs8Dj1ULWtw==} @@ -2682,6 +2933,9 @@ packages: resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} engines: {node: '>= 4'} + immediate@3.0.6: + resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} + immer@10.2.0: resolution: {integrity: sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw==} @@ -2819,6 +3073,9 @@ packages: is-potential-custom-element-name@1.0.1: resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} + is-promise@2.2.2: + resolution: {integrity: sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==} + is-regex@1.2.1: resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==} engines: {node: '>= 0.4'} @@ -2869,6 +3126,11 @@ packages: resolution: {integrity: sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==} engines: {node: '>= 0.4'} + its-fine@2.0.0: + resolution: {integrity: sha512-KLViCmWx94zOvpLwSlsx6yOCeMhZYaxrJV87Po5k/FoZzcPSahvK5qJ7fYhS61sZi5ikmh2S3Hz55A2l3U69ng==} + peerDependencies: + react: ^19.0.0 + jiti@1.21.7: resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} hasBin: true @@ 
-2954,6 +3216,9 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} + lie@3.3.0: + resolution: {integrity: sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==} + lilconfig@3.1.3: resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} engines: {node: '>=14'} @@ -2991,6 +3256,12 @@ packages: resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==} hasBin: true + maath@0.10.8: + resolution: {integrity: sha512-tRvbDF0Pgqz+9XUa4jjfgAQ8/aPKmQdWXilFu2tMy4GWj4NOsx99HlULO4IeREfbO3a0sA145DZYyvXPkybm0g==} + peerDependencies: + '@types/three': '>=0.134.0' + three: '>=0.134.0' + magic-string@0.30.21: resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} @@ -3054,6 +3325,14 @@ packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} + meshline@3.3.1: + resolution: {integrity: sha512-/TQj+JdZkeSUOl5Mk2J7eLcYTLiQm2IDzmlSvYm7ov15anEcDJ92GHqqazxTSreeNgfnYu24kiEvvv0WlbCdFQ==} + peerDependencies: + three: '>=0.137' + + meshoptimizer@1.0.1: + resolution: {integrity: sha512-Vix+QlA1YYT3FwmBBZ+49cE5y/b+pRrcXKqGpS5ouh33d3lSp2PoTpCw19E0cKDFWalembrHnIaZetf27a+W2g==} + microdiff@1.5.0: resolution: {integrity: sha512-Drq+/THMvDdzRYrK0oxJmOKiC24ayUV8ahrt8l3oRK51PWt6gdtrIGrlIH3pT/lFh1z93FbAcidtsHcWbnRz8Q==} @@ -3166,6 +3445,9 @@ packages: mkdirp-classic@0.5.3: resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} + mnemonist@0.39.8: + resolution: {integrity: sha512-vyWo2K3fjrUw8YeeZ1zF0fy6Mu59RHokURlld8ymdUPjMlD9EC9ov1/YPqTgqRvUN9nTr3Gqfz29LYAmu0PHPQ==} + ms@2.1.3: resolution: {integrity: 
sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -3275,6 +3557,9 @@ packages: resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} engines: {node: '>= 0.4'} + obliterator@2.0.5: + resolution: {integrity: sha512-42CPE9AhahZRsMNslczq0ctAEtqk8Eka26QofnqC346BZdHDySk3LWka23LI7ULIw11NmltpiLagIq8gBozxTw==} + on-exit-leak-free@2.1.2: resolution: {integrity: sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==} engines: {node: '>=14.0.0'} @@ -3306,6 +3591,9 @@ packages: resolution: {integrity: sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==} engines: {node: '>=14.16'} + pandemonium@2.4.1: + resolution: {integrity: sha512-wRqjisUyiUfXowgm7MFH2rwJzKIr20rca5FsHXCMNm1W5YPP1hCtrZfgmQ62kP7OZ7Xt+cR858aB28lu5NX55g==} + parent-module@1.0.1: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} @@ -3439,6 +3727,9 @@ packages: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} + potpack@1.0.2: + resolution: {integrity: sha512-choctRBIV9EMT9WGAZHn3V7t0Z2pMQyl0EZE6pFc/6ml3ssw7Dlf/oAOvFwjm1HVsqfQN8GfeFyJ+d8tRzqueQ==} + prebuild-install@7.1.3: resolution: {integrity: sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==} engines: {node: '>=10'} @@ -3463,6 +3754,9 @@ packages: process-warning@5.0.0: resolution: {integrity: sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==} + promise-worker-transferable@1.0.4: + resolution: {integrity: sha512-bN+0ehEnrXfxV2ZQvU2PetO0n4gqBD4ulq3MI1WOPLgr7/Mg9yRQkX5+0v1vagr74ZTsl7XtzlaYDo2EuCeYJw==} + prop-types@15.8.1: resolution: {integrity: 
sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} @@ -3524,6 +3818,15 @@ packages: resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==} engines: {node: '>=0.10.0'} + react-use-measure@2.1.7: + resolution: {integrity: sha512-KrvcAo13I/60HpwGO5jpW7E9DfusKyLPLvuHlUyP5zqnmAPhNc6qTRjUQrdTADl0lpPpDVU2/Gg51UlOGHXbdg==} + peerDependencies: + react: '>=16.13' + react-dom: '>=16.13' + peerDependenciesMeta: + react-dom: + optional: true + react@19.2.4: resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} engines: {node: '>=0.10.0'} @@ -3545,6 +3848,13 @@ packages: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} + reagraph@4.30.8: + resolution: {integrity: sha512-DXmG2lAM5k7LQiTQJ4sad8Ks8Q1c98303USwwN+mxpJ5GelBYLjMVG+DPct4CE8ezON1WUoE8PL3RUXiEua22Q==} + engines: {node: ^20.19.0 || >=22.12.0} + peerDependencies: + react: '>=16' + react-dom: '>=16' + real-require@0.2.0: resolution: {integrity: sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==} engines: {node: '>= 12.13.0'} @@ -3763,6 +4073,15 @@ packages: stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + stats-gl@2.4.2: + resolution: {integrity: sha512-g5O9B0hm9CvnM36+v7SFl39T7hmAlv541tU81ME8YeSb3i1CIP5/QdDeSB3A0la0bKNHpxpwxOVRo2wFTYEosQ==} + peerDependencies: + '@types/three': '*' + three: '*' + + stats.js@0.17.0: + resolution: {integrity: sha512-hNKz8phvYLPEcRkeG1rsGmV5ChMjKDAWU7/OJJdDErPBNChQXxCo3WZurGpnWc6gZhAzEPFad1aVgyOANH1sMw==} + std-env@3.10.0: resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} @@ -3870,6 +4189,11 @@ packages: resolution: 
{integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + suspend-react@0.1.3: + resolution: {integrity: sha512-aqldKgX9aZqpoDp3e8/BZ8Dm7x1pJl+qI3ZKxDN0i/IQTWUwBx/ManmlVJ3wowqbno6c2bmiIfs+Um6LbsjJyQ==} + peerDependencies: + react: '>=17.0' + swrv@1.1.0: resolution: {integrity: sha512-pjllRDr2s0iTwiE5Isvip51dZGR7GjLH1gCSVyE8bQnbAx6xackXsFdojau+1O5u98yHF5V73HQGOFxKUXO9gQ==} peerDependencies: @@ -3911,6 +4235,19 @@ packages: resolution: {integrity: sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA==} engines: {node: '>=20'} + three-mesh-bvh@0.8.3: + resolution: {integrity: sha512-4G5lBaF+g2auKX3P0yqx+MJC6oVt6sB5k+CchS6Ob0qvH0YIhuUk1eYr7ktsIpY+albCqE80/FVQGV190PmiAg==} + peerDependencies: + three: '>= 0.159.0' + + three-stdlib@2.36.1: + resolution: {integrity: sha512-XyGQrFmNQ5O/IoKm556ftwKsBg11TIb301MB5dWNicziQBEs2g3gtOYIf7pFiLa0zI2gUwhtCjv9fmjnxKZ1Cg==} + peerDependencies: + three: '>=0.128.0' + + three@0.180.0: + resolution: {integrity: sha512-o+qycAMZrh+TsE01GqWUxUIKR1AL0S8pq7zDkYOQw8GqfX8b8VoCKYUoHbhiX5j+7hr8XsuHDVU6+gkQJQKg9w==} + time-span@5.1.0: resolution: {integrity: sha512-75voc/9G4rDIJleOo4jPvN4/YC4GRZrY8yy1uU4lwrB3XEQbWve8zXoO5No4eFrGcTAMYyoY67p8jRQdtA1HbA==} engines: {node: '>=12'} @@ -3962,6 +4299,19 @@ packages: trim-lines@3.0.1: resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + troika-three-text@0.52.4: + resolution: {integrity: sha512-V50EwcYGruV5rUZ9F4aNsrytGdKcXKALjEtQXIOBfhVoZU9VAqZNIoGQ3TMiooVqFAbR1w15T+f+8gkzoFzawg==} + peerDependencies: + three: '>=0.125.0' + + troika-three-utils@0.52.4: + resolution: {integrity: sha512-NORAStSVa/BDiG52Mfudk4j1FG4jC4ILutB3foPnfGbOeIs9+G5vZLa0pnmnaftZUGm4UwSoqEpWdqvC7zms3A==} + peerDependencies: + three: '>=0.125.0' + + troika-worker-utils@0.52.0: + resolution: {integrity: 
sha512-W1CpvTHykaPH5brv5VHLfQo9D1OYuo0cSBEUQFFT/nBUzM8iD6Lq2/tgG/f1OelbAS1WtaTPQzE5uM49egnngw==} + trough@2.2.0: resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} @@ -4001,6 +4351,9 @@ packages: tunnel-agent@0.6.0: resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} + tunnel-rat@0.1.2: + resolution: {integrity: sha512-lR5VHmkPhzdhrM092lI2nACsLO4QubF0/yoOhzX7c+wIpbN1GjHNzCc91QlpxBi+cnx8vVJ+Ur6vL5cEoQPFpQ==} + type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -4092,6 +4445,10 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + utility-types@3.11.0: + resolution: {integrity: sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==} + engines: {node: '>= 4'} + vfile-location@5.0.3: resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} @@ -4216,6 +4573,12 @@ packages: web-worker@1.5.0: resolution: {integrity: sha512-RiMReJrTAiA+mBjGONMnjVDP2u3p9R1vkcGz6gDIrOMT3oGuYwX2WRMYI9ipkphSuE5XKEhydbhNEJh4NY9mlw==} + webgl-constants@1.1.1: + resolution: {integrity: sha512-LkBXKjU5r9vAW7Gcu3T5u+5cvSvh5WwINdr0C+9jpzVB41cjQAP5ePArDtk/WHYdVj0GefCgM73BA7FlIiNtdg==} + + webgl-sdf-generator@1.1.1: + resolution: {integrity: sha512-9Z0JcMTFxeE+b2x1LJTdnaT8rT8aEp7MVxkNwoycNmJWwPdzoXzMh0BjJSh/AEFP+KPYZUli814h8bJZFIZ2jA==} + webidl-conversions@7.0.0: resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} engines: {node: '>=12'} @@ -4339,6 +4702,24 @@ packages: use-sync-external-store: optional: true + zustand@5.0.8: + resolution: {integrity: 
sha512-gyPKpIaxY9XcO2vSMrLbiER7QMAMGOQZVRdJ6Zi782jkbzZygq5GI9nG8g+sMgitRtndwaBSl7uiqC49o1SSiw==} + engines: {node: '>=12.20.0'} + peerDependencies: + '@types/react': '>=18.0.0' + immer: '>=9.0.6' + react: '>=18.0.0' + use-sync-external-store: '>=1.2.0' + peerDependenciesMeta: + '@types/react': + optional: true + immer: + optional: true + react: + optional: true + use-sync-external-store: + optional: true + zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} @@ -4611,6 +4992,8 @@ snapshots: '@csstools/css-tokenizer@3.0.4': {} + '@dimforge/rapier3d-compat@0.12.0': {} + '@emnapi/core@1.8.1': dependencies: '@emnapi/wasi-threads': 1.1.0 @@ -4954,6 +5337,13 @@ snapshots: '@marijn/find-cluster-break@1.0.2': {} + '@mediapipe/tasks-vision@0.10.17': {} + + '@monogrid/gainmap-js@3.4.0(three@0.180.0)': + dependencies: + promise-worker-transferable: 1.0.4 + three: 0.180.0 + '@napi-rs/wasm-runtime@0.2.12': dependencies: '@emnapi/core': 1.8.1 @@ -5015,6 +5405,105 @@ snapshots: dependencies: playwright: 1.58.2 + '@radix-ui/react-compose-refs@1.1.2(@types/react@19.2.13)(react@19.2.4)': + dependencies: + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.13 + + '@radix-ui/react-slot@1.2.4(@types/react@19.2.13)(react@19.2.4)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.13)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.13 + + '@react-spring/animated@10.0.3(react@19.2.4)': + dependencies: + '@react-spring/shared': 10.0.3(react@19.2.4) + '@react-spring/types': 10.0.3 + react: 19.2.4 + + '@react-spring/core@10.0.3(react@19.2.4)': + dependencies: + '@react-spring/animated': 10.0.3(react@19.2.4) + '@react-spring/shared': 10.0.3(react@19.2.4) + '@react-spring/types': 10.0.3 + react: 19.2.4 + + '@react-spring/rafz@10.0.3': {} + + '@react-spring/shared@10.0.3(react@19.2.4)': + dependencies: + '@react-spring/rafz': 10.0.3 + 
'@react-spring/types': 10.0.3 + react: 19.2.4 + + '@react-spring/three@10.0.3(@react-three/fiber@9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0))(react@19.2.4)(three@0.180.0)': + dependencies: + '@react-spring/animated': 10.0.3(react@19.2.4) + '@react-spring/core': 10.0.3(react@19.2.4) + '@react-spring/shared': 10.0.3(react@19.2.4) + '@react-spring/types': 10.0.3 + '@react-three/fiber': 9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0) + react: 19.2.4 + three: 0.180.0 + + '@react-spring/types@10.0.3': {} + + '@react-three/drei@10.7.7(@react-three/fiber@9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0))(@types/react@19.2.13)(@types/three@0.183.1)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0)': + dependencies: + '@babel/runtime': 7.28.6 + '@mediapipe/tasks-vision': 0.10.17 + '@monogrid/gainmap-js': 3.4.0(three@0.180.0) + '@react-three/fiber': 9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0) + '@use-gesture/react': 10.3.1(react@19.2.4) + camera-controls: 3.1.2(three@0.180.0) + cross-env: 7.0.3 + detect-gpu: 5.0.70 + glsl-noise: 0.0.0 + hls.js: 1.6.15 + maath: 0.10.8(@types/three@0.183.1)(three@0.180.0) + meshline: 3.3.1(three@0.180.0) + react: 19.2.4 + stats-gl: 2.4.2(@types/three@0.183.1)(three@0.180.0) + stats.js: 0.17.0 + suspend-react: 0.1.3(react@19.2.4) + three: 0.180.0 + three-mesh-bvh: 0.8.3(three@0.180.0) + three-stdlib: 2.36.1(three@0.180.0) + troika-three-text: 0.52.4(three@0.180.0) + tunnel-rat: 0.1.2(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4) + use-sync-external-store: 1.6.0(react@19.2.4) + utility-types: 3.11.0 + zustand: 5.0.11(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) + optionalDependencies: + react-dom: 19.2.4(react@19.2.4) + transitivePeerDependencies: 
+ - '@types/react' + - '@types/three' + - immer + + '@react-three/fiber@9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0)': + dependencies: + '@babel/runtime': 7.28.6 + '@types/webxr': 0.5.24 + base64-js: 1.5.1 + buffer: 6.0.3 + its-fine: 2.0.0(@types/react@19.2.13)(react@19.2.4) + react: 19.2.4 + react-use-measure: 2.1.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + scheduler: 0.27.0 + suspend-react: 0.1.3(react@19.2.4) + three: 0.180.0 + use-sync-external-store: 1.6.0(react@19.2.4) + zustand: 5.0.11(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) + optionalDependencies: + react-dom: 19.2.4(react@19.2.4) + transitivePeerDependencies: + - '@types/react' + - immer + '@reactflow/background@11.3.14(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@reactflow/core': 11.11.4(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -5612,6 +6101,8 @@ snapshots: '@types/react': 19.2.13 '@types/react-dom': 19.2.3(@types/react@19.2.13) + '@tweenjs/tween.js@23.1.3': {} + '@tybys/wasm-util@0.10.1': dependencies: tslib: 2.8.1 @@ -5765,6 +6256,8 @@ snapshots: dependencies: '@types/ms': 2.1.0 + '@types/draco3d@1.4.10': {} + '@types/estree-jsx@1.0.5': dependencies: '@types/estree': 1.0.8 @@ -5793,14 +6286,32 @@ snapshots: dependencies: undici-types: 6.21.0 + '@types/offscreencanvas@2019.7.3': {} + '@types/react-dom@19.2.3(@types/react@19.2.13)': dependencies: '@types/react': 19.2.13 + '@types/react-reconciler@0.28.9(@types/react@19.2.13)': + dependencies: + '@types/react': 19.2.13 + '@types/react@19.2.13': dependencies: csstype: 3.2.3 + '@types/stats.js@0.17.4': {} + + '@types/three@0.183.1': + dependencies: + '@dimforge/rapier3d-compat': 0.12.0 + '@tweenjs/tween.js': 23.1.3 + '@types/stats.js': 0.17.4 + '@types/webxr': 0.5.24 + '@webgpu/types': 0.1.69 + fflate: 0.8.2 + meshoptimizer: 1.0.1 + 
'@types/unist@2.0.11': {} '@types/unist@3.0.3': {} @@ -5811,6 +6322,8 @@ snapshots: '@types/web-bluetooth@0.0.21': {} + '@types/webxr@0.5.24': {} + '@types/ws@8.18.1': dependencies: '@types/node': 22.19.9 @@ -5973,6 +6486,13 @@ snapshots: '@unrs/resolver-binding-win32-x64-msvc@1.11.1': optional: true + '@use-gesture/core@10.3.1': {} + + '@use-gesture/react@10.3.1(react@19.2.4)': + dependencies: + '@use-gesture/core': 10.3.1 + react: 19.2.4 + '@vercel/oidc@3.1.0': {} '@vitejs/plugin-react@4.7.0(vite@5.4.21(@types/node@22.19.9))': @@ -6124,6 +6644,8 @@ snapshots: dependencies: vue: 3.5.29(typescript@5.9.3) + '@webgpu/types@0.1.69': {} + '@xyflow/react@12.10.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@xyflow/system': 0.0.74 @@ -6147,6 +6669,8 @@ snapshots: d3-selection: 3.0.0 d3-zoom: 3.0.0 + '@yomguithereal/helpers@1.1.1': {} + acorn-jsx@5.3.2(acorn@8.15.0): dependencies: acorn: 8.15.0 @@ -6319,6 +6843,10 @@ snapshots: bindings: 1.5.0 prebuild-install: 7.1.3 + bidi-js@1.0.3: + dependencies: + require-from-string: 2.0.2 + binary-extensions@2.3.0: {} bindings@1.5.0: @@ -6357,6 +6885,11 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 + buffer@6.0.3: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + cac@6.7.14: {} call-bind-apply-helpers@1.0.2: @@ -6380,6 +6913,10 @@ snapshots: camelcase-css@2.0.1: {} + camera-controls@3.1.2(three@0.180.0): + dependencies: + three: 0.180.0 + caniuse-lite@1.0.30001769: {} ccount@2.0.1: {} @@ -6423,8 +6960,14 @@ snapshots: chownr@1.1.4: {} + class-variance-authority@0.7.1: + dependencies: + clsx: 2.1.1 + classcat@5.0.5: {} + classnames@2.5.1: {} + client-only@0.0.1: {} clsx@2.1.1: {} @@ -6449,6 +6992,10 @@ snapshots: crelt@1.0.6: {} + cross-env@7.0.3: + dependencies: + cross-spawn: 7.0.6 + cross-spawn@7.0.6: dependencies: path-key: 3.1.1 @@ -6482,6 +7029,8 @@ snapshots: dependencies: internmap: 2.0.3 + d3-binarytree@1.0.2: {} + d3-color@3.1.0: {} d3-dispatch@3.0.1: {} @@ -6493,14 
+7042,28 @@ snapshots: d3-ease@3.0.1: {} + d3-force-3d@3.0.6: + dependencies: + d3-binarytree: 1.0.2 + d3-dispatch: 3.0.1 + d3-octree: 1.1.0 + d3-quadtree: 3.0.1 + d3-timer: 3.0.1 + d3-format@3.1.2: {} + d3-hierarchy@3.1.2: {} + d3-interpolate@3.0.1: dependencies: d3-color: 3.1.0 + d3-octree@1.1.0: {} + d3-path@3.1.0: {} + d3-quadtree@3.0.1: {} + d3-scale@4.0.2: dependencies: d3-array: 3.2.4 @@ -6611,6 +7174,10 @@ snapshots: dequal@2.0.3: {} + detect-gpu@5.0.70: + dependencies: + webgl-constants: 1.1.1 + detect-libc@2.1.2: {} devlop@1.1.0: @@ -6629,6 +7196,8 @@ snapshots: dom-accessibility-api@0.6.3: {} + draco3d@1.5.7: {} + dunder-proto@1.0.1: dependencies: call-bind-apply-helpers: 1.0.2 @@ -6637,6 +7206,8 @@ snapshots: electron-to-chromium@1.5.286: {} + ellipsize@0.6.2: {} + emoji-regex@9.2.2: {} end-of-stream@1.4.5: @@ -6997,6 +7568,8 @@ snapshots: eventemitter3@5.0.4: {} + events@3.3.0: {} + eventsource-parser@3.0.6: {} expand-template@2.0.3: {} @@ -7041,6 +7614,10 @@ snapshots: optionalDependencies: picomatch: 4.0.3 + fflate@0.6.10: {} + + fflate@0.8.2: {} + file-entry-cache@8.0.0: dependencies: flat-cache: 4.0.1 @@ -7155,8 +7732,60 @@ snapshots: globrex@0.1.2: {} + glsl-noise@0.0.0: {} + gopd@1.2.0: {} + graphology-indices@0.17.0(graphology-types@0.24.8): + dependencies: + graphology-types: 0.24.8 + graphology-utils: 2.5.2(graphology-types@0.24.8) + mnemonist: 0.39.8 + + graphology-layout-forceatlas2@0.10.1(graphology-types@0.24.8): + dependencies: + graphology-types: 0.24.8 + graphology-utils: 2.5.2(graphology-types@0.24.8) + + graphology-layout-noverlap@0.4.2(graphology-types@0.24.8): + dependencies: + graphology-types: 0.24.8 + graphology-utils: 2.5.2(graphology-types@0.24.8) + + graphology-layout@0.6.1(graphology-types@0.24.8): + dependencies: + graphology-types: 0.24.8 + graphology-utils: 2.5.2(graphology-types@0.24.8) + pandemonium: 2.4.1 + + graphology-metrics@2.4.0(graphology-types@0.24.8): + dependencies: + graphology-indices: 
0.17.0(graphology-types@0.24.8) + graphology-shortest-path: 2.1.0(graphology-types@0.24.8) + graphology-types: 0.24.8 + graphology-utils: 2.5.2(graphology-types@0.24.8) + mnemonist: 0.39.8 + pandemonium: 2.4.1 + + graphology-shortest-path@2.1.0(graphology-types@0.24.8): + dependencies: + '@yomguithereal/helpers': 1.1.1 + graphology-indices: 0.17.0(graphology-types@0.24.8) + graphology-types: 0.24.8 + graphology-utils: 2.5.2(graphology-types@0.24.8) + mnemonist: 0.39.8 + + graphology-types@0.24.8: {} + + graphology-utils@2.5.2(graphology-types@0.24.8): + dependencies: + graphology-types: 0.24.8 + + graphology@0.26.0(graphology-types@0.24.8): + dependencies: + events: 3.3.0 + graphology-types: 0.24.8 + guess-json-indent@3.0.1: {} has-bigints@1.1.0: {} @@ -7345,6 +7974,10 @@ snapshots: highlightjs-curl@1.3.0: {} + hls.js@1.6.15: {} + + hold-event@1.1.2: {} + hookable@6.0.1: {} html-encoding-sniffer@4.0.0: @@ -7385,6 +8018,8 @@ snapshots: ignore@7.0.5: {} + immediate@3.0.6: {} + immer@10.2.0: {} immer@11.1.3: {} @@ -7513,6 +8148,8 @@ snapshots: is-potential-custom-element-name@1.0.1: {} + is-promise@2.2.2: {} + is-regex@1.2.1: dependencies: call-bound: 1.0.4 @@ -7567,6 +8204,13 @@ snapshots: has-symbols: 1.1.0 set-function-name: 2.0.2 + its-fine@2.0.0(@types/react@19.2.13)(react@19.2.4): + dependencies: + '@types/react-reconciler': 0.28.9(@types/react@19.2.13) + react: 19.2.4 + transitivePeerDependencies: + - '@types/react' + jiti@1.21.7: {} joycon@3.1.1: {} @@ -7652,6 +8296,10 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 + lie@3.3.0: + dependencies: + immediate: 3.0.6 + lilconfig@3.1.3: {} lines-and-columns@1.2.4: {} @@ -7684,6 +8332,11 @@ snapshots: lz-string@1.5.0: {} + maath@0.10.8(@types/three@0.183.1)(three@0.180.0): + dependencies: + '@types/three': 0.183.1 + three: 0.180.0 + magic-string@0.30.21: dependencies: '@jridgewell/sourcemap-codec': 1.5.5 @@ -7853,6 +8506,12 @@ snapshots: merge2@1.4.1: {} + meshline@3.3.1(three@0.180.0): + dependencies: + three: 
0.180.0 + + meshoptimizer@1.0.1: {} + microdiff@1.5.0: {} micromark-core-commonmark@2.0.3: @@ -8067,6 +8726,10 @@ snapshots: mkdirp-classic@0.5.3: {} + mnemonist@0.39.8: + dependencies: + obliterator: 2.0.5 + ms@2.1.3: {} mz@2.7.0: @@ -8174,6 +8837,8 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 + obliterator@2.0.5: {} + on-exit-leak-free@2.1.2: {} once@1.4.0: @@ -8209,6 +8874,10 @@ snapshots: p-timeout@6.1.4: {} + pandemonium@2.4.1: + dependencies: + mnemonist: 0.39.8 + parent-module@1.0.1: dependencies: callsites: 3.1.0 @@ -8341,6 +9010,8 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 + potpack@1.0.2: {} + prebuild-install@7.1.3: dependencies: detect-libc: 2.1.2 @@ -8372,6 +9043,11 @@ snapshots: process-warning@5.0.0: {} + promise-worker-transferable@1.0.4: + dependencies: + is-promise: 2.2.2 + lie: 3.3.0 + prop-types@15.8.1: dependencies: loose-envify: 1.4.0 @@ -8453,6 +9129,12 @@ snapshots: react-refresh@0.17.0: {} + react-use-measure@2.1.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + optionalDependencies: + react-dom: 19.2.4(react@19.2.4) + react@19.2.4: {} reactflow@11.11.4(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4): @@ -8483,6 +9165,43 @@ snapshots: dependencies: picomatch: 2.3.1 + reagraph@4.30.8(@types/react@19.2.13)(@types/three@0.183.1)(graphology-types@0.24.8)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)): + dependencies: + '@react-spring/three': 10.0.3(@react-three/fiber@9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0))(react@19.2.4)(three@0.180.0) + '@react-three/drei': 10.7.7(@react-three/fiber@9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0))(@types/react@19.2.13)(@types/three@0.183.1)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0) + '@react-three/fiber': 
9.5.0(@types/react@19.2.13)(immer@11.1.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(three@0.180.0) + '@use-gesture/react': 10.3.1(react@19.2.4) + camera-controls: 3.1.2(three@0.180.0) + classnames: 2.5.1 + d3-array: 3.2.4 + d3-force-3d: 3.0.6 + d3-hierarchy: 3.1.2 + d3-scale: 4.0.2 + ellipsize: 0.6.2 + graphology: 0.26.0(graphology-types@0.24.8) + graphology-layout: 0.6.1(graphology-types@0.24.8) + graphology-layout-forceatlas2: 0.10.1(graphology-types@0.24.8) + graphology-layout-noverlap: 0.4.2(graphology-types@0.24.8) + graphology-metrics: 2.4.0(graphology-types@0.24.8) + graphology-shortest-path: 2.1.0(graphology-types@0.24.8) + hold-event: 1.1.2 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + three: 0.180.0 + three-stdlib: 2.36.1(three@0.180.0) + zustand: 5.0.8(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) + transitivePeerDependencies: + - '@types/react' + - '@types/three' + - expo + - expo-asset + - expo-file-system + - expo-gl + - graphology-types + - immer + - react-native + - use-sync-external-store + real-require@0.2.0: {} recharts@3.7.0(@types/react@19.2.13)(react-dom@19.2.4(react@19.2.4))(react-is@17.0.2)(react@19.2.4)(redux@5.0.1): @@ -8819,6 +9538,13 @@ snapshots: stackback@0.0.2: {} + stats-gl@2.4.2(@types/three@0.183.1)(three@0.180.0): + dependencies: + '@types/three': 0.183.1 + three: 0.180.0 + + stats.js@0.17.0: {} + std-env@3.10.0: {} stop-iteration-iterator@1.1.0: @@ -8947,6 +9673,10 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} + suspend-react@0.1.3(react@19.2.4): + dependencies: + react: 19.2.4 + swrv@1.1.0(vue@3.5.29(typescript@5.9.3)): dependencies: vue: 3.5.29(typescript@5.9.3) @@ -9014,6 +9744,22 @@ snapshots: dependencies: real-require: 0.2.0 + three-mesh-bvh@0.8.3(three@0.180.0): + dependencies: + three: 0.180.0 + + three-stdlib@2.36.1(three@0.180.0): + dependencies: + '@types/draco3d': 1.4.10 + '@types/offscreencanvas': 2019.7.3 + '@types/webxr': 0.5.24 + draco3d: 1.5.7 + 
fflate: 0.6.10 + potpack: 1.0.2 + three: 0.180.0 + + three@0.180.0: {} + time-span@5.1.0: dependencies: convert-hrtime: 5.0.0 @@ -9055,6 +9801,20 @@ snapshots: trim-lines@3.0.1: {} + troika-three-text@0.52.4(three@0.180.0): + dependencies: + bidi-js: 1.0.3 + three: 0.180.0 + troika-three-utils: 0.52.4(three@0.180.0) + troika-worker-utils: 0.52.0 + webgl-sdf-generator: 1.1.1 + + troika-three-utils@0.52.4(three@0.180.0): + dependencies: + three: 0.180.0 + + troika-worker-utils@0.52.0: {} + trough@2.2.0: {} truncate-json@3.0.1: @@ -9088,6 +9848,14 @@ snapshots: dependencies: safe-buffer: 5.2.1 + tunnel-rat@0.1.2(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4): + dependencies: + zustand: 4.5.7(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4) + transitivePeerDependencies: + - '@types/react' + - immer + - react + type-check@0.4.0: dependencies: prelude-ls: 1.2.1 @@ -9235,6 +10003,8 @@ snapshots: util-deprecate@1.0.2: {} + utility-types@3.11.0: {} + vfile-location@5.0.3: dependencies: '@types/unist': 3.0.3 @@ -9374,6 +10144,10 @@ snapshots: web-worker@1.5.0: {} + webgl-constants@1.1.1: {} + + webgl-sdf-generator@1.1.1: {} + webidl-conversions@7.0.0: {} whatwg-encoding@3.1.1: @@ -9474,4 +10248,11 @@ snapshots: react: 19.2.4 use-sync-external-store: 1.6.0(react@19.2.4) + zustand@5.0.8(@types/react@19.2.13)(immer@11.1.3)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)): + optionalDependencies: + '@types/react': 19.2.13 + immer: 11.1.3 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) + zwitch@2.0.4: {} diff --git a/public/brand/claude-logo.png b/public/brand/claude-logo.png new file mode 100644 index 0000000..5057173 Binary files /dev/null and b/public/brand/claude-logo.png differ diff --git a/public/brand/codex-logo.png b/public/brand/codex-logo.png new file mode 100644 index 0000000..3ea92d9 Binary files /dev/null and b/public/brand/codex-logo.png differ diff --git a/public/brand/hermes-logo.png b/public/brand/hermes-logo.png new file mode 100644 
index 0000000..1f17bee Binary files /dev/null and b/public/brand/hermes-logo.png differ diff --git a/public/brand/mc-logo-128.png b/public/brand/mc-logo-128.png new file mode 100644 index 0000000..3bd165f Binary files /dev/null and b/public/brand/mc-logo-128.png differ diff --git a/public/brand/mc-logo-256.png b/public/brand/mc-logo-256.png new file mode 100644 index 0000000..7dbc2b6 Binary files /dev/null and b/public/brand/mc-logo-256.png differ diff --git a/public/brand/mc-logo-512.png b/public/brand/mc-logo-512.png new file mode 100644 index 0000000..b84af74 Binary files /dev/null and b/public/brand/mc-logo-512.png differ diff --git a/public/brand/openclaw-logo.png b/public/brand/openclaw-logo.png new file mode 100644 index 0000000..0394c6f Binary files /dev/null and b/public/brand/openclaw-logo.png differ diff --git a/public/mc-logo.png b/public/mc-logo.png new file mode 100644 index 0000000..edae4c6 Binary files /dev/null and b/public/mc-logo.png differ diff --git a/public/mc.png b/public/mc.png new file mode 100644 index 0000000..6ee475c Binary files /dev/null and b/public/mc.png differ diff --git a/scripts/check-node-version.mjs b/scripts/check-node-version.mjs new file mode 100644 index 0000000..9f415a7 --- /dev/null +++ b/scripts/check-node-version.mjs @@ -0,0 +1,16 @@ +#!/usr/bin/env node + +const REQUIRED_NODE_MAJOR = 22 + +const current = process.versions.node +const currentMajor = Number.parseInt(current.split('.')[0] || '', 10) + +if (currentMajor !== REQUIRED_NODE_MAJOR) { + console.error( + [ + `error: Mission Control requires Node ${REQUIRED_NODE_MAJOR}.x, but found ${current}.`, + 'use `nvm use 22` (or your version manager equivalent) before installing, building, or starting the app.', + ].join('\n') + ) + process.exit(1) +} diff --git a/scripts/deploy-standalone.sh b/scripts/deploy-standalone.sh new file mode 100644 index 0000000..7c5d9db --- /dev/null +++ b/scripts/deploy-standalone.sh @@ -0,0 +1,251 @@ +#!/usr/bin/env bash + +set -euo pipefail 
+ +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" + +BRANCH="${BRANCH:-$(git -C "$PROJECT_ROOT" branch --show-current)}" +PORT="${PORT:-3000}" +LISTEN_HOST="${MC_HOSTNAME:-0.0.0.0}" +LOG_PATH="${LOG_PATH:-/tmp/mc.log}" +VERIFY_HOST="${VERIFY_HOST:-127.0.0.1}" +PID_FILE="${PID_FILE:-$PROJECT_ROOT/.next/standalone/server.pid}" +SOURCE_DATA_DIR="$PROJECT_ROOT/.data" +BUILD_DATA_DIR="$PROJECT_ROOT/.next/build-runtime" +NODE_VERSION_FILE="$PROJECT_ROOT/.nvmrc" + +use_project_node() { + if [[ ! -f "$NODE_VERSION_FILE" ]]; then + return + fi + + if [[ -z "${NVM_DIR:-}" ]]; then + export NVM_DIR="$HOME/.nvm" + fi + + if [[ -s "$NVM_DIR/nvm.sh" ]]; then + # shellcheck disable=SC1090 + source "$NVM_DIR/nvm.sh" + nvm use >/dev/null + fi +} + +list_listener_pids() { + local combined="" + + if command -v lsof >/dev/null 2>&1; then + combined+="$( + lsof -tiTCP:"$PORT" -sTCP:LISTEN 2>/dev/null || true + )"$'\n' + fi + + if command -v ss >/dev/null 2>&1; then + combined+="$( + ss -ltnp 2>/dev/null | awk -v port=":$PORT" ' + index($4, port) || index($5, port) { + if (match($0, /pid=[0-9]+/)) { + print substr($0, RSTART + 4, RLENGTH - 4) + } + } + ' + )"$'\n' + fi + + printf '%s\n' "$combined" | awk -v port="$PORT" ' + /^[0-9]+$/ { + seen[$0] = 1 + } + END { + for (pid in seen) { + print pid + } + } + ' | sort -u +} + +stop_pid() { + local pid="$1" + local label="$2" + + if [[ -z "$pid" ]] || ! kill -0 "$pid" 2>/dev/null; then + return + fi + + echo "==> stopping $label (pid=$pid)" + kill "$pid" 2>/dev/null || true + + for _ in $(seq 1 10); do + if ! 
kill -0 "$pid" 2>/dev/null; then + return + fi + sleep 1 + done + + echo "==> force stopping $label (pid=$pid)" + kill -9 "$pid" 2>/dev/null || true +} + +stop_existing_server() { + local -a candidate_pids=() + + if [[ -f "$PID_FILE" ]]; then + candidate_pids+=("$(cat "$PID_FILE" 2>/dev/null || true)") + fi + + while IFS= read -r pid; do + candidate_pids+=("$pid") + done < <(list_listener_pids) + + if command -v pgrep >/dev/null 2>&1; then + while IFS= read -r pid; do + candidate_pids+=("$pid") + done < <(pgrep -f "$PROJECT_ROOT/.next/standalone/server.js" || true) + fi + + if [[ ${#candidate_pids[@]} -eq 0 ]]; then + return + fi + + declare -A seen=() + for pid in "${candidate_pids[@]}"; do + [[ -z "$pid" ]] && continue + [[ -n "${seen[$pid]:-}" ]] && continue + seen[$pid]=1 + stop_pid "$pid" "standalone server" + done + + for _ in $(seq 1 10); do + if [[ -z "$(list_listener_pids | head -n1)" ]]; then + rm -f "$PID_FILE" + return + fi + sleep 1 + done + + echo "error: port $PORT is still in use after stopping existing server" >&2 + exit 1 +} + +load_env() { + set -a + if [[ -f .env ]]; then + # shellcheck disable=SC1091 + source .env + fi + if [[ -f .env.local ]]; then + # shellcheck disable=SC1091 + source .env.local + fi + set +a +} + +migrate_runtime_data_dir() { + local target_data_dir="${MISSION_CONTROL_DATA_DIR:-$SOURCE_DATA_DIR}" + + if [[ "$target_data_dir" == "$SOURCE_DATA_DIR" ]]; then + return + fi + + mkdir -p "$target_data_dir" + + local source_db="$SOURCE_DATA_DIR/mission-control.db" + local target_db="$target_data_dir/mission-control.db" + + if [[ -s "$target_db" || ! 
-s "$source_db" ]]; then + return + fi + + echo "==> migrating runtime data to $target_data_dir" + if command -v sqlite3 >/dev/null 2>&1; then + local target_db_tmp="$target_db.tmp" + rm -f "$target_db_tmp" + sqlite3 "$source_db" ".backup '$target_db_tmp'" + mv "$target_db_tmp" "$target_db" + + if [[ -f "$SOURCE_DATA_DIR/mission-control-tokens.json" ]]; then + cp "$SOURCE_DATA_DIR/mission-control-tokens.json" "$target_data_dir/mission-control-tokens.json" + fi + if [[ -d "$SOURCE_DATA_DIR/backups" ]]; then + rsync -a "$SOURCE_DATA_DIR/backups"/ "$target_data_dir/backups"/ + fi + else + rsync -a \ + --exclude 'mission-control.db-shm' \ + --exclude 'mission-control.db-wal' \ + --exclude '*.db-shm' \ + --exclude '*.db-wal' \ + "$SOURCE_DATA_DIR"/ "$target_data_dir"/ + fi +} + +cd "$PROJECT_ROOT" +use_project_node + +echo "==> fetching branch $BRANCH" +git fetch origin "$BRANCH" +git merge --ff-only FETCH_HEAD + +load_env +migrate_runtime_data_dir + +echo "==> stopping existing standalone server before rebuild" +stop_existing_server + +echo "==> installing dependencies" +pnpm install --frozen-lockfile + +echo "==> rebuilding standalone bundle" +rm -rf .next +mkdir -p "$BUILD_DATA_DIR" +MISSION_CONTROL_DATA_DIR="$BUILD_DATA_DIR" \ +MISSION_CONTROL_DB_PATH="$BUILD_DATA_DIR/mission-control.db" \ +MISSION_CONTROL_TOKENS_PATH="$BUILD_DATA_DIR/mission-control-tokens.json" \ +pnpm build + +echo "==> starting standalone server" +load_env + +PORT="$PORT" HOSTNAME="$LISTEN_HOST" nohup bash "$PROJECT_ROOT/scripts/start-standalone.sh" >"$LOG_PATH" 2>&1 & +new_pid=$! 
+echo "$new_pid" > "$PID_FILE" + +echo "==> verifying process and static assets" +for _ in $(seq 1 20); do + if curl -fsS "http://$VERIFY_HOST:$PORT/login" >/dev/null 2>&1; then + break + fi + sleep 1 +done + +login_html="$(curl -fsS "http://$VERIFY_HOST:$PORT/login")" +css_path="$(printf '%s\n' "$login_html" | sed -n 's|.*\(/_next/static/chunks/[^"]*\.css\).*|\1|p' | sed -n '1p')" +if [[ -z "${css_path:-}" ]]; then + echo "error: no css asset found in rendered login HTML" >&2 + exit 1 +fi + +listener_pid="$(list_listener_pids | head -n1)" +if [[ -z "${listener_pid:-}" ]]; then + echo "error: no listener detected on port $PORT after startup" >&2 + exit 1 +fi +if [[ "$listener_pid" != "$new_pid" ]]; then + echo "error: port $PORT is owned by pid=$listener_pid, expected new pid=$new_pid" >&2 + exit 1 +fi + +css_disk_path="$PROJECT_ROOT/.next/standalone/.next${css_path#/_next}" +if [[ ! -f "$css_disk_path" ]]; then + echo "error: rendered css asset missing on disk: $css_disk_path" >&2 + exit 1 +fi + +content_type="$(curl -fsSI "http://$VERIFY_HOST:$PORT$css_path" | awk 'BEGIN{IGNORECASE=1} /^content-type:/ {print $2}' | tr -d '\r')" +if [[ "${content_type:-}" != text/css* ]]; then + echo "error: css asset served with unexpected content-type: ${content_type:-missing}" >&2 + exit 1 +fi + +echo "==> deployed commit $(git rev-parse --short HEAD)" +echo " pid=$new_pid port=$PORT css=$css_path" diff --git a/scripts/e2e-openclaw/start-e2e-server.mjs b/scripts/e2e-openclaw/start-e2e-server.mjs index 35703b0..e62dcb9 100755 --- a/scripts/e2e-openclaw/start-e2e-server.mjs +++ b/scripts/e2e-openclaw/start-e2e-server.mjs @@ -1,9 +1,30 @@ #!/usr/bin/env node import { spawn } from 'node:child_process' import fs from 'node:fs' +import net from 'node:net' import path from 'node:path' import process from 'node:process' +async function findAvailablePort(host = '127.0.0.1') { + return await new Promise((resolve, reject) => { + const server = net.createServer() + server.unref() + 
server.on('error', reject) + server.listen(0, host, () => { + const address = server.address() + if (!address || typeof address === 'string') { + server.close(() => reject(new Error('failed to resolve dynamic port'))) + return + } + const { port } = address + server.close((err) => { + if (err) reject(err) + else resolve(port) + }) + }) + }) +} + const modeArg = process.argv.find((arg) => arg.startsWith('--mode=')) const mode = modeArg ? modeArg.split('=')[1] : 'local' if (mode !== 'local' && mode !== 'gateway') { @@ -16,6 +37,7 @@ const fixtureSource = path.join(repoRoot, 'tests', 'fixtures', 'openclaw') const runtimeRoot = path.join(repoRoot, '.tmp', 'e2e-openclaw', mode) const dataDir = path.join(runtimeRoot, 'data') const mockBinDir = path.join(repoRoot, 'scripts', 'e2e-openclaw', 'bin') +const skillsRoot = path.join(runtimeRoot, 'skills') fs.rmSync(runtimeRoot, { recursive: true, force: true }) fs.mkdirSync(runtimeRoot, { recursive: true }) @@ -23,13 +45,14 @@ fs.mkdirSync(dataDir, { recursive: true }) fs.cpSync(fixtureSource, runtimeRoot, { recursive: true }) const gatewayHost = '127.0.0.1' -const gatewayPort = '18789' +const gatewayPort = String(await findAvailablePort(gatewayHost)) const baseEnv = { ...process.env, API_KEY: process.env.API_KEY || 'test-api-key-e2e-12345', AUTH_USER: process.env.AUTH_USER || 'admin', AUTH_PASS: process.env.AUTH_PASS || 'admin', + MISSION_CONTROL_TEST_MODE: process.env.MISSION_CONTROL_TEST_MODE || '1', MC_DISABLE_RATE_LIMIT: '1', MISSION_CONTROL_DATA_DIR: dataDir, MISSION_CONTROL_DB_PATH: path.join(dataDir, 'mission-control.db'), @@ -39,11 +62,17 @@ const baseEnv = { OPENCLAW_GATEWAY_PORT: gatewayPort, OPENCLAW_BIN: path.join(mockBinDir, 'openclaw'), CLAWDBOT_BIN: path.join(mockBinDir, 'clawdbot'), + MC_SKILLS_USER_AGENTS_DIR: path.join(skillsRoot, 'user-agents'), + MC_SKILLS_USER_CODEX_DIR: path.join(skillsRoot, 'user-codex'), + MC_SKILLS_PROJECT_AGENTS_DIR: path.join(skillsRoot, 'project-agents'), + 
MC_SKILLS_PROJECT_CODEX_DIR: path.join(skillsRoot, 'project-codex'), + MC_SKILLS_OPENCLAW_DIR: path.join(skillsRoot, 'openclaw'), PATH: `${mockBinDir}:${process.env.PATH || ''}`, E2E_GATEWAY_EXPECTED: mode === 'gateway' ? '1' : '0', } const children = [] +let app = null if (mode === 'gateway') { const gw = spawn('node', ['scripts/e2e-openclaw/mock-gateway.mjs'], { @@ -51,11 +80,24 @@ if (mode === 'gateway') { env: baseEnv, stdio: 'inherit', }) + gw.on('error', (err) => { + process.stderr.write(`[openclaw-e2e] mock gateway failed to start: ${String(err)}\n`) + shutdown('SIGTERM') + process.exit(1) + }) + gw.on('exit', (code, signal) => { + const exitCode = code ?? (signal ? 1 : 0) + if (exitCode !== 0) { + process.stderr.write(`[openclaw-e2e] mock gateway exited unexpectedly (code=${exitCode}, signal=${signal ?? 'none'})\n`) + shutdown('SIGTERM') + process.exit(exitCode) + } + }) children.push(gw) } const standaloneServerPath = path.join(repoRoot, '.next', 'standalone', 'server.js') -const app = fs.existsSync(standaloneServerPath) +app = fs.existsSync(standaloneServerPath) ? spawn('node', [standaloneServerPath], { cwd: repoRoot, env: { diff --git a/scripts/generate-env.sh b/scripts/generate-env.sh new file mode 100755 index 0000000..c728377 --- /dev/null +++ b/scripts/generate-env.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +# Generate a secure .env file from .env.example with random secrets. +# Usage: bash scripts/generate-env.sh [output-path] +# +# If output-path is omitted, writes to .env in the project root. +# Will NOT overwrite an existing .env unless --force is passed. + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" +EXAMPLE_FILE="$PROJECT_ROOT/.env.example" +FORCE=false +OUTPUT="" + +for arg in "$@"; do + case "$arg" in + --force) FORCE=true ;; + *) OUTPUT="$arg" ;; + esac +done + +OUTPUT="${OUTPUT:-$PROJECT_ROOT/.env}" + +if [[ -f "$OUTPUT" ]] && ! 
$FORCE; then + echo "Error: $OUTPUT already exists. Use --force to overwrite." + exit 1 +fi + +if [[ ! -f "$EXAMPLE_FILE" ]]; then + echo "Error: .env.example not found at $EXAMPLE_FILE" + exit 1 +fi + +# Generate cryptographically random values +generate_password() { + local len="${1:-24}" + # Use openssl if available, fallback to /dev/urandom + if command -v openssl &>/dev/null; then + openssl rand -base64 "$((len * 3 / 4 + 1))" | tr -dc 'A-Za-z0-9' | head -c "$len" + else + head -c "$((len * 2))" /dev/urandom | LC_ALL=C tr -dc 'A-Za-z0-9' | head -c "$len" + fi +} + +generate_hex() { + local len="${1:-32}" + if command -v openssl &>/dev/null; then + openssl rand -hex "$((len / 2))" + else + head -c "$((len / 2))" /dev/urandom | od -An -tx1 | tr -d ' \n' | head -c "$len" + fi +} + +AUTH_PASS="$(generate_password 24)" +API_KEY="$(generate_hex 32)" +AUTH_SECRET="$(generate_password 32)" + +# Copy .env.example and replace default secrets +cp "$EXAMPLE_FILE" "$OUTPUT" + +# Replace the insecure defaults with generated values +if [[ "$(uname)" == "Darwin" ]]; then + sed -i '' "s|^AUTH_PASS=.*|AUTH_PASS=$AUTH_PASS|" "$OUTPUT" + sed -i '' "s|^API_KEY=.*|API_KEY=$API_KEY|" "$OUTPUT" + sed -i '' "s|^AUTH_SECRET=.*|AUTH_SECRET=$AUTH_SECRET|" "$OUTPUT" +else + sed -i "s|^AUTH_PASS=.*|AUTH_PASS=$AUTH_PASS|" "$OUTPUT" + sed -i "s|^API_KEY=.*|API_KEY=$API_KEY|" "$OUTPUT" + sed -i "s|^AUTH_SECRET=.*|AUTH_SECRET=$AUTH_SECRET|" "$OUTPUT" +fi + +# Lock down permissions +chmod 600 "$OUTPUT" + +echo "Generated secure .env at $OUTPUT" +echo " AUTH_USER: admin" +echo " AUTH_PASS: $AUTH_PASS" +echo " API_KEY: $API_KEY" +echo "" +echo "Save these credentials — they are not stored elsewhere." 
diff --git a/scripts/security-audit.sh b/scripts/security-audit.sh new file mode 100755 index 0000000..d640c4a --- /dev/null +++ b/scripts/security-audit.sh @@ -0,0 +1,168 @@ +#!/usr/bin/env bash +# Mission Control Security Audit +# Run: bash scripts/security-audit.sh [env-file-path] (defaults to .env) + +set -euo pipefail + +SCORE=0 +MAX_SCORE=0 +ISSUES=() + +pass() { echo " [PASS] $1"; SCORE=$((SCORE+1)); MAX_SCORE=$((MAX_SCORE+1)); } +fail() { echo " [FAIL] $1"; ISSUES+=("$1"); MAX_SCORE=$((MAX_SCORE+1)); } +warn() { echo " [WARN] $1"; MAX_SCORE=$((MAX_SCORE+1)); } +info() { echo " [INFO] $1"; } + +# Load .env if exists +ENV_FILE="${1:-.env}" +if [[ -f "$ENV_FILE" ]]; then + while IFS='=' read -r key value; do + [[ "$key" =~ ^#.*$ ]] && continue + [[ -z "$key" ]] && continue + declare "$key=$value" 2>/dev/null || true + done < "$ENV_FILE" +fi + +echo "=== Mission Control Security Audit ===" +echo "" + +# 1. .env file permissions +echo "--- File Permissions ---" +if [[ -f "$ENV_FILE" ]]; then + perms=$(stat -f '%A' "$ENV_FILE" 2>/dev/null || stat -c '%a' "$ENV_FILE" 2>/dev/null) + if [[ "$perms" == "600" ]]; then + pass ".env permissions are 600 (owner read/write only)" + else + fail ".env permissions are $perms (should be 600). Run: chmod 600 $ENV_FILE" + fi +else + warn ".env file not found at $ENV_FILE" +fi + +# 2.
Default passwords check +echo "" +echo "--- Credentials ---" +INSECURE_PASSWORDS=("admin" "password" "change-me-on-first-login" "changeme" "testpass123" "testpass1234") +AUTH_PASS_VAL="${AUTH_PASS:-}" +if [[ -z "$AUTH_PASS_VAL" ]]; then + fail "AUTH_PASS is not set" +else + insecure=false + for bad in "${INSECURE_PASSWORDS[@]}"; do + if [[ "$AUTH_PASS_VAL" == "$bad" ]]; then + insecure=true; break + fi + done + if $insecure; then + fail "AUTH_PASS is set to a known insecure default" + elif [[ ${#AUTH_PASS_VAL} -lt 12 ]]; then + fail "AUTH_PASS is too short (${#AUTH_PASS_VAL} chars, minimum 12)" + else + pass "AUTH_PASS is set to a non-default value (${#AUTH_PASS_VAL} chars)" + fi +fi + +API_KEY_VAL="${API_KEY:-}" +if [[ -z "$API_KEY_VAL" || "$API_KEY_VAL" == "generate-a-random-key" ]]; then + fail "API_KEY is not set or uses the default value" +else + pass "API_KEY is configured" +fi + +# 3. Network config +echo "" +echo "--- Network Security ---" +MC_ALLOWED="${MC_ALLOWED_HOSTS:-}" +MC_ANY="${MC_ALLOW_ANY_HOST:-}" +if [[ "$MC_ANY" == "1" || "$MC_ANY" == "true" ]]; then + fail "MC_ALLOW_ANY_HOST is enabled (any host can connect)" +elif [[ -n "$MC_ALLOWED" ]]; then + pass "MC_ALLOWED_HOSTS is configured: $MC_ALLOWED" +else + warn "MC_ALLOWED_HOSTS is not set (defaults apply)" +fi + +# 4. Cookie/HTTPS config +echo "" +echo "--- HTTPS & Cookies ---" +COOKIE_SECURE="${MC_COOKIE_SECURE:-}" +if [[ "$COOKIE_SECURE" == "1" || "$COOKIE_SECURE" == "true" ]]; then + pass "MC_COOKIE_SECURE is enabled" +else + warn "MC_COOKIE_SECURE is not enabled (cookies sent over HTTP)" +fi + +SAMESITE="${MC_COOKIE_SAMESITE:-strict}" +if [[ "$SAMESITE" == "strict" ]]; then + pass "MC_COOKIE_SAMESITE is strict" +else + warn "MC_COOKIE_SAMESITE is '$SAMESITE' (strict recommended)" +fi + +HSTS="${MC_ENABLE_HSTS:-}" +if [[ "$HSTS" == "1" ]]; then + pass "HSTS is enabled" +else + warn "HSTS is not enabled (set MC_ENABLE_HSTS=1 for HTTPS deployments)" +fi + +# 5. 
Rate limiting +echo "" +echo "--- Rate Limiting ---" +RL_DISABLED="${MC_DISABLE_RATE_LIMIT:-}" +if [[ "$RL_DISABLED" == "1" ]]; then + fail "Rate limiting is disabled (MC_DISABLE_RATE_LIMIT=1)" +else + pass "Rate limiting is active" +fi + +# 6. Docker security (if running in Docker) +echo "" +echo "--- Docker Security ---" +if command -v docker &>/dev/null; then + if docker ps --filter name=mission-control --format '{{.Names}}' 2>/dev/null | grep -q mission-control; then + ro=$(docker inspect mission-control --format '{{.HostConfig.ReadonlyRootfs}}' 2>/dev/null || echo "false") + if [[ "$ro" == "true" ]]; then + pass "Container filesystem is read-only" + else + warn "Container filesystem is writable (use read_only: true)" + fi + + nnp=$(docker inspect mission-control --format '{{.HostConfig.SecurityOpt}}' 2>/dev/null || echo "[]") + if echo "$nnp" | grep -q "no-new-privileges"; then + pass "no-new-privileges is set" + else + warn "no-new-privileges not set" + fi + + user=$(docker inspect mission-control --format '{{.Config.User}}' 2>/dev/null || echo "") + if [[ -n "$user" && "$user" != "root" && "$user" != "0" ]]; then + pass "Container runs as non-root user ($user)" + else + warn "Container may be running as root" + fi + else + info "Mission Control container not running" + fi +else + info "Docker not installed (skipping container checks)" +fi + +# Summary +echo "" +echo "=== Security Score: $SCORE / $MAX_SCORE ===" +if [[ ${#ISSUES[@]} -gt 0 ]]; then + echo "" + echo "Issues to fix:" + for issue in "${ISSUES[@]}"; do + echo " - $issue" + done +fi + +if [[ $SCORE -eq $MAX_SCORE ]]; then + echo "All checks passed!" +elif [[ $SCORE -ge $((MAX_SCORE * 7 / 10)) ]]; then + echo "Good security posture with minor improvements needed." +else + echo "Security improvements recommended before production use." 
+fi diff --git a/scripts/smoke-staging.mjs b/scripts/smoke-staging.mjs new file mode 100755 index 0000000..d57d666 --- /dev/null +++ b/scripts/smoke-staging.mjs @@ -0,0 +1,168 @@ +#!/usr/bin/env node +const baseUrl = (process.env.STAGING_BASE_URL || process.env.BASE_URL || '').replace(/\/$/, '') +const apiKey = process.env.STAGING_API_KEY || process.env.API_KEY || '' +const authUser = process.env.STAGING_AUTH_USER || process.env.AUTH_USER || '' +const authPass = process.env.STAGING_AUTH_PASS || process.env.AUTH_PASS || '' + +if (!baseUrl) { + console.error('Missing STAGING_BASE_URL (or BASE_URL).') + process.exit(1) +} +if (!apiKey) { + console.error('Missing STAGING_API_KEY (or API_KEY).') + process.exit(1) +} +if (!authUser || !authPass) { + console.error('Missing STAGING_AUTH_USER/STAGING_AUTH_PASS (or AUTH_USER/AUTH_PASS).') + process.exit(1) +} + +const headers = { + 'x-api-key': apiKey, + 'content-type': 'application/json', +} + +let createdProjectId = null +let createdTaskId = null +let createdAgentId = null + +async function call(path, options = {}) { + const res = await fetch(`${baseUrl}${path}`, options) + const text = await res.text() + let body = null + try { + body = text ? 
JSON.parse(text) : null + } catch { + body = { raw: text } + } + return { res, body } +} + +function assertStatus(actual, expected, label) { + if (actual !== expected) { + throw new Error(`${label} failed: expected ${expected}, got ${actual}`) + } + console.log(`PASS ${label}`) +} + +async function run() { + const login = await call('/api/auth/login', { + method: 'POST', + headers: { 'content-type': 'application/json' }, + body: JSON.stringify({ username: authUser, password: authPass }), + }) + assertStatus(login.res.status, 200, 'login') + + const workspaces = await call('/api/workspaces', { headers }) + assertStatus(workspaces.res.status, 200, 'GET /api/workspaces') + + const suffix = `${Date.now()}-${Math.random().toString(36).slice(2, 7)}` + const ticketPrefix = `S${String(Date.now()).slice(-5)}` + + const projectCreate = await call('/api/projects', { + method: 'POST', + headers, + body: JSON.stringify({ + name: `staging-smoke-${suffix}`, + ticket_prefix: ticketPrefix, + }), + }) + assertStatus(projectCreate.res.status, 201, 'POST /api/projects') + createdProjectId = projectCreate.body?.project?.id + if (!createdProjectId) throw new Error('project id missing') + + const projectGet = await call(`/api/projects/${createdProjectId}`, { headers }) + assertStatus(projectGet.res.status, 200, 'GET /api/projects/[id]') + + const projectPatch = await call(`/api/projects/${createdProjectId}`, { + method: 'PATCH', + headers, + body: JSON.stringify({ description: 'staging smoke update' }), + }) + assertStatus(projectPatch.res.status, 200, 'PATCH /api/projects/[id]') + + const agentCreate = await call('/api/agents', { + method: 'POST', + headers, + body: JSON.stringify({ name: `smoke-agent-${suffix}`, role: 'tester' }), + }) + assertStatus(agentCreate.res.status, 201, 'POST /api/agents') + createdAgentId = agentCreate.body?.agent?.id + + const assign = await call(`/api/projects/${createdProjectId}/agents`, { + method: 'POST', + headers, + body: JSON.stringify({ agent_name: 
`smoke-agent-${suffix}`, role: 'member' }), + }) + assertStatus(assign.res.status, 201, 'POST /api/projects/[id]/agents') + + const projectTasksCreate = await call('/api/tasks', { + method: 'POST', + headers, + body: JSON.stringify({ + title: `smoke-task-${suffix}`, + project_id: createdProjectId, + priority: 'medium', + status: 'inbox', + }), + }) + assertStatus(projectTasksCreate.res.status, 201, 'POST /api/tasks (project scoped)') + createdTaskId = projectTasksCreate.body?.task?.id + + const projectTasksGet = await call(`/api/projects/${createdProjectId}/tasks`, { headers }) + assertStatus(projectTasksGet.res.status, 200, 'GET /api/projects/[id]/tasks') + + const unassign = await call(`/api/projects/${createdProjectId}/agents?agent_name=${encodeURIComponent(`smoke-agent-${suffix}`)}`, { + method: 'DELETE', + headers, + }) + assertStatus(unassign.res.status, 200, 'DELETE /api/projects/[id]/agents') + + if (createdTaskId) { + const deleteTask = await call(`/api/tasks/${createdTaskId}`, { + method: 'DELETE', + headers, + }) + assertStatus(deleteTask.res.status, 200, 'DELETE /api/tasks/[id]') + createdTaskId = null + } + + if (createdProjectId) { + const deleteProject = await call(`/api/projects/${createdProjectId}?mode=delete`, { + method: 'DELETE', + headers, + }) + assertStatus(deleteProject.res.status, 200, 'DELETE /api/projects/[id]?mode=delete') + createdProjectId = null + } + + if (createdAgentId) { + const deleteAgent = await call(`/api/agents/${createdAgentId}`, { + method: 'DELETE', + headers, + }) + if (deleteAgent.res.status !== 200 && deleteAgent.res.status !== 404) { + throw new Error(`DELETE /api/agents/[id] cleanup failed: ${deleteAgent.res.status}`) + } + createdAgentId = null + console.log('PASS cleanup agent') + } + + console.log(`\nSmoke test passed for ${baseUrl}`) +} + +run().catch(async (error) => { + console.error(`\nSmoke test failed: ${error.message}`) + + if (createdTaskId) { + await call(`/api/tasks/${createdTaskId}`, { method: 'DELETE', 
headers }).catch(() => {}) + } + if (createdProjectId) { + await call(`/api/projects/${createdProjectId}?mode=delete`, { method: 'DELETE', headers }).catch(() => {}) + } + if (createdAgentId) { + await call(`/api/agents/${createdAgentId}`, { method: 'DELETE', headers }).catch(() => {}) + } + + process.exit(1) +}) diff --git a/scripts/start-standalone.sh b/scripts/start-standalone.sh new file mode 100644 index 0000000..0263efe --- /dev/null +++ b/scripts/start-standalone.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" +STANDALONE_DIR="$PROJECT_ROOT/.next/standalone" +STANDALONE_NEXT_DIR="$STANDALONE_DIR/.next" +STANDALONE_STATIC_DIR="$STANDALONE_NEXT_DIR/static" +SOURCE_STATIC_DIR="$PROJECT_ROOT/.next/static" +SOURCE_PUBLIC_DIR="$PROJECT_ROOT/public" +STANDALONE_PUBLIC_DIR="$STANDALONE_DIR/public" + +if [[ ! -f "$STANDALONE_DIR/server.js" ]]; then + echo "error: standalone server missing at $STANDALONE_DIR/server.js" >&2 + echo "run 'pnpm build' first" >&2 + exit 1 +fi + +mkdir -p "$STANDALONE_NEXT_DIR" + +if [[ -d "$SOURCE_STATIC_DIR" ]]; then + rm -rf "$STANDALONE_STATIC_DIR" + cp -R "$SOURCE_STATIC_DIR" "$STANDALONE_STATIC_DIR" +fi + +if [[ -d "$SOURCE_PUBLIC_DIR" ]]; then + rm -rf "$STANDALONE_PUBLIC_DIR" + cp -R "$SOURCE_PUBLIC_DIR" "$STANDALONE_PUBLIC_DIR" +fi + +cd "$STANDALONE_DIR" +exec node server.js diff --git a/scripts/station-doctor.sh b/scripts/station-doctor.sh new file mode 100755 index 0000000..b746a0b --- /dev/null +++ b/scripts/station-doctor.sh @@ -0,0 +1,189 @@ +#!/usr/bin/env bash +# Mission Control Station Doctor +# Local diagnostics — no auth required, runs on the host. +# +# Usage: bash scripts/station-doctor.sh [--port PORT] + +set -euo pipefail + +MC_PORT="${1:-3000}" +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." 
&& pwd)" + +# Parse args +for arg in "$@"; do + case "$arg" in + --port) shift; MC_PORT="${1:-3000}"; shift || true ;; + esac +done + +PASS=0 +WARN=0 +FAIL=0 + +pass() { echo " [PASS] $1"; PASS=$((PASS+1)); } +warn() { echo " [WARN] $1"; WARN=$((WARN+1)); } +fail() { echo " [FAIL] $1"; FAIL=$((FAIL+1)); } +info() { echo " [INFO] $1"; } + +echo "=== Mission Control Station Doctor ===" +echo "" + +# ── 1. Process / Container check ───────────────────────────────────────────── +echo "--- Service Status ---" + +RUNNING_IN_DOCKER=false +if command -v docker &>/dev/null; then + if docker ps --filter name=mission-control --format '{{.Names}}' 2>/dev/null | grep -q mission-control; then + RUNNING_IN_DOCKER=true + health=$(docker inspect mission-control --format '{{.State.Health.Status}}' 2>/dev/null || echo "none") + if [[ "$health" == "healthy" ]]; then + pass "Docker container is healthy" + elif [[ "$health" == "starting" ]]; then + warn "Docker container is starting" + else + fail "Docker container health: $health" + fi + fi +fi + +if ! $RUNNING_IN_DOCKER; then + if pgrep -f "node.*server.js" &>/dev/null || pgrep -f "next-server" &>/dev/null; then + pass "Mission Control process is running" + else + fail "Mission Control process not found" + fi +fi + +# ── 2. Port check ──────────────────────────────────────────────────────────── +echo "" +echo "--- Network ---" + +if curl -sf "http://localhost:$MC_PORT/login" &>/dev/null; then + pass "Port $MC_PORT is responding" +else + fail "Port $MC_PORT is not responding" +fi + +# ── 3.
API health ───────────────────────────────────────────────────────────── +# Try unauthenticated — will get 401 but proves the server is up +http_code=$(curl -sf -o /dev/null -w "%{http_code}" "http://localhost:$MC_PORT/api/status?action=health" 2>/dev/null || echo "000") +if [[ "$http_code" == "200" ]]; then + pass "Health API responding (200)" +elif [[ "$http_code" == "401" ]]; then + pass "Health API responding (auth required — expected)" +elif [[ "$http_code" == "000" ]]; then + fail "Health API not reachable" +else + warn "Health API returned HTTP $http_code" +fi + +# ── 4. Disk space ───────────────────────────────────────────────────────────── +echo "" +echo "--- Disk ---" + +usage_pct=$(df -h "$PROJECT_ROOT" 2>/dev/null | tail -1 | awk '{for(i=1;i<=NF;i++) if($i ~ /%/) print $i}' | tr -d '%') +if [[ -n "$usage_pct" ]]; then + if [[ "$usage_pct" -lt 85 ]]; then + pass "Disk usage: ${usage_pct}%" + elif [[ "$usage_pct" -lt 95 ]]; then + warn "Disk usage: ${usage_pct}% (getting full)" + else + fail "Disk usage: ${usage_pct}% (critical)" + fi +fi + +# ── 5. 
Database integrity ──────────────────────────────────────────────────── +echo "" +echo "--- Database ---" + +DB_PATH="$PROJECT_ROOT/.data/mission-control.db" +if [[ -f "$DB_PATH" ]]; then + db_size=$(du -h "$DB_PATH" 2>/dev/null | cut -f1) + pass "Database exists ($db_size)" + + # SQLite integrity check + if command -v sqlite3 &>/dev/null; then + integrity=$(sqlite3 "$DB_PATH" "PRAGMA integrity_check;" 2>/dev/null || echo "error") + if [[ "$integrity" == "ok" ]]; then + pass "Database integrity check passed" + else + fail "Database integrity check failed: $integrity" + fi + + # WAL mode check + journal=$(sqlite3 "$DB_PATH" "PRAGMA journal_mode;" 2>/dev/null || echo "unknown") + if [[ "$journal" == "wal" ]]; then + pass "WAL mode enabled" + else + warn "Journal mode: $journal (WAL recommended)" + fi + else + info "sqlite3 not found — skipping integrity check" + fi +else + if $RUNNING_IN_DOCKER; then + info "Database is inside Docker volume (cannot check directly)" + else + warn "Database not found at $DB_PATH" + fi +fi + +# ── 6. 
Backup age ──────────────────────────────────────────────────────────── +echo "" +echo "--- Backups ---" + +BACKUP_DIR="$PROJECT_ROOT/.data/backups" +if [[ -d "$BACKUP_DIR" ]]; then + latest_backup=$(find "$BACKUP_DIR" -name "*.db" -type f 2>/dev/null | sort -r | head -1) + if [[ -n "$latest_backup" ]]; then + if [[ "$(uname)" == "Darwin" ]]; then + backup_age_days=$(( ($(date +%s) - $(stat -f %m "$latest_backup")) / 86400 )) + else + backup_age_days=$(( ($(date +%s) - $(stat -c %Y "$latest_backup")) / 86400 )) + fi + backup_name=$(basename "$latest_backup") + if [[ "$backup_age_days" -lt 1 ]]; then + pass "Latest backup: $backup_name (today)" + elif [[ "$backup_age_days" -lt 7 ]]; then + pass "Latest backup: $backup_name (${backup_age_days}d ago)" + elif [[ "$backup_age_days" -lt 30 ]]; then + warn "Latest backup: $backup_name (${backup_age_days}d ago — consider more frequent backups)" + else + fail "Latest backup: $backup_name (${backup_age_days}d ago — stale!)" + fi + else + warn "No backups found in $BACKUP_DIR" + fi +else + warn "No backup directory at $BACKUP_DIR" +fi + +# ── 7. 
OpenClaw gateway ───────────────────────────────────────────────────── +echo "" +echo "--- OpenClaw Gateway ---" + +GW_HOST="${OPENCLAW_GATEWAY_HOST:-127.0.0.1}" +GW_PORT="${OPENCLAW_GATEWAY_PORT:-18789}" + +if nc -z "$GW_HOST" "$GW_PORT" 2>/dev/null || (echo > "/dev/tcp/$GW_HOST/$GW_PORT") 2>/dev/null; then + pass "Gateway reachable at $GW_HOST:$GW_PORT" +else + info "Gateway not reachable at $GW_HOST:$GW_PORT" +fi + +# ── Summary ────────────────────────────────────────────────────────────────── +echo "" +TOTAL=$((PASS + WARN + FAIL)) +echo "=== Results: $PASS passed, $WARN warnings, $FAIL failures (of $TOTAL checks) ===" + +if [[ $FAIL -gt 0 ]]; then + echo "Status: UNHEALTHY" + exit 1 +elif [[ $WARN -gt 0 ]]; then + echo "Status: DEGRADED" + exit 0 +else + echo "Status: HEALTHY" + exit 0 +fi diff --git a/skills/mission-control-installer/README.md b/skills/mission-control-installer/README.md new file mode 100644 index 0000000..e42e721 --- /dev/null +++ b/skills/mission-control-installer/README.md @@ -0,0 +1,68 @@ +# Mission Control Installer Skill + +Install and configure Mission Control on any Linux or macOS system. + +## What This Skill Does + +1. Detects the target OS and available runtimes (Docker or Node.js 20+) +2. Clones or updates the Mission Control repository +3. Generates a secure `.env` with random credentials +4. Starts the dashboard via Docker Compose or local Node.js +5. Runs an OpenClaw fleet health check (cleans stale PIDs, old logs, validates gateway) +6. 
Prints the access URL and admin credentials + +## Usage + +Run the installer script: + +```bash +# Auto-detect deployment mode (prefers Docker) +bash install.sh + +# Force Docker deployment +bash install.sh --docker + +# Force local deployment (Node.js + pnpm) +bash install.sh --local + +# Custom port +bash install.sh --port 8080 + +# Skip OpenClaw fleet check +bash install.sh --skip-openclaw +``` + +Or as a one-liner: + +```bash +curl -fsSL https://raw.githubusercontent.com/builderz-labs/mission-control/main/install.sh | bash +``` + +## Prerequisites + +- **Docker mode**: Docker Engine with Docker Compose v2 +- **Local mode**: Node.js 22+ (the version pinned in `.nvmrc`), pnpm (auto-installed via corepack if missing) +- **Both**: git (to clone the repository) + +## Post-Install + +After installation: + +1. Open `http://localhost:3000` (or your configured port) +2. Log in with the credentials printed by the installer (also in `.env`) +3. Configure your OpenClaw gateway connection in Settings +4. Register agents via the Agents panel + +## Environment Configuration + +The installer generates a `.env` from `.env.example` with secure random values for: + +- `AUTH_PASS` — 24-character random password +- `API_KEY` — 32-character hex API key +- `AUTH_SECRET` — 32-character session secret + +To regenerate credentials independently: + +```bash +bash scripts/generate-env.sh --force +``` diff --git a/skills/mission-control-installer/skill.json b/skills/mission-control-installer/skill.json new file mode 100644 index 0000000..ae18bf4 --- /dev/null +++ b/skills/mission-control-installer/skill.json @@ -0,0 +1,27 @@ +{ + "name": "mission-control-installer", + "version": "1.0.0", + "description": "Install and configure Mission Control — the OpenClaw agent orchestration dashboard", + "author": "Builderz Labs", + "license": "MIT", + "tools": ["exec", "fs"], + "parameters": { + "deployment_mode": { + "type": "string", + "enum": ["docker", "local"], + "default": "docker", + "description": "How to deploy Mission 
Control" + }, + "port": { + "type": "number", + "default": 3000, + "description": "Port for the Mission Control dashboard" + }, + "install_dir": { + "type": "string", + "default": "", + "description": "Installation directory (defaults to ./mission-control)" + } + }, + "tags": ["mission-control", "dashboard", "installer", "docker"] +} diff --git a/skills/mission-control-manage/README.md b/skills/mission-control-manage/README.md new file mode 100644 index 0000000..428d4bb --- /dev/null +++ b/skills/mission-control-manage/README.md @@ -0,0 +1,104 @@ +# Mission Control Management Skill + +Manage a running Mission Control instance programmatically. + +## API Endpoints + +All endpoints require authentication via `x-api-key` header or session cookie. + +### Health Check + +```bash +# Quick health status +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/status?action=health + +# Response: { "status": "healthy", "version": "1.3.0", "checks": [...] } +``` + +Possible statuses: `healthy`, `degraded`, `unhealthy` + +### System Overview + +```bash +# Full system status (memory, disk, sessions, processes) +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/status?action=overview +``` + +### Diagnostics (Admin Only) + +```bash +# Comprehensive diagnostics including security posture +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/diagnostics + +# Response includes: +# - system: node version, platform, memory, docker detection +# - security: score (0-100) with individual checks +# - database: size, WAL mode, migration version +# - gateway: configured, reachable, host/port +# - agents: total count, by status +# - retention: configured retention policies +``` + +### Check for Updates + +```bash +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/releases/check + +# Response: { "updateAvailable": true, "currentVersion": "1.3.0", "latestVersion": "1.4.0", ... 
} +``` + +### Trigger Update + +```bash +# Apply available update (bare-metal only; Docker returns instructions) +curl -X POST -H "x-api-key: $API_KEY" http://localhost:3000/api/releases/update +``` + +### Database Backup + +```bash +curl -X POST -H "x-api-key: $API_KEY" http://localhost:3000/api/backup +``` + +### Agent Management + +```bash +# List agents +curl -H "x-api-key: $API_KEY" http://localhost:3000/api/agents + +# Register an agent +curl -X POST -H "x-api-key: $API_KEY" \ + -H "Content-Type: application/json" \ + -d '{"name": "my-agent", "type": "openclaw"}' \ + http://localhost:3000/api/agents +``` + +## Station Doctor + +For local diagnostics without API access: + +```bash +bash scripts/station-doctor.sh +``` + +Checks: Docker health, port availability, disk space, DB integrity, backup age, and OpenClaw gateway reachability. + +## Common Workflows + +### Automated Health Monitoring + +```bash +# Check health and alert if unhealthy +STATUS=$(curl -sf -H "x-api-key: $API_KEY" http://localhost:3000/api/status?action=health | jq -r '.status') +if [ "$STATUS" != "healthy" ]; then + echo "ALERT: Mission Control is $STATUS" +fi +``` + +### Pre-Upgrade Checklist + +1. Check for updates: `GET /api/releases/check` +2. Create backup: `POST /api/backup` +3. Run diagnostics: `GET /api/diagnostics` (verify no active tasks) +4. Apply update: `POST /api/releases/update` (or `docker pull` + recreate for Docker) +5. 
Verify health: `GET /api/status?action=health` diff --git a/skills/mission-control-manage/skill.json b/skills/mission-control-manage/skill.json new file mode 100644 index 0000000..ce84ce4 --- /dev/null +++ b/skills/mission-control-manage/skill.json @@ -0,0 +1,20 @@ +{ + "name": "mission-control-manage", + "version": "1.0.0", + "description": "Manage a running Mission Control instance — health checks, diagnostics, upgrades, backups", + "author": "Builderz Labs", + "license": "MIT", + "tools": ["exec", "http"], + "parameters": { + "base_url": { + "type": "string", + "default": "http://localhost:3000", + "description": "Mission Control base URL" + }, + "api_key": { + "type": "string", + "description": "API key for authentication (x-api-key header)" + } + }, + "tags": ["mission-control", "management", "health", "upgrade", "backup"] +} diff --git a/src/app/[[...panel]]/page.tsx b/src/app/[[...panel]]/page.tsx index 3ac8bf2..308a52d 100644 --- a/src/app/[[...panel]]/page.tsx +++ b/src/app/[[...panel]]/page.tsx @@ -1,18 +1,15 @@ 'use client' -import { useEffect, useState } from 'react' +import { createElement, useEffect, useState } from 'react' import { usePathname, useRouter } from 'next/navigation' import { NavRail } from '@/components/layout/nav-rail' import { HeaderBar } from '@/components/layout/header-bar' import { LiveFeed } from '@/components/layout/live-feed' import { Dashboard } from '@/components/dashboard/dashboard' -import { AgentSpawnPanel } from '@/components/panels/agent-spawn-panel' import { LogViewerPanel } from '@/components/panels/log-viewer-panel' import { CronManagementPanel } from '@/components/panels/cron-management-panel' import { MemoryBrowserPanel } from '@/components/panels/memory-browser-panel' -import { TokenDashboardPanel } from '@/components/panels/token-dashboard-panel' -import { AgentCostPanel } from '@/components/panels/agent-cost-panel' -import { SessionDetailsPanel } from '@/components/panels/session-details-panel' +import { 
CostTrackerPanel } from '@/components/panels/cost-tracker-panel' import { TaskBoardPanel } from '@/components/panels/task-board-panel' import { ActivityFeedPanel } from '@/components/panels/activity-feed-panel' import { AgentSquadPanelPhase3 } from '@/components/panels/agent-squad-panel-phase3' @@ -22,7 +19,6 @@ import { OrchestrationBar } from '@/components/panels/orchestration-bar' import { NotificationsPanel } from '@/components/panels/notifications-panel' import { UserManagementPanel } from '@/components/panels/user-management-panel' import { AuditTrailPanel } from '@/components/panels/audit-trail-panel' -import { AgentHistoryPanel } from '@/components/panels/agent-history-panel' import { WebhookPanel } from '@/components/panels/webhook-panel' import { SettingsPanel } from '@/components/panels/settings-panel' import { GatewayConfigPanel } from '@/components/panels/gateway-config-panel' @@ -32,36 +28,176 @@ import { MultiGatewayPanel } from '@/components/panels/multi-gateway-panel' import { SuperAdminPanel } from '@/components/panels/super-admin-panel' import { OfficePanel } from '@/components/panels/office-panel' import { GitHubSyncPanel } from '@/components/panels/github-sync-panel' -import { DocumentsPanel } from '@/components/panels/documents-panel' +import { SkillsPanel } from '@/components/panels/skills-panel' +import { LocalAgentsDocPanel } from '@/components/panels/local-agents-doc-panel' +import { ChannelsPanel } from '@/components/panels/channels-panel' +import { DebugPanel } from '@/components/panels/debug-panel' +import { SecurityAuditPanel } from '@/components/panels/security-audit-panel' +import { NodesPanel } from '@/components/panels/nodes-panel' +import { ExecApprovalPanel } from '@/components/panels/exec-approval-panel' +import { ChatPagePanel } from '@/components/panels/chat-page-panel' import { ChatPanel } from '@/components/chat/chat-panel' +import { getPluginPanel } from '@/lib/plugins' import { ErrorBoundary } from 
'@/components/ErrorBoundary' import { LocalModeBanner } from '@/components/layout/local-mode-banner' import { UpdateBanner } from '@/components/layout/update-banner' -import { PromoBanner } from '@/components/layout/promo-banner' +import { OpenClawUpdateBanner } from '@/components/layout/openclaw-update-banner' +import { OpenClawDoctorBanner } from '@/components/layout/openclaw-doctor-banner' +import { OnboardingWizard } from '@/components/onboarding/onboarding-wizard' +import { Loader } from '@/components/ui/loader' +import { ProjectManagerModal } from '@/components/modals/project-manager-modal' +import { ExecApprovalOverlay } from '@/components/modals/exec-approval-overlay' import { useWebSocket } from '@/lib/websocket' import { useServerEvents } from '@/lib/use-server-events' +import { completeNavigationTiming } from '@/lib/navigation-metrics' +import { panelHref, useNavigateToPanel } from '@/lib/navigation' +import { clearOnboardingDismissedThisSession, clearOnboardingReplayFromStart, getOnboardingSessionDecision, markOnboardingReplayFromStart, readOnboardingDismissedThisSession } from '@/lib/onboarding-session' +import { Button } from '@/components/ui/button' import { useMissionControl } from '@/store' +interface GatewaySummary { + id: number + is_primary: number +} + +function renderPluginPanel(panelId: string) { + const pluginPanel = getPluginPanel(panelId) + return pluginPanel ? 
createElement(pluginPanel) : +} + +function isLocalHost(hostname: string): boolean { + return hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '::1' +} + export default function Home() { const router = useRouter() const { connect } = useWebSocket() - const { activeTab, setActiveTab, setCurrentUser, setDashboardMode, setGatewayAvailable, setSubscription, setUpdateAvailable, liveFeedOpen, toggleLiveFeed } = useMissionControl() + const { activeTab, setActiveTab, setCurrentUser, setDashboardMode, setGatewayAvailable, setCapabilitiesChecked, setSubscription, setDefaultOrgName, setUpdateAvailable, setOpenclawUpdate, showOnboarding, setShowOnboarding, liveFeedOpen, toggleLiveFeed, showProjectManagerModal, setShowProjectManagerModal, fetchProjects, setChatPanelOpen, bootComplete, setBootComplete, setAgents, setSessions, setProjects, setInterfaceMode, setMemoryGraphAgents, setSkillsData } = useMissionControl() // Sync URL → Zustand activeTab const pathname = usePathname() const panelFromUrl = pathname === '/' ? 'overview' : pathname.slice(1) + const normalizedPanel = panelFromUrl === 'sessions' ? 'chat' : panelFromUrl useEffect(() => { - setActiveTab(panelFromUrl) - }, [panelFromUrl, setActiveTab]) + completeNavigationTiming(pathname) + }, [pathname]) + + useEffect(() => { + completeNavigationTiming(panelHref(activeTab)) + }, [activeTab]) + + useEffect(() => { + setActiveTab(normalizedPanel) + if (normalizedPanel === 'chat') { + setChatPanelOpen(false) + } + if (panelFromUrl === 'sessions') { + router.replace('/chat') + } + }, [panelFromUrl, normalizedPanel, router, setActiveTab, setChatPanelOpen]) // Connect to SSE for real-time local DB events (tasks, agents, chat, etc.) 
useServerEvents() const [isClient, setIsClient] = useState(false) + const [initSteps, setInitSteps] = useState>([ + { key: 'auth', label: 'Authenticating operator', status: 'pending' }, + { key: 'capabilities', label: 'Detecting station mode', status: 'pending' }, + { key: 'config', label: 'Loading control config', status: 'pending' }, + { key: 'connect', label: 'Connecting runtime links', status: 'pending' }, + { key: 'agents', label: 'Syncing agent registry', status: 'pending' }, + { key: 'sessions', label: 'Loading active sessions', status: 'pending' }, + { key: 'projects', label: 'Hydrating workspace board', status: 'pending' }, + { key: 'memory', label: 'Mapping memory graph', status: 'pending' }, + { key: 'skills', label: 'Indexing skill catalog', status: 'pending' }, + ]) + + const markStep = (key: string) => { + setInitSteps(prev => prev.map(s => s.key === key ? { ...s, status: 'done' } : s)) + } + + useEffect(() => { + if (!bootComplete && initSteps.every(s => s.status === 'done')) { + const t = setTimeout(() => setBootComplete(), 400) + return () => clearTimeout(t) + } + }, [initSteps, bootComplete, setBootComplete]) + + // Security console warning (anti-self-XSS) + useEffect(() => { + if (!bootComplete) return + if (typeof window === 'undefined') return + const key = 'mc-console-warning' + if (sessionStorage.getItem(key)) return + sessionStorage.setItem(key, '1') + + console.log( + '%c Stop! 
', + 'color: #fff; background: #e53e3e; font-size: 40px; font-weight: bold; padding: 4px 16px; border-radius: 4px;' + ) + console.log( + '%cThis is a browser feature intended for developers.\n\nIf someone told you to copy-paste something here to enable a feature or "hack" an account, it is a scam and will give them access to your account.', + 'font-size: 14px; color: #e2e8f0; padding: 8px 0;' + ) + console.log( + '%cLearn more: https://en.wikipedia.org/wiki/Self-XSS', + 'font-size: 12px; color: #718096;' + ) + }, [bootComplete]) useEffect(() => { setIsClient(true) + // OpenClaw control-ui device identity requires a secure browser context. + // Redirect remote HTTP sessions to HTTPS automatically to avoid handshake failures. + if (window.location.protocol === 'http:' && !isLocalHost(window.location.hostname)) { + const secureUrl = new URL(window.location.href) + secureUrl.protocol = 'https:' + window.location.replace(secureUrl.toString()) + return + } + + const connectWithEnvFallback = () => { + const explicitWsUrl = process.env.NEXT_PUBLIC_GATEWAY_URL || '' + const gatewayPort = process.env.NEXT_PUBLIC_GATEWAY_PORT || '18789' + const gatewayHost = process.env.NEXT_PUBLIC_GATEWAY_HOST || window.location.hostname + const gatewayProto = + process.env.NEXT_PUBLIC_GATEWAY_PROTOCOL || + (window.location.protocol === 'https:' ? 'wss' : 'ws') + const wsUrl = explicitWsUrl || `${gatewayProto}://${gatewayHost}:${gatewayPort}` + connect(wsUrl) + } + + const connectWithPrimaryGateway = async (): Promise<{ attempted: boolean; connected: boolean }> => { + try { + const gatewaysRes = await fetch('/api/gateways') + if (!gatewaysRes.ok) return { attempted: false, connected: false } + const gatewaysJson = await gatewaysRes.json().catch(() => ({})) + const gateways = Array.isArray(gatewaysJson?.gateways) ? 
gatewaysJson.gateways as GatewaySummary[] : [] + if (gateways.length === 0) return { attempted: false, connected: false } + + const primaryGateway = gateways.find(gw => Number(gw?.is_primary) === 1) || gateways[0] + if (!primaryGateway?.id) return { attempted: true, connected: false } + + const connectRes = await fetch('/api/gateways/connect', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ id: primaryGateway.id }), + }) + if (!connectRes.ok) return { attempted: true, connected: false } + + const payload = await connectRes.json().catch(() => ({})) + const wsUrl = typeof payload?.ws_url === 'string' ? payload.ws_url : '' + const wsToken = typeof payload?.token === 'string' ? payload.token : '' + if (!wsUrl) return { attempted: true, connected: false } + + connect(wsUrl, wsToken) + return { attempted: true, connected: true } + } catch { + return { attempted: false, connected: false } + } + } + // Fetch current user fetch('/api/auth/me') .then(async (res) => { @@ -71,8 +207,8 @@ export default function Home() { } return null }) - .then(data => { if (data?.user) setCurrentUser(data.user) }) - .catch(() => {}) + .then(data => { if (data?.user) setCurrentUser(data.user); markStep('auth') }) + .catch(() => { markStep('auth') }) // Check for available updates fetch('/api/releases/check') @@ -88,16 +224,41 @@ export default function Home() { }) .catch(() => {}) + // Check for OpenClaw updates + fetch('/api/openclaw/version') + .then(res => res.ok ? res.json() : null) + .then(data => { + if (data?.updateAvailable) { + setOpenclawUpdate({ + installed: data.installed, + latest: data.latest, + releaseUrl: data.releaseUrl, + releaseNotes: data.releaseNotes, + updateCommand: data.updateCommand, + }) + } + }) + .catch(() => {}) + // Check capabilities, then conditionally connect to gateway fetch('/api/status?action=capabilities') .then(res => res.ok ? 
res.json() : null) - .then(data => { + .then(async data => { if (data?.subscription) { setSubscription(data.subscription) } + if (data?.processUser) { + setDefaultOrgName(data.processUser) + } + if (data?.interfaceMode === 'essential' || data?.interfaceMode === 'full') { + setInterfaceMode(data.interfaceMode) + } if (data && data.gateway === false) { setDashboardMode('local') setGatewayAvailable(false) + setCapabilitiesChecked(true) + markStep('capabilities') + markStep('connect') // Skip WebSocket connect — no gateway to talk to return } @@ -105,45 +266,86 @@ export default function Home() { setDashboardMode('full') setGatewayAvailable(true) } - // Connect to gateway WebSocket - const wsToken = process.env.NEXT_PUBLIC_GATEWAY_TOKEN || process.env.NEXT_PUBLIC_WS_TOKEN || '' - const explicitWsUrl = process.env.NEXT_PUBLIC_GATEWAY_URL || '' - const gatewayPort = process.env.NEXT_PUBLIC_GATEWAY_PORT || '18789' - const gatewayHost = process.env.NEXT_PUBLIC_GATEWAY_HOST || window.location.hostname - const gatewayProto = - process.env.NEXT_PUBLIC_GATEWAY_PROTOCOL || - (window.location.protocol === 'https:' ? 'wss' : 'ws') - const wsUrl = explicitWsUrl || `${gatewayProto}://${gatewayHost}:${gatewayPort}` - connect(wsUrl, wsToken) + setCapabilitiesChecked(true) + markStep('capabilities') + + const primaryConnect = await connectWithPrimaryGateway() + if (!primaryConnect.connected && !primaryConnect.attempted) { + connectWithEnvFallback() + } + markStep('connect') }) .catch(() => { // If capabilities check fails, still try to connect - const wsToken = process.env.NEXT_PUBLIC_GATEWAY_TOKEN || process.env.NEXT_PUBLIC_WS_TOKEN || '' - const explicitWsUrl = process.env.NEXT_PUBLIC_GATEWAY_URL || '' - const gatewayPort = process.env.NEXT_PUBLIC_GATEWAY_PORT || '18789' - const gatewayHost = process.env.NEXT_PUBLIC_GATEWAY_HOST || window.location.hostname - const gatewayProto = - process.env.NEXT_PUBLIC_GATEWAY_PROTOCOL || - (window.location.protocol === 'https:' ? 
'wss' : 'ws') - const wsUrl = explicitWsUrl || `${gatewayProto}://${gatewayHost}:${gatewayPort}` - connect(wsUrl, wsToken) + setCapabilitiesChecked(true) + markStep('capabilities') + markStep('connect') + connectWithEnvFallback() }) - }, [connect, pathname, router, setCurrentUser, setDashboardMode, setGatewayAvailable, setSubscription, setUpdateAvailable]) - if (!isClient) { - return ( -
-
-
- MC -
-
-
- Loading Mission Control... -
-
-
- ) + // Check onboarding state + fetch('/api/onboarding') + .then(res => res.ok ? res.json() : null) + .then(data => { + const decision = getOnboardingSessionDecision({ + isAdmin: data?.isAdmin === true, + serverShowOnboarding: data?.showOnboarding === true, + completed: data?.completed === true, + skipped: data?.skipped === true, + dismissedThisSession: readOnboardingDismissedThisSession(), + }) + + if (decision.shouldOpen) { + clearOnboardingDismissedThisSession() + if (decision.replayFromStart) { + markOnboardingReplayFromStart() + } else { + clearOnboardingReplayFromStart() + } + setShowOnboarding(true) + } + markStep('config') + }) + .catch(() => { markStep('config') }) + // Preload workspace data in parallel + Promise.allSettled([ + fetch('/api/agents') + .then(r => r.ok ? r.json() : null) + .then((agentsData) => { + if (agentsData?.agents) setAgents(agentsData.agents) + }) + .finally(() => { markStep('agents') }), + fetch('/api/sessions') + .then(r => r.ok ? r.json() : null) + .then((sessionsData) => { + if (sessionsData?.sessions) setSessions(sessionsData.sessions) + }) + .finally(() => { markStep('sessions') }), + fetch('/api/projects') + .then(r => r.ok ? r.json() : null) + .then((projectsData) => { + if (projectsData?.projects) setProjects(projectsData.projects) + }) + .finally(() => { markStep('projects') }), + fetch('/api/memory/graph?agent=all') + .then(r => r.ok ? r.json() : null) + .then((graphData) => { + if (graphData?.agents) setMemoryGraphAgents(graphData.agents) + }) + .finally(() => { markStep('memory') }), + fetch('/api/skills') + .then(r => r.ok ? 
r.json() : null) + .then((skillsData) => { + if (skillsData?.skills) setSkillsData(skillsData.skills, skillsData.groups || [], skillsData.total || 0) + }) + .finally(() => { markStep('skills') }), + ]).catch(() => { /* panels will lazy-load as fallback */ }) + + // eslint-disable-next-line react-hooks/exhaustive-deps -- boot once on mount, not on every pathname change + }, [connect, router, setCurrentUser, setDashboardMode, setGatewayAvailable, setCapabilitiesChecked, setSubscription, setUpdateAvailable, setShowOnboarding, setAgents, setSessions, setProjects, setInterfaceMode, setMemoryGraphAgents, setSkillsData]) + + if (!isClient || !bootComplete) { + return } return ( @@ -151,33 +353,49 @@ export default function Home() { Skip to main content + {/* Left: Icon rail navigation (hidden on mobile, shown as bottom bar instead) */} - + {!showOnboarding && } {/* Center: Header + Content */}
- - - - -
-
+ {!showOnboarding && ( + <> + + + + + + + )} +
+
+
+

+ Built with care by nyk. +

+
{/* Right: Live feed (hidden on mobile) */} - {liveFeedOpen && ( + {!showOnboarding && liveFeedOpen && (
)} {/* Floating button to reopen LiveFeed when closed */} - {!liveFeedOpen && ( + {!showOnboarding && !liveFeedOpen && (
) } +const ESSENTIAL_PANELS = new Set([ + 'overview', 'agents', 'tasks', 'chat', 'activity', 'logs', 'settings', +]) + function ContentRouter({ tab }: { tab: string }) { - const { dashboardMode } = useMissionControl() + const { dashboardMode, interfaceMode, setInterfaceMode } = useMissionControl() + const navigateToPanel = useNavigateToPanel() const isLocal = dashboardMode === 'local' + // Guard: show nudge for non-essential panels in essential mode + if (interfaceMode === 'essential' && !ESSENTIAL_PANELS.has(tab)) { + return ( +
+

+ {tab.replace(/-/g, ' ')} is available in Full mode. +

+
+ + +
+
+ ) + } + switch (tab) { case 'overview': return ( <> {!isLocal && ( -
+
)} @@ -217,38 +483,31 @@ function ContentRouter({ tab }: { tab: string }) { return ( <> + {isLocal && } - {!isLocal && ( -
- -
- )} ) - case 'activity': - return case 'notifications': return case 'standup': return - case 'spawn': - return case 'sessions': - return + return case 'logs': return case 'cron': return case 'memory': return + case 'cost-tracker': case 'tokens': - return case 'agent-costs': - return + return case 'users': return case 'history': - return + case 'activity': + return case 'audit': return case 'webhooks': @@ -256,24 +515,53 @@ function ContentRouter({ tab }: { tab: string }) { case 'alerts': return case 'gateways': + if (isLocal) return return case 'gateway-config': + if (isLocal) return return case 'integrations': return case 'settings': return + case 'super-admin': + return case 'github': return case 'office': return - case 'documents': - return - case 'super-admin': - return - case 'workspaces': - return - default: - return + case 'skills': + return + case 'channels': + if (isLocal) return + return + case 'nodes': + if (isLocal) return + return + case 'security': + return + case 'debug': + return + case 'exec-approvals': + if (isLocal) return + return + case 'chat': + return + default: { + return renderPluginPanel(tab) + } } } + +function LocalModeUnavailable({ panel }: { panel: string }) { + return ( +
+

+ {panel} requires an OpenClaw gateway connection. +

+

+ Configure a gateway to enable this panel. +

+
+ ) +} diff --git a/src/app/api/activities/route.ts b/src/app/api/activities/route.ts index 92baff4..aa9c0d3 100644 --- a/src/app/api/activities/route.ts +++ b/src/app/api/activities/route.ts @@ -49,8 +49,14 @@ async function handleActivitiesRequest(request: NextRequest, workspaceId: number const params: any[] = [workspaceId]; if (type) { - query += ' AND type = ?'; - params.push(type); + const types = type.split(',').map(t => t.trim()).filter(Boolean); + if (types.length === 1) { + query += ' AND type = ?'; + params.push(types[0]); + } else if (types.length > 1) { + query += ` AND type IN (${types.map(() => '?').join(',')})`; + params.push(...types); + } } if (actor) { @@ -132,8 +138,14 @@ async function handleActivitiesRequest(request: NextRequest, workspaceId: number const countParams: any[] = [workspaceId]; if (type) { - countQuery += ' AND type = ?'; - countParams.push(type); + const types = type.split(',').map(t => t.trim()).filter(Boolean); + if (types.length === 1) { + countQuery += ' AND type = ?'; + countParams.push(types[0]); + } else if (types.length > 1) { + countQuery += ` AND type IN (${types.map(() => '?').join(',')})`; + countParams.push(...types); + } } if (actor) { diff --git a/src/app/api/adapters/route.ts b/src/app/api/adapters/route.ts new file mode 100644 index 0000000..5bada19 --- /dev/null +++ b/src/app/api/adapters/route.ts @@ -0,0 +1,118 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getAdapter, listAdapters } from '@/lib/adapters' +import { agentHeartbeatLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' + +/** + * GET /api/adapters — List available framework adapters. 
+ */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + return NextResponse.json({ adapters: listAdapters() }) +} + +/** + * POST /api/adapters — Framework-agnostic agent action dispatcher. + * + * Body: { framework, action, payload } + * + * Actions: + * register — Register an agent via its framework adapter + * heartbeat — Send a heartbeat/status update + * report — Report task progress + * assignments — Get pending task assignments + * disconnect — Disconnect an agent + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateLimited = agentHeartbeatLimiter(request) + if (rateLimited) return rateLimited + + let body: any + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 }) + } + + const framework = typeof body?.framework === 'string' ? body.framework.trim() : '' + const action = typeof body?.action === 'string' ? body.action.trim() : '' + const payload = body?.payload ?? {} + + if (!framework || !action) { + return NextResponse.json({ error: 'framework and action are required' }, { status: 400 }) + } + + let adapter + try { + adapter = getAdapter(framework) + } catch { + return NextResponse.json({ + error: `Unknown framework: ${framework}. 
Available: ${listAdapters().join(', ')}`, + }, { status: 400 }) + } + + try { + switch (action) { + case 'register': { + const { agentId, name, metadata } = payload + if (!agentId || !name) { + return NextResponse.json({ error: 'payload.agentId and payload.name required' }, { status: 400 }) + } + await adapter.register({ agentId, name, framework, metadata }) + return NextResponse.json({ ok: true, action: 'register', framework }) + } + + case 'heartbeat': { + const { agentId, status, metrics } = payload + if (!agentId) { + return NextResponse.json({ error: 'payload.agentId required' }, { status: 400 }) + } + await adapter.heartbeat({ agentId, status: status || 'online', metrics }) + return NextResponse.json({ ok: true, action: 'heartbeat', framework }) + } + + case 'report': { + const { taskId, agentId, progress, status: taskStatus, output } = payload + if (!taskId || !agentId) { + return NextResponse.json({ error: 'payload.taskId and payload.agentId required' }, { status: 400 }) + } + await adapter.reportTask({ taskId, agentId, progress: progress ?? 0, status: taskStatus || 'in_progress', output }) + return NextResponse.json({ ok: true, action: 'report', framework }) + } + + case 'assignments': { + const { agentId } = payload + if (!agentId) { + return NextResponse.json({ error: 'payload.agentId required' }, { status: 400 }) + } + const assignments = await adapter.getAssignments(agentId) + return NextResponse.json({ assignments, framework }) + } + + case 'disconnect': { + const { agentId } = payload + if (!agentId) { + return NextResponse.json({ error: 'payload.agentId required' }, { status: 400 }) + } + await adapter.disconnect(agentId) + return NextResponse.json({ ok: true, action: 'disconnect', framework }) + } + + default: + return NextResponse.json({ + error: `Unknown action: ${action}. 
Use: register, heartbeat, report, assignments, disconnect`, + }, { status: 400 }) + } + } catch (error) { + logger.error({ err: error, framework, action }, 'POST /api/adapters error') + return NextResponse.json({ error: 'Adapter action failed' }, { status: 500 }) + } +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/agents/[id]/files/route.ts b/src/app/api/agents/[id]/files/route.ts new file mode 100644 index 0000000..22c73f0 --- /dev/null +++ b/src/app/api/agents/[id]/files/route.ts @@ -0,0 +1,153 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase, db_helpers } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs' +import { dirname, isAbsolute, resolve } from 'node:path' +import { resolveWithin } from '@/lib/paths' +import { getAgentWorkspaceCandidates, readAgentWorkspaceFile } from '@/lib/agent-workspace' +import { logger } from '@/lib/logger' + +const ALLOWED_FILES = new Set([ + 'agent.md', + 'identity.md', + 'soul.md', + 'WORKING.md', + 'MEMORY.md', + 'TOOLS.md', + 'AGENTS.md', + 'MISSION.md', + 'USER.md', +]) +const FILE_ALIASES: Record = { + 'agent.md': ['agent.md', 'AGENT.md', 'MISSION.md', 'USER.md'], + 'identity.md': ['identity.md', 'IDENTITY.md'], + 'soul.md': ['soul.md', 'SOUL.md'], + 'WORKING.md': ['WORKING.md', 'working.md'], + 'MEMORY.md': ['MEMORY.md', 'memory.md'], + 'TOOLS.md': ['TOOLS.md', 'tools.md'], + 'AGENTS.md': ['AGENTS.md', 'agents.md'], + 'MISSION.md': ['MISSION.md', 'mission.md'], + 'USER.md': ['USER.md', 'user.md'], +} + +function resolveAgentWorkspacePath(workspace: string): string { + if (isAbsolute(workspace)) return resolve(workspace) + if (!config.openclawStateDir) throw new Error('OPENCLAW_STATE_DIR not configured') + return resolveWithin(config.openclawStateDir, workspace) +} + +function getAgentByIdOrName(db: ReturnType, id: string, workspaceId: number): any | 
undefined { + if (isNaN(Number(id))) { + return db.prepare('SELECT * FROM agents WHERE name = ? AND workspace_id = ?').get(id, workspaceId) + } + return db.prepare('SELECT * FROM agents WHERE id = ? AND workspace_id = ?').get(Number(id), workspaceId) +} + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const { id } = await params + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + const agent = getAgentByIdOrName(db, id, workspaceId) + if (!agent) return NextResponse.json({ error: 'Agent not found' }, { status: 404 }) + + const agentConfig = agent.config ? JSON.parse(agent.config) : {} + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name) + if (candidates.length === 0) { + return NextResponse.json({ error: 'Agent workspace is not configured' }, { status: 400 }) + } + const safeWorkspace = candidates[0] + const requested = (new URL(request.url).searchParams.get('file') || '').trim() + const files = requested + ? 
[requested] + : ['agent.md', 'identity.md', 'soul.md', 'WORKING.md', 'MEMORY.md', 'TOOLS.md', 'AGENTS.md', 'MISSION.md', 'USER.md'] + + const payload: Record<string, { exists: boolean; content: string }> = {} + for (const file of files) { + if (!ALLOWED_FILES.has(file)) { + return NextResponse.json({ error: `Unsupported file: ${file}` }, { status: 400 }) + } + const aliases = FILE_ALIASES[file] || [file] + const match = readAgentWorkspaceFile(candidates, aliases) + payload[file] = { exists: match.exists, content: match.content } + } + + return NextResponse.json({ + agent: { id: agent.id, name: agent.name }, + workspace: safeWorkspace, + files: payload, + }) + } catch (error) { + logger.error({ err: error }, 'GET /api/agents/[id]/files error') + return NextResponse.json({ error: 'Failed to load workspace files' }, { status: 500 }) + } +} + +export async function PUT( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const { id } = await params + const body = await request.json() + const file = String(body?.file || '').trim() + const content = String(body?.content || '') + const MAX_WORKSPACE_FILE_SIZE = 1024 * 1024 // 1 MB + if (content.length > MAX_WORKSPACE_FILE_SIZE) { + return NextResponse.json({ error: `File content too large (max ${MAX_WORKSPACE_FILE_SIZE} bytes)` }, { status: 413 }) + } + if (!ALLOWED_FILES.has(file)) { + return NextResponse.json({ error: `Unsupported file: ${file}` }, { status: 400 }) + } + + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + const agent = getAgentByIdOrName(db, id, workspaceId) + if (!agent) return NextResponse.json({ error: 'Agent not found' }, { status: 404 }) + + const agentConfig = agent.config ?
JSON.parse(agent.config) : {} + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name) + const safeWorkspace = candidates[0] + if (!safeWorkspace) { + return NextResponse.json({ error: 'Agent workspace is not configured' }, { status: 400 }) + } + + const safePath = resolveWithin(safeWorkspace, file) + mkdirSync(dirname(safePath), { recursive: true }) + writeFileSync(safePath, content, 'utf-8') + + if (file === 'soul.md') { + db.prepare('UPDATE agents SET soul_content = ?, updated_at = unixepoch() WHERE id = ? AND workspace_id = ?') + .run(content, agent.id, workspaceId) + } + if (file === 'WORKING.md') { + db.prepare('UPDATE agents SET working_memory = ?, updated_at = unixepoch() WHERE id = ? AND workspace_id = ?') + .run(content, agent.id, workspaceId) + } + + db_helpers.logActivity( + 'agent_workspace_file_updated', + 'agent', + agent.id, + auth.user.username, + `${file} updated for ${agent.name}`, + { file, size: content.length }, + workspaceId + ) + + return NextResponse.json({ success: true, file, size: content.length }) + } catch (error) { + logger.error({ err: error }, 'PUT /api/agents/[id]/files error') + return NextResponse.json({ error: 'Failed to save workspace file' }, { status: 500 }) + } +} diff --git a/src/app/api/agents/[id]/heartbeat/route.ts b/src/app/api/agents/[id]/heartbeat/route.ts index 3fad965..6ee4eac 100644 --- a/src/app/api/agents/[id]/heartbeat/route.ts +++ b/src/app/api/agents/[id]/heartbeat/route.ts @@ -1,6 +1,7 @@ import { NextRequest, NextResponse } from 'next/server'; import { getDatabase, db_helpers } from '@/lib/db'; import { requireRole } from '@/lib/auth'; +import { agentHeartbeatLimiter } from '@/lib/rate-limit'; import { logger } from '@/lib/logger'; /** @@ -189,6 +190,9 @@ export async function POST( const auth = requireRole(request, 'operator'); if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }); + const rateLimited = agentHeartbeatLimiter(request); + if 
(rateLimited) return rateLimited; + let body: any = {}; try { body = await request.json(); diff --git a/src/app/api/agents/[id]/memory/route.ts b/src/app/api/agents/[id]/memory/route.ts index 56277b2..619291c 100644 --- a/src/app/api/agents/[id]/memory/route.ts +++ b/src/app/api/agents/[id]/memory/route.ts @@ -2,6 +2,17 @@ import { NextRequest, NextResponse } from 'next/server'; import { getDatabase, db_helpers } from '@/lib/db'; import { requireRole } from '@/lib/auth'; import { logger } from '@/lib/logger'; +import { config } from '@/lib/config'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import { dirname, isAbsolute, resolve } from 'node:path'; +import { resolveWithin } from '@/lib/paths'; +import { getAgentWorkspaceCandidates, readAgentWorkspaceFile } from '@/lib/agent-workspace'; + +function resolveAgentWorkspacePath(workspace: string): string { + if (isAbsolute(workspace)) return resolve(workspace) + if (!config.openclawStateDir) throw new Error('OPENCLAW_STATE_DIR not configured') + return resolveWithin(config.openclawStateDir, workspace) +} /** * GET /api/agents/[id]/memory - Get agent's working memory @@ -43,11 +54,28 @@ export async function GET( db.exec("ALTER TABLE agents ADD COLUMN working_memory TEXT DEFAULT ''"); } + // Prefer workspace WORKING.md, fall back to DB working_memory + let workingMemory = ''; + let source: 'workspace' | 'database' | 'none' = 'none'; + try { + const agentConfig = agent.config ? 
JSON.parse(agent.config) : {}; + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name); + const match = readAgentWorkspaceFile(candidates, ['WORKING.md', 'working.md', 'MEMORY.md', 'memory.md']); + if (match.exists) { + workingMemory = match.content; + source = 'workspace'; + } + } catch (err) { + logger.warn({ err, agent: agent.name }, 'Failed to read WORKING.md from workspace'); + } + // Get working memory content const memoryStmt = db.prepare(`SELECT working_memory FROM agents WHERE ${isNaN(Number(agentId)) ? 'name' : 'id'} = ? AND workspace_id = ?`); const result = memoryStmt.get(agentId, workspaceId) as any; - - const workingMemory = result?.working_memory || ''; + if (!workingMemory) { + workingMemory = result?.working_memory || ''; + source = workingMemory ? 'database' : 'none'; + } return NextResponse.json({ agent: { @@ -56,6 +84,7 @@ export async function GET( role: agent.role }, working_memory: workingMemory, + source, updated_at: agent.updated_at, size: workingMemory.length }); @@ -118,6 +147,22 @@ export async function PUT( } const now = Math.floor(Date.now() / 1000); + + // Best effort: sync workspace WORKING.md if agent workspace is configured + let savedToWorkspace = false; + try { + const agentConfig = agent.config ? JSON.parse(agent.config) : {}; + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name); + const safeWorkspace = candidates[0]; + if (safeWorkspace) { + const safeWorkingPath = resolveWithin(safeWorkspace, 'WORKING.md'); + mkdirSync(dirname(safeWorkingPath), { recursive: true }); + writeFileSync(safeWorkingPath, newContent, 'utf-8'); + savedToWorkspace = true; + } + } catch (err) { + logger.warn({ err, agent: agent.name }, 'Failed to write WORKING.md to workspace'); + } // Update working memory const updateStmt = db.prepare(` @@ -135,10 +180,11 @@ export async function PUT( agent.id, agent.name, `Working memory ${append ? 
'appended' : 'updated'} for agent ${agent.name}`, - { + { content_length: newContent.length, append_mode: append || false, - timestamp: now + timestamp: now, + saved_to_workspace: savedToWorkspace }, workspaceId ); @@ -147,6 +193,7 @@ export async function PUT( success: true, message: `Working memory ${append ? 'appended' : 'updated'} for ${agent.name}`, working_memory: newContent, + saved_to_workspace: savedToWorkspace, updated_at: now, size: newContent.length }); @@ -185,6 +232,20 @@ export async function DELETE( } const now = Math.floor(Date.now() / 1000); + + // Best effort: clear workspace WORKING.md if agent workspace is configured + try { + const agentConfig = agent.config ? JSON.parse(agent.config) : {}; + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name); + const safeWorkspace = candidates[0]; + if (safeWorkspace) { + const safeWorkingPath = resolveWithin(safeWorkspace, 'WORKING.md'); + mkdirSync(dirname(safeWorkingPath), { recursive: true }); + writeFileSync(safeWorkingPath, '', 'utf-8'); + } + } catch (err) { + logger.warn({ err, agent: agent.name }, 'Failed to clear WORKING.md in workspace'); + } // Clear working memory const updateStmt = db.prepare(` diff --git a/src/app/api/agents/[id]/route.ts b/src/app/api/agents/[id]/route.ts index 8a0c1f2..a21de32 100644 --- a/src/app/api/agents/[id]/route.ts +++ b/src/app/api/agents/[id]/route.ts @@ -1,9 +1,10 @@ import { NextRequest, NextResponse } from 'next/server' import { getDatabase, db_helpers, logAuditEvent } from '@/lib/db' import { requireRole } from '@/lib/auth' -import { writeAgentToConfig, enrichAgentConfigFromWorkspace } from '@/lib/agent-sync' +import { writeAgentToConfig, enrichAgentConfigFromWorkspace, removeAgentFromConfig } from '@/lib/agent-sync' import { eventBus } from '@/lib/event-bus' import { logger } from '@/lib/logger' +import { runOpenClaw } from '@/lib/command' /** * GET /api/agents/[id] - Get a single agent by ID or name @@ -102,20 +103,9 @@ export async 
function PUT( return writeBack } - // Unified save: gateway first, then DB. If DB fails after gateway write, attempt rollback. - if (shouldWriteToGateway) { - try { - await writeAgentToConfig(getWriteBackPayload(gateway_config)) - } catch (err: any) { - return NextResponse.json( - { error: `Save failed: unable to update gateway config: ${err.message}` }, - { status: 502 } - ) - } - } - + // Unified save: DB first (transactional, easy to revert), then gateway file. + // If gateway write fails after DB succeeds, revert DB to keep consistency. try { - // Build update const fields: string[] = ['updated_at = ?'] const values: any[] = [now] @@ -132,21 +122,33 @@ export async function PUT( values.push(agent.id, workspaceId) db.prepare(`UPDATE agents SET ${fields.join(', ')} WHERE id = ? AND workspace_id = ?`).run(...values) } catch (err: any) { - if (shouldWriteToGateway) { - try { - // Best-effort rollback to preserve consistency if DB update fails after gateway write. - await writeAgentToConfig(getWriteBackPayload(existingConfig)) - } catch (rollbackErr: any) { - logger.error({ err: rollbackErr, agent: agent.name }, 'Failed to rollback gateway config after DB failure') - return NextResponse.json( - { error: `Save failed after gateway update and rollback failed: ${err.message}` }, - { status: 500 } - ) - } - } return NextResponse.json({ error: `Save failed: ${err.message}` }, { status: 500 }) } + if (shouldWriteToGateway) { + try { + await writeAgentToConfig(getWriteBackPayload(gateway_config)) + } catch (err: any) { + // Gateway write failed — revert DB to previous state + try { + const revertFields: string[] = ['updated_at = ?'] + const revertValues: any[] = [agent.updated_at] + revertFields.push('role = ?') + revertValues.push(agent.role) + revertFields.push('config = ?') + revertValues.push(agent.config || '{}') + revertValues.push(agent.id, workspaceId) + db.prepare(`UPDATE agents SET ${revertFields.join(', ')} WHERE id = ? 
AND workspace_id = ?`).run(...revertValues) + } catch (revertErr: any) { + logger.error({ err: revertErr, agent: agent.name }, 'Failed to revert DB after gateway write failure') + } + return NextResponse.json( + { error: `Save failed: unable to update gateway config: ${err.message}` }, + { status: 502 } + ) + } + } + if (shouldWriteToGateway) { const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' logAuditEvent({ @@ -205,6 +207,13 @@ export async function DELETE( const db = getDatabase() const { id } = await params const workspaceId = auth.user.workspace_id ?? 1; + let removeWorkspace = false + try { + const body = await request.json() + removeWorkspace = Boolean(body?.remove_workspace) + } catch { + // Optional body + } let agent if (isNaN(Number(id))) { @@ -217,6 +226,38 @@ export async function DELETE( return NextResponse.json({ error: 'Agent not found' }, { status: 404 }) } + if (removeWorkspace) { + const agentConfig = agent.config ? JSON.parse(agent.config) : {} + const openclawId = + String(agentConfig?.openclawId || agent.name || '') + .toLowerCase() + .replace(/[^a-z0-9._-]+/g, '-') + .replace(/^-+|-+$/g, '') || agent.name + try { + await runOpenClaw(['agents', 'delete', openclawId, '--force'], { timeoutMs: 30000 }) + } catch (err: any) { + logger.error({ err, openclawId, agent: agent.name }, 'Failed to remove OpenClaw agent/workspace') + return NextResponse.json( + { error: `Failed to remove OpenClaw workspace for ${agent.name}: ${err?.message || 'unknown error'}` }, + { status: 502 } + ) + } + } + + let configCleanupWarning: string | null = null + try { + const agentConfig = agent.config ? 
JSON.parse(agent.config) : {} + const openclawId = + String(agentConfig?.openclawId || agent.name || '') + .toLowerCase() + .replace(/[^a-z0-9._-]+/g, '-') + .replace(/^-+|-+$/g, '') || agent.name + await removeAgentFromConfig({ id: openclawId, name: agent.name }) + } catch (err: any) { + configCleanupWarning = `OpenClaw config cleanup skipped for ${agent.name}: ${err?.message || 'unknown error'}` + logger.warn({ err, agent: agent.name }, 'Failed to remove OpenClaw agent config entry') + } + db.prepare('DELETE FROM agents WHERE id = ? AND workspace_id = ?').run(agent.id, workspaceId) db_helpers.logActivity( @@ -225,13 +266,18 @@ export async function DELETE( agent.id, auth.user.username, `Deleted agent: ${agent.name}`, - { name: agent.name, role: agent.role }, + { name: agent.name, role: agent.role, remove_workspace: removeWorkspace }, workspaceId ) eventBus.broadcast('agent.deleted', { id: agent.id, name: agent.name }) - return NextResponse.json({ success: true, deleted: agent.name }) + return NextResponse.json({ + success: true, + deleted: agent.name, + remove_workspace: removeWorkspace, + ...(configCleanupWarning ? 
{ warning: configCleanupWarning } : {}), + }) } catch (error) { logger.error({ err: error }, 'DELETE /api/agents/[id] error') return NextResponse.json({ error: 'Failed to delete agent' }, { status: 500 }) diff --git a/src/app/api/agents/[id]/soul/route.ts b/src/app/api/agents/[id]/soul/route.ts index 5e506ad..3080880 100644 --- a/src/app/api/agents/[id]/soul/route.ts +++ b/src/app/api/agents/[id]/soul/route.ts @@ -4,6 +4,7 @@ import { readFileSync, existsSync, readdirSync, writeFileSync, mkdirSync } from import { join, dirname, isAbsolute, resolve } from 'path'; import { config } from '@/lib/config'; import { resolveWithin } from '@/lib/paths'; +import { getAgentWorkspaceCandidates, readAgentWorkspaceFile } from '@/lib/agent-workspace'; import { requireRole } from '@/lib/auth'; import { logger } from '@/lib/logger'; @@ -47,13 +48,11 @@ export async function GET( try { const agentConfig = agent.config ? JSON.parse(agent.config) : {} - if (agentConfig.workspace) { - const safeWorkspace = resolveAgentWorkspacePath(agentConfig.workspace) - const safeSoulPath = resolveWithin(safeWorkspace, 'soul.md') - if (existsSync(safeSoulPath)) { - soulContent = readFileSync(safeSoulPath, 'utf-8') - source = 'workspace' - } + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name) + const match = readAgentWorkspaceFile(candidates, ['soul.md', 'SOUL.md']) + if (match.exists) { + soulContent = match.content + source = 'workspace' } } catch (err) { logger.warn({ err, agent: agent.name }, 'Failed to read soul.md from workspace') @@ -163,8 +162,9 @@ export async function PUT( let savedToWorkspace = false try { const agentConfig = agent.config ? 
JSON.parse(agent.config) : {} - if (agentConfig.workspace) { - const safeWorkspace = resolveAgentWorkspacePath(agentConfig.workspace) + const candidates = getAgentWorkspaceCandidates(agentConfig, agent.name) + const safeWorkspace = candidates[0] + if (safeWorkspace) { const safeSoulPath = resolveWithin(safeWorkspace, 'soul.md') mkdirSync(dirname(safeSoulPath), { recursive: true }) writeFileSync(safeSoulPath, newSoulContent || '', 'utf-8') diff --git a/src/app/api/agents/comms/route.ts b/src/app/api/agents/comms/route.ts index 36a2ef4..b5aef23 100644 --- a/src/app/api/agents/comms/route.ts +++ b/src/app/api/agents/comms/route.ts @@ -21,31 +21,46 @@ export async function GET(request: NextRequest) { const since = searchParams.get("since") const agent = searchParams.get("agent") - // Filter out human/system messages - only agent-to-agent + // Session-thread comms feed used by coordinator + runtime sessions + const commsPredicate = ` + ( + conversation_id LIKE 'a2a:%' + OR conversation_id LIKE 'coord:%' + OR conversation_id LIKE 'session:%' + OR conversation_id LIKE 'agent_%' + OR (json_valid(metadata) AND json_extract(metadata, '$.channel') = 'coordinator-inbox') + ) + ` + const humanNames = ["human", "system", "operator"] const humanPlaceholders = humanNames.map(() => "?").join(",") - // 1. Get inter-agent messages - let messagesQuery = ` - SELECT * FROM messages + // 1. Get timeline messages (page latest rows but render chronologically) + let messagesWhere = ` + FROM messages WHERE workspace_id = ? - AND to_agent IS NOT NULL - AND from_agent NOT IN (${humanPlaceholders}) - AND to_agent NOT IN (${humanPlaceholders}) + AND ${commsPredicate} ` - const messagesParams: any[] = [workspaceId, ...humanNames, ...humanNames] + const messagesParams: any[] = [workspaceId] if (since) { - messagesQuery += " AND created_at > ?" - messagesParams.push(parseInt(since)) + messagesWhere += " AND created_at > ?" 
+ messagesParams.push(parseInt(since, 10)) } if (agent) { - messagesQuery += " AND (from_agent = ? OR to_agent = ?)" + messagesWhere += " AND (from_agent = ? OR to_agent = ?)" messagesParams.push(agent, agent) } - // Deterministic chronological ordering prevents visual jumps in UI - messagesQuery += " ORDER BY created_at ASC, id ASC LIMIT ? OFFSET ?" + const messagesQuery = ` + SELECT * FROM ( + SELECT * + ${messagesWhere} + ORDER BY created_at DESC, id DESC + LIMIT ? OFFSET ? + ) recent + ORDER BY created_at ASC, id ASC + ` messagesParams.push(limit, offset) const messages = db.prepare(messagesQuery).all(...messagesParams) as Message[] @@ -58,14 +73,15 @@ export async function GET(request: NextRequest) { MAX(created_at) as last_message_at FROM messages WHERE workspace_id = ? + AND ${commsPredicate} AND to_agent IS NOT NULL - AND from_agent NOT IN (${humanPlaceholders}) - AND to_agent NOT IN (${humanPlaceholders}) + AND lower(from_agent) NOT IN (${humanPlaceholders}) + AND lower(to_agent) NOT IN (${humanPlaceholders}) ` const graphParams: any[] = [workspaceId, ...humanNames, ...humanNames] if (since) { graphQuery += " AND created_at > ?" - graphParams.push(parseInt(since)) + graphParams.push(parseInt(since, 10)) } graphQuery += " GROUP BY from_agent, to_agent ORDER BY message_count DESC" @@ -75,15 +91,19 @@ export async function GET(request: NextRequest) { const statsQuery = ` SELECT agent, SUM(sent) as sent, SUM(received) as received FROM ( SELECT from_agent as agent, COUNT(*) as sent, 0 as received - FROM messages WHERE workspace_id = ? AND to_agent IS NOT NULL - AND from_agent NOT IN (${humanPlaceholders}) - AND to_agent NOT IN (${humanPlaceholders}) + FROM messages WHERE workspace_id = ? 
+ AND ${commsPredicate} + AND to_agent IS NOT NULL + AND lower(from_agent) NOT IN (${humanPlaceholders}) + AND lower(to_agent) NOT IN (${humanPlaceholders}) GROUP BY from_agent UNION ALL SELECT to_agent as agent, 0 as sent, COUNT(*) as received - FROM messages WHERE workspace_id = ? AND to_agent IS NOT NULL - AND from_agent NOT IN (${humanPlaceholders}) - AND to_agent NOT IN (${humanPlaceholders}) + FROM messages WHERE workspace_id = ? + AND ${commsPredicate} + AND to_agent IS NOT NULL + AND lower(from_agent) NOT IN (${humanPlaceholders}) + AND lower(to_agent) NOT IN (${humanPlaceholders}) GROUP BY to_agent ) GROUP BY agent ORDER BY (sent + received) DESC ` @@ -94,14 +114,12 @@ export async function GET(request: NextRequest) { let countQuery = ` SELECT COUNT(*) as total FROM messages WHERE workspace_id = ? - AND to_agent IS NOT NULL - AND from_agent NOT IN (${humanPlaceholders}) - AND to_agent NOT IN (${humanPlaceholders}) + AND ${commsPredicate} ` - const countParams: any[] = [workspaceId, ...humanNames, ...humanNames] + const countParams: any[] = [workspaceId] if (since) { countQuery += " AND created_at > ?" - countParams.push(parseInt(since)) + countParams.push(parseInt(since, 10)) } if (agent) { countQuery += " AND (from_agent = ? OR to_agent = ?)" @@ -112,15 +130,13 @@ export async function GET(request: NextRequest) { let seededCountQuery = ` SELECT COUNT(*) as seeded FROM messages WHERE workspace_id = ? - AND to_agent IS NOT NULL - AND from_agent NOT IN (${humanPlaceholders}) - AND to_agent NOT IN (${humanPlaceholders}) + AND ${commsPredicate} AND conversation_id LIKE ? ` - const seededParams: any[] = [workspaceId, ...humanNames, ...humanNames, "conv-multi-%"] + const seededParams: any[] = [workspaceId, "conv-multi-%"] if (since) { seededCountQuery += " AND created_at > ?" - seededParams.push(parseInt(since)) + seededParams.push(parseInt(since, 10)) } if (agent) { seededCountQuery += " AND (from_agent = ? 
OR to_agent = ?)" @@ -142,7 +158,6 @@ export async function GET(request: NextRequest) { try { parsedMetadata = JSON.parse(msg.metadata) } catch { - // Keep endpoint resilient even if one legacy row has bad metadata parsedMetadata = null } } diff --git a/src/app/api/agents/evals/route.ts b/src/app/api/agents/evals/route.ts new file mode 100644 index 0000000..9d8ed3d --- /dev/null +++ b/src/app/api/agents/evals/route.ts @@ -0,0 +1,171 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { readLimiter, mutationLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' +import { + runOutputEvals, + evalReasoningCoherence, + evalToolReliability, + runDriftCheck, + getDriftTimeline, + type EvalResult, +} from '@/lib/agent-evals' + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = readLimiter(request) + if (rateCheck) return rateCheck + + try { + const { searchParams } = new URL(request.url) + const agent = searchParams.get('agent') + const action = searchParams.get('action') + const workspaceId = auth.user.workspace_id ?? 1 + + if (!agent) { + return NextResponse.json({ error: 'Missing required parameter: agent' }, { status: 400 }) + } + + // History mode + if (action === 'history') { + const weeks = parseInt(searchParams.get('weeks') || '4', 10) + const db = getDatabase() + + const history = db.prepare(` + SELECT eval_layer, score, passed, detail, created_at + FROM eval_runs + WHERE agent_name = ? AND workspace_id = ? + ORDER BY created_at DESC + LIMIT ? 
+ `).all(agent, workspaceId, weeks * 7) as any[] + + const driftTimeline = getDriftTimeline(agent, weeks, workspaceId) + + return NextResponse.json({ + agent, + history, + driftTimeline, + }) + } + + // Default: latest eval results per layer + const db = getDatabase() + const latestByLayer = db.prepare(` + SELECT e.eval_layer, e.score, e.passed, e.detail, e.created_at + FROM eval_runs e + INNER JOIN ( + SELECT eval_layer, MAX(created_at) as max_created + FROM eval_runs + WHERE agent_name = ? AND workspace_id = ? + GROUP BY eval_layer + ) latest ON e.eval_layer = latest.eval_layer AND e.created_at = latest.max_created + WHERE e.agent_name = ? AND e.workspace_id = ? + `).all(agent, workspaceId, agent, workspaceId) as any[] + + const driftResults = runDriftCheck(agent, workspaceId) + const hasDrift = driftResults.some(d => d.drifted) + + return NextResponse.json({ + agent, + layers: latestByLayer, + drift: { + hasDrift, + metrics: driftResults, + }, + }) + } catch (error) { + logger.error({ err: error }, 'GET /api/agents/evals error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +export async function POST(request: NextRequest) { + try { + const body = await request.json() + const { action } = body + + if (action === 'run') { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + const { agent, layer } = body + if (!agent) return NextResponse.json({ error: 'Missing: agent' }, { status: 400 }) + + const workspaceId = auth.user.workspace_id ?? 1 + const db = getDatabase() + const results: EvalResult[] = [] + + const layers = layer ? 
[layer] : ['output', 'trace', 'component', 'drift'] + + for (const l of layers) { + let evalResults: EvalResult[] = [] + switch (l) { + case 'output': + evalResults = runOutputEvals(agent, 168, workspaceId) + break + case 'trace': + evalResults = [evalReasoningCoherence(agent, 24, workspaceId)] + break + case 'component': + evalResults = [evalToolReliability(agent, 24, workspaceId)] + break + case 'drift': { + const driftResults = runDriftCheck(agent, workspaceId) + const driftScore = driftResults.filter(d => !d.drifted).length / Math.max(driftResults.length, 1) + evalResults = [{ + layer: 'drift', + score: Math.round(driftScore * 100) / 100, + passed: !driftResults.some(d => d.drifted), + detail: driftResults.map(d => `${d.metric}: ${d.drifted ? 'DRIFTED' : 'stable'} (delta=${d.delta})`).join('; '), + }] + break + } + } + + for (const r of evalResults) { + db.prepare(` + INSERT INTO eval_runs (agent_name, eval_layer, score, passed, detail, workspace_id) + VALUES (?, ?, ?, ?, ?, ?) + `).run(agent, r.layer, r.score, r.passed ? 1 : 0, r.detail, workspaceId) + results.push(r) + } + } + + return NextResponse.json({ agent, results }) + } + + if (action === 'golden-set') { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + const { name, entries } = body + if (!name) return NextResponse.json({ error: 'Missing: name' }, { status: 400 }) + + const workspaceId = auth.user.workspace_id ?? 1 + const db = getDatabase() + + db.prepare(` + INSERT INTO eval_golden_sets (name, entries, created_by, workspace_id) + VALUES (?, ?, ?, ?) 
+ ON CONFLICT(name, workspace_id) + DO UPDATE SET entries = excluded.entries, updated_at = unixepoch() + `).run(name, JSON.stringify(entries || []), auth.user.username, workspaceId) + + return NextResponse.json({ success: true, name }) + } + + return NextResponse.json({ error: 'Unknown action' }, { status: 400 }) + } catch (error) { + logger.error({ err: error }, 'POST /api/agents/evals error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/agents/message/route.ts b/src/app/api/agents/message/route.ts index dcbcc4e..32cc84f 100644 --- a/src/app/api/agents/message/route.ts +++ b/src/app/api/agents/message/route.ts @@ -5,6 +5,9 @@ import { requireRole } from '@/lib/auth' import { validateBody, createMessageSchema } from '@/lib/validation' import { mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +import { scanForInjection } from '@/lib/injection-guard' +import { scanForSecrets } from '@/lib/secret-scanner' +import { logSecurityEvent } from '@/lib/security-events' export async function POST(request: NextRequest) { const auth = requireRole(request, 'operator') @@ -19,6 +22,24 @@ export async function POST(request: NextRequest) { const { to, message } = result.data const from = auth.user.display_name || auth.user.username || 'system' + // Scan message for injection — this gets forwarded directly to an agent + const injectionReport = scanForInjection(message, { context: 'prompt' }) + if (!injectionReport.safe) { + const criticals = injectionReport.matches.filter(m => m.severity === 'critical') + if (criticals.length > 0) { + logger.warn({ to, rules: criticals.map(m => m.rule) }, 'Blocked agent message: injection detected') + return NextResponse.json( + { error: 'Message blocked: potentially unsafe content detected', injection: criticals.map(m => ({ rule: m.rule, description: m.description })) }, + { status: 422 } + ) + } + } + + const secretHits = scanForSecrets(message) + if 
(secretHits.length > 0) { + try { logSecurityEvent({ event_type: 'secret_exposure', severity: 'critical', source: 'agent-message', agent_name: from, detail: JSON.stringify({ count: secretHits.length, types: secretHits.map(s => s.type) }), workspace_id: auth.user.workspace_id ?? 1, tenant_id: 1 }) } catch {} + } + const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1; const agent = db diff --git a/src/app/api/agents/optimize/route.ts b/src/app/api/agents/optimize/route.ts new file mode 100644 index 0000000..7aab7f3 --- /dev/null +++ b/src/app/api/agents/optimize/route.ts @@ -0,0 +1,102 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { readLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' +import { + analyzeTokenEfficiency, + analyzeToolPatterns, + getFleetBenchmarks, + generateRecommendations, +} from '@/lib/agent-optimizer' + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = readLimiter(request) + if (rateCheck) return rateCheck + + try { + const { searchParams } = new URL(request.url) + const agent = searchParams.get('agent') + const hours = parseInt(searchParams.get('hours') || '24', 10) + const workspaceId = auth.user.workspace_id ?? 
1 + + if (!agent) { + return NextResponse.json({ error: 'Missing required parameter: agent' }, { status: 400 }) + } + + const efficiency = analyzeTokenEfficiency(agent, hours, workspaceId) + const toolPatterns = analyzeToolPatterns(agent, hours, workspaceId) + const fleet = getFleetBenchmarks(workspaceId) + const recommendations = generateRecommendations(agent, workspaceId) + + // Calculate fleet percentile for tokens per session + const fleetTokens = fleet + .map(f => f.tokensPerTask) + .filter(t => t > 0) + .sort((a, b) => a - b) + const agentTokensPerTask = efficiency.sessionsCount > 0 ? efficiency.avgTokensPerSession : 0 + const percentile = fleetTokens.length > 0 + ? Math.round((fleetTokens.filter(t => t >= agentTokensPerTask).length / fleetTokens.length) * 100) + : 50 + + // Fleet average cost + const fleetAvgCost = fleet.length > 0 + ? fleet.reduce((sum, f) => sum + f.costPerTask, 0) / fleet.length + : 0 + + // Tool analysis + const mostUsed = toolPatterns.topTools.slice(0, 5) + const leastEffective = toolPatterns.topTools + .filter(t => t.successRate < 80) + .sort((a, b) => a.successRate - b.successRate) + .slice(0, 5) + + // Performance from fleet benchmarks + const agentBenchmark = fleet.find(f => f.agentName === agent) + + return NextResponse.json({ + agent, + analyzedAt: new Date().toISOString(), + efficiency: { + tokensPerTask: agentTokensPerTask, + fleetAverage: fleetTokens.length > 0 + ? Math.round(fleetTokens.reduce((a, b) => a + b, 0) / fleetTokens.length) + : 0, + percentile, + trend: efficiency.totalTokens, + costPerTask: efficiency.avgCostPerSession, + }, + toolPatterns: { + mostUsed: mostUsed.map(t => ({ + name: t.toolName, + count: t.count, + successRate: t.successRate, + })), + leastEffective: leastEffective.map(t => ({ + name: t.toolName, + count: t.count, + successRate: t.successRate, + })), + unusedCapabilities: [], + }, + performance: { + taskCompletionRate: agentBenchmark?.tasksCompleted ?? 
0, + avgTaskDuration: toolPatterns.avgDurationMs, + errorRate: toolPatterns.failureRate, + fleetRanking: fleet.findIndex(f => f.agentName === agent) + 1 || fleet.length + 1, + }, + recommendations: recommendations.map(r => ({ + category: r.category, + priority: r.severity, + title: r.category.charAt(0).toUpperCase() + r.category.slice(1) + ' issue', + description: r.message, + expectedImpact: r.metric ?? null, + })), + }) + } catch (error) { + logger.error({ err: error }, 'GET /api/agents/optimize error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/agents/register/route.ts b/src/app/api/agents/register/route.ts new file mode 100644 index 0000000..56a22b8 --- /dev/null +++ b/src/app/api/agents/register/route.ts @@ -0,0 +1,137 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase, db_helpers } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { selfRegisterLimiter } from '@/lib/rate-limit' +import { logAuditEvent } from '@/lib/db' +import { eventBus } from '@/lib/event-bus' +import { logger } from '@/lib/logger' + +const NAME_RE = /^[a-zA-Z0-9][a-zA-Z0-9._-]{0,62}$/ +const VALID_ROLES = ['coder', 'reviewer', 'tester', 'devops', 'researcher', 'assistant', 'agent'] + +/** + * POST /api/agents/register — Agent self-registration. + * + * Allows agents to register themselves with minimal auth (viewer role). + * If an agent with the same name already exists, returns the existing agent + * (idempotent upsert on status/last_seen). + * + * Body: { name, role?, capabilities?, framework? } + * + * Rate-limited to 5 registrations/min per IP to prevent spam. 
+ */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const limited = selfRegisterLimiter(request) + if (limited) return limited + + let body: any + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body required' }, { status: 400 }) + } + + const name = typeof body?.name === 'string' ? body.name.trim() : '' + const role = typeof body?.role === 'string' ? body.role.trim() : 'agent' + const capabilities = Array.isArray(body?.capabilities) ? body.capabilities.filter((c: any) => typeof c === 'string') : [] + const framework = typeof body?.framework === 'string' ? body.framework.trim() : null + + if (!name || !NAME_RE.test(name)) { + return NextResponse.json({ + error: 'Invalid agent name. Use 1-63 alphanumeric characters, dots, hyphens, or underscores. Must start with alphanumeric.', + }, { status: 400 }) + } + + if (!VALID_ROLES.includes(role)) { + return NextResponse.json({ + error: `Invalid role. Use: ${VALID_ROLES.join(', ')}`, + }, { status: 400 }) + } + + try { + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + const now = Math.floor(Date.now() / 1000) + + // Check if agent already exists — idempotent: update last_seen and status + const existing = db.prepare( + 'SELECT * FROM agents WHERE name = ? AND workspace_id = ?' + ).get(name, workspaceId) as any | undefined + + if (existing) { + db.prepare( + 'UPDATE agents SET status = ?, last_seen = ?, updated_at = ? WHERE id = ? AND workspace_id = ?' 
+ ).run('idle', now, now, existing.id, workspaceId) + + return NextResponse.json({ + agent: { + id: existing.id, + name: existing.name, + role: existing.role, + status: 'idle', + created_at: existing.created_at, + }, + registered: false, + message: 'Agent already registered, status updated', + }) + } + + // Create new agent + const config: Record<string, unknown> = {} + if (capabilities.length > 0) config.capabilities = capabilities + if (framework) config.framework = framework + + const result = db.prepare(` + INSERT INTO agents (name, role, status, config, created_at, updated_at, last_seen, workspace_id) + VALUES (?, ?, 'idle', ?, ?, ?, ?, ?) + `).run(name, role, JSON.stringify(config), now, now, now, workspaceId) + + const agentId = Number(result.lastInsertRowid) + + db_helpers.logActivity( + 'agent_created', + 'agent', + agentId, + name, + `Agent self-registered: ${name} (${role})${framework ? ` via ${framework}` : ''}`, + { name, role, framework, capabilities, self_registered: true }, + workspaceId, + ) + + logAuditEvent({ + action: 'agent_self_register', + actor: auth.user.username, + actor_id: auth.user.id, + target_type: 'agent', + target_id: agentId, + detail: { name, role, framework, self_registered: true }, + ip_address: request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown', + }) + + eventBus.broadcast('agent.created', { id: agentId, name, role, status: 'idle' }) + + return NextResponse.json({ + agent: { + id: agentId, + name, + role, + status: 'idle', + created_at: now, + }, + registered: true, + message: 'Agent registered successfully', + }, { status: 201 }) + } catch (error: any) { + if (error.message?.includes('UNIQUE constraint')) { + // Race condition — another request registered the same name + return NextResponse.json({ error: 'Agent name already exists' }, { status: 409 }) + } + logger.error({ err: error }, 'POST /api/agents/register error') + return NextResponse.json({ error: 'Registration failed' }, { status: 500 }) + } +} + 
+export const dynamic = 'force-dynamic' diff --git a/src/app/api/agents/route.ts b/src/app/api/agents/route.ts index 1f52638..dc061af 100644 --- a/src/app/api/agents/route.ts +++ b/src/app/api/agents/route.ts @@ -64,7 +64,8 @@ export async function GET(request: NextRequest) { COUNT(*) as total, SUM(CASE WHEN status = 'assigned' THEN 1 ELSE 0 END) as assigned, SUM(CASE WHEN status = 'in_progress' THEN 1 ELSE 0 END) as in_progress, - SUM(CASE WHEN status = 'done' THEN 1 ELSE 0 END) as completed + SUM(CASE WHEN status = 'quality_review' THEN 1 ELSE 0 END) as quality_review, + SUM(CASE WHEN status = 'done' THEN 1 ELSE 0 END) as done FROM tasks WHERE assigned_to = ? AND workspace_id = ? `); @@ -78,7 +79,9 @@ export async function GET(request: NextRequest) { total: taskStats.total || 0, assigned: taskStats.assigned || 0, in_progress: taskStats.in_progress || 0, - completed: taskStats.completed || 0 + quality_review: taskStats.quality_review || 0, + done: taskStats.done || 0, + completed: taskStats.done || 0 } }; }); @@ -185,7 +188,7 @@ export async function POST(request: NextRequest) { try { await runOpenClaw( - ['agents', 'add', openclawId, '--name', name, '--workspace', workspacePath, '--non-interactive'], + ['agents', 'add', openclawId, '--workspace', workspacePath, '--non-interactive'], { timeoutMs: 20000 } ); } catch (provisionError: any) { @@ -244,7 +247,7 @@ export async function POST(request: NextRequest) { const parsedAgent = { ...createdAgent, config: JSON.parse(createdAgent.config || '{}'), - taskStats: { total: 0, assigned: 0, in_progress: 0, completed: 0 } + taskStats: { total: 0, assigned: 0, in_progress: 0, quality_review: 0, done: 0, completed: 0 } }; // Broadcast to SSE clients diff --git a/src/app/api/agents/sync/route.ts b/src/app/api/agents/sync/route.ts index 96e2588..be0ebd2 100644 --- a/src/app/api/agents/sync/route.ts +++ b/src/app/api/agents/sync/route.ts @@ -1,17 +1,27 @@ import { NextRequest, NextResponse } from 'next/server' import { 
requireRole } from '@/lib/auth' import { syncAgentsFromConfig, previewSyncDiff } from '@/lib/agent-sync' +import { syncLocalAgents } from '@/lib/local-agent-sync' import { logger } from '@/lib/logger' /** - * POST /api/agents/sync - Trigger agent config sync from openclaw.json + * POST /api/agents/sync - Trigger agent config sync + * ?source=local triggers local disk scan instead of openclaw.json sync. * Requires admin role. */ export async function POST(request: NextRequest) { const auth = requireRole(request, 'admin') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + const { searchParams } = new URL(request.url) + const source = searchParams.get('source') + try { + if (source === 'local') { + const result = await syncLocalAgents() + return NextResponse.json(result) + } + const result = await syncAgentsFromConfig(auth.user.username) if (result.error) { diff --git a/src/app/api/auth/google/disconnect/route.ts b/src/app/api/auth/google/disconnect/route.ts new file mode 100644 index 0000000..6c18786 --- /dev/null +++ b/src/app/api/auth/google/disconnect/route.ts @@ -0,0 +1,43 @@ +import { NextResponse } from 'next/server' +import { getUserFromRequest } from '@/lib/auth' +import { getDatabase, logAuditEvent } from '@/lib/db' + +export async function POST(request: Request) { + const user = getUserFromRequest(request) + if (!user || user.id === 0) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + if (user.provider !== 'google') { + return NextResponse.json({ error: 'Account is not connected to Google' }, { status: 400 }) + } + + const db = getDatabase() + + // Check that the user has a password set so they can still log in after disconnect + const row = db.prepare('SELECT password_hash FROM users WHERE id = ?').get(user.id) as { password_hash?: string } | undefined + if (!row?.password_hash) { + return NextResponse.json( + { error: 'Cannot disconnect Google — no password set. 
Set a password first to avoid being locked out.' }, + { status: 400 } + ) + } + + db.prepare(` + UPDATE users + SET provider = 'local', provider_user_id = NULL, updated_at = (unixepoch()) + WHERE id = ? + `).run(user.id) + + const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + const userAgent = request.headers.get('user-agent') || undefined + logAuditEvent({ + action: 'google_disconnect', + actor: user.username, + actor_id: user.id, + ip_address: ipAddress, + user_agent: userAgent, + }) + + return NextResponse.json({ ok: true }) +} diff --git a/src/app/api/auth/google/route.ts b/src/app/api/auth/google/route.ts index 6b6fd44..e9596f0 100644 --- a/src/app/api/auth/google/route.ts +++ b/src/app/api/auth/google/route.ts @@ -1,9 +1,10 @@ import { randomBytes } from 'crypto' -import { NextResponse } from 'next/server' +import { NextRequest, NextResponse } from 'next/server' import { createSession } from '@/lib/auth' import { getDatabase, logAuditEvent } from '@/lib/db' import { verifyGoogleIdToken } from '@/lib/google-auth' import { getMcSessionCookieOptions } from '@/lib/session-cookie' +import { loginLimiter } from '@/lib/rate-limit' function upsertAccessRequest(input: { email: string @@ -25,7 +26,10 @@ function upsertAccessRequest(input: { `).run(input.email.toLowerCase(), input.providerUserId, input.displayName, input.avatarUrl || null) } -export async function POST(request: Request) { +export async function POST(request: NextRequest) { + const rateCheck = loginLimiter(request) + if (rateCheck) return rateCheck + try { const body = await request.json().catch(() => ({})) const credential = String(body?.credential || '') @@ -38,8 +42,10 @@ export async function POST(request: Request) { const avatar = profile.picture ? 
String(profile.picture) : null const row = db.prepare(` - SELECT id, username, display_name, role, provider, email, avatar_url, is_approved, created_at, updated_at, last_login_at, workspace_id - FROM users + SELECT u.id, u.username, u.display_name, u.role, u.provider, u.email, u.avatar_url, u.is_approved, + u.created_at, u.updated_at, u.last_login_at, u.workspace_id, COALESCE(w.tenant_id, 1) as tenant_id + FROM users u + LEFT JOIN workspaces w ON w.id = u.workspace_id WHERE (provider = 'google' AND provider_user_id = ?) OR lower(email) = ? ORDER BY id ASC LIMIT 1 @@ -90,6 +96,7 @@ export async function POST(request: Request) { email, avatar_url: avatar, workspace_id: row.workspace_id ?? 1, + tenant_id: row.tenant_id ?? 1, }, }) diff --git a/src/app/api/auth/login/route.ts b/src/app/api/auth/login/route.ts index a3979d4..0269861 100644 --- a/src/app/api/auth/login/route.ts +++ b/src/app/api/auth/login/route.ts @@ -39,6 +39,7 @@ export async function POST(request: Request) { email: user.email || null, avatar_url: user.avatar_url || null, workspace_id: user.workspace_id ?? 1, + tenant_id: user.tenant_id ?? 1, }, }) diff --git a/src/app/api/auth/me/route.ts b/src/app/api/auth/me/route.ts index d650fdd..27c6698 100644 --- a/src/app/api/auth/me/route.ts +++ b/src/app/api/auth/me/route.ts @@ -1,7 +1,8 @@ import { NextRequest, NextResponse } from 'next/server' -import { getUserFromRequest, updateUser, requireRole } from '@/lib/auth' +import { getUserFromRequest, updateUser, requireRole, destroyAllUserSessions, createSession } from '@/lib/auth' import { logAuditEvent } from '@/lib/db' import { verifyPassword } from '@/lib/password' +import { getMcSessionCookieOptions } from '@/lib/session-cookie' import { logger } from '@/lib/logger' export async function GET(request: Request) { @@ -24,6 +25,7 @@ export async function GET(request: Request) { email: user.email || null, avatar_url: user.avatar_url || null, workspace_id: user.workspace_id ?? 1, + tenant_id: user.tenant_id ?? 
1, }, }) } @@ -87,14 +89,17 @@ export async function PATCH(request: NextRequest) { } const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + const userAgent = request.headers.get('user-agent') || undefined if (updates.password) { logAuditEvent({ action: 'password_change', actor: user.username, actor_id: user.id, ip_address: ipAddress }) + // Revoke all existing sessions and issue a fresh one for this request + destroyAllUserSessions(user.id) } if (updates.display_name) { logAuditEvent({ action: 'profile_update', actor: user.username, actor_id: user.id, detail: { display_name: updates.display_name }, ip_address: ipAddress }) } - return NextResponse.json({ + const response = NextResponse.json({ success: true, user: { id: updated.id, @@ -105,8 +110,21 @@ export async function PATCH(request: NextRequest) { email: updated.email || null, avatar_url: updated.avatar_url || null, workspace_id: updated.workspace_id ?? 1, + tenant_id: updated.tenant_id ?? 1, }, }) + + // Issue a fresh session cookie after password change (old ones were just revoked) + if (updates.password) { + const { token, expiresAt } = createSession(user.id, ipAddress, userAgent, user.workspace_id ?? 
1) + const isSecureRequest = request.headers.get('x-forwarded-proto') === 'https' + || new URL(request.url).protocol === 'https:' + response.cookies.set('mc-session', token, { + ...getMcSessionCookieOptions({ maxAgeSeconds: expiresAt - Math.floor(Date.now() / 1000), isSecureRequest }), + }) + } + + return response } catch (error) { logger.error({ err: error }, 'PATCH /api/auth/me error') return NextResponse.json({ error: 'Failed to update profile' }, { status: 500 }) diff --git a/src/app/api/auth/users/route.ts b/src/app/api/auth/users/route.ts index c2e7467..05762d8 100644 --- a/src/app/api/auth/users/route.ts +++ b/src/app/api/auth/users/route.ts @@ -64,6 +64,7 @@ export async function POST(request: NextRequest) { avatar_url: newUser.avatar_url || null, is_approved: newUser.is_approved ?? 1, workspace_id: newUser.workspace_id ?? 1, + tenant_id: newUser.tenant_id ?? 1, } }, { status: 201 }) } catch (error: any) { @@ -130,6 +131,7 @@ export async function PUT(request: NextRequest) { avatar_url: updated.avatar_url || null, is_approved: updated.is_approved ?? 1, workspace_id: updated.workspace_id ?? 1, + tenant_id: updated.tenant_id ?? 
1, } }) } catch (error) { diff --git a/src/app/api/backup/route.ts b/src/app/api/backup/route.ts index bbec610..f409e74 100644 --- a/src/app/api/backup/route.ts +++ b/src/app/api/backup/route.ts @@ -6,6 +6,7 @@ import { join, dirname } from 'path' import { readdirSync, statSync, unlinkSync } from 'fs' import { heavyLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +import { runOpenClaw } from '@/lib/command' const BACKUP_DIR = join(dirname(config.dbPath), 'backups') const MAX_BACKUPS = 10 @@ -48,6 +49,49 @@ export async function POST(request: NextRequest) { const rateCheck = heavyLimiter(request) if (rateCheck) return rateCheck + const target = request.nextUrl.searchParams.get('target') + + // Gateway state backup via `openclaw backup create` + if (target === 'gateway') { + ensureDirExists(BACKUP_DIR) + const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + try { + let stdout: string + let stderr: string + try { + const result = await runOpenClaw(['backup', 'create', '--output', BACKUP_DIR], { timeoutMs: 60000 }) + stdout = result.stdout + stderr = result.stderr + } catch (error: any) { + // openclaw backup may exit non-zero despite success — check output + stdout = error.stdout || '' + stderr = error.stderr || '' + const combined = `${stdout}\n${stderr}` + if (!combined.includes('Created')) { + const message = stderr || error.message || 'Unknown error' + logger.error({ err: error }, 'Gateway backup failed') + return NextResponse.json({ error: `Gateway backup failed: ${message}` }, { status: 500 }) + } + } + + const output = (stdout || stderr).trim() + + logAuditEvent({ + action: 'openclaw.backup', + actor: auth.user.username, + actor_id: auth.user.id, + detail: { output }, + ip_address: ipAddress, + }) + + return NextResponse.json({ success: true, output }) + } catch (error: any) { + logger.error({ err: error }, 'Gateway backup failed') + return NextResponse.json({ error: `Gateway backup 
failed: ${error.message}` }, { status: 500 }) + } + } + + // Default: MC SQLite backup ensureDirExists(BACKUP_DIR) const timestamp = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').slice(0, 19) diff --git a/src/app/api/channels/route.ts b/src/app/api/channels/route.ts new file mode 100644 index 0000000..270e5f1 --- /dev/null +++ b/src/app/api/channels/route.ts @@ -0,0 +1,436 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { logger } from '@/lib/logger' +import { getDetectedGatewayToken } from '@/lib/gateway-runtime' +import { callOpenClawGateway } from '@/lib/openclaw-gateway' + +const gatewayInternalUrl = `http://${config.gatewayHost}:${config.gatewayPort}` + +function gatewayHeaders(): Record<string, string> { + const token = getDetectedGatewayToken() + const headers: Record<string, string> = { 'Content-Type': 'application/json' } + if (token) headers['Authorization'] = `Bearer ${token}` + return headers +} + +type GatewayData = unknown + +function asRecord(value: unknown): Record<string, unknown> | null { + return value && typeof value === 'object' ? (value as Record<string, unknown>) : null +} + +function readBoolean(value: unknown): boolean | undefined { + return typeof value === 'boolean' ? value : undefined +} + +function readString(value: unknown): string | undefined { + return typeof value === 'string' ? value : undefined +} + +function readNumber(value: unknown): number | undefined { + return typeof value === 'number' ? 
value : undefined +} + +interface ChannelStatus { + configured: boolean + linked?: boolean + running: boolean + connected?: boolean + lastConnectedAt?: number | null + lastMessageAt?: number | null + lastStartAt?: number | null + lastError?: string | null + authAgeMs?: number | null + mode?: string | null + baseUrl?: string | null + publicKey?: string | null + probe?: GatewayData + profile?: GatewayData +} + +interface ChannelAccount { + accountId: string + name?: string | null + configured?: boolean | null + linked?: boolean | null + running?: boolean | null + connected?: boolean | null + lastConnectedAt?: number | null + lastInboundAt?: number | null + lastOutboundAt?: number | null + lastError?: string | null + lastStartAt?: number | null + mode?: string | null + probe?: GatewayData + publicKey?: string | null + profile?: GatewayData +} + +interface ChannelsSnapshot { + channels: Record<string, ChannelStatus> + channelAccounts: Record<string, ChannelAccount[]> + channelOrder: string[] + channelLabels: Record<string, string> + connected: boolean + updatedAt?: number +} + +function transformGatewayChannels(data: GatewayData): ChannelsSnapshot { + const parsed = asRecord(data) + const rawChannels = asRecord(parsed?.channels) ?? {} + const rawAccounts = asRecord(parsed?.channelAccounts) ?? {} + const channelLabels = asRecord(parsed?.channelLabels) + const order = Array.isArray(parsed?.channelOrder) + ? parsed.channelOrder.filter((value): value is string => typeof value === 'string') + : Object.keys(rawChannels) + + const channels: Record<string, ChannelStatus> = {} + const channelAccounts: Record<string, ChannelAccount[]> = {} + const labels: Record<string, string> = Object.fromEntries( + Object.entries(channelLabels ?? {}).flatMap(([key, value]) => typeof value === 'string' ? 
[[key, value]] : []) + ) + + for (const key of order) { + const ch = asRecord(rawChannels[key]) + if (!ch) continue + + channels[key] = { + configured: !!readBoolean(ch.configured), + linked: readBoolean(ch.linked), + running: !!readBoolean(ch.running), + connected: readBoolean(ch.connected), + lastConnectedAt: readNumber(ch.lastConnectedAt) ?? null, + lastMessageAt: readNumber(ch.lastMessageAt) ?? null, + lastStartAt: readNumber(ch.lastStartAt) ?? null, + lastError: readString(ch.lastError) ?? null, + authAgeMs: readNumber(ch.authAgeMs) ?? null, + mode: readString(ch.mode) ?? null, + baseUrl: readString(ch.baseUrl) ?? null, + publicKey: readString(ch.publicKey) ?? null, + probe: ch.probe ?? null, + profile: ch.profile ?? null, + } + + const accounts = rawAccounts[key] || [] + const accountEntries = (Array.isArray(accounts) ? accounts : Object.values(accounts)) as GatewayData[] + channelAccounts[key] = accountEntries.map((acct) => { + const parsedAccount = asRecord(acct) ?? {} + return { + accountId: readString(parsedAccount.accountId) ?? 'default', + name: readString(parsedAccount.name) ?? null, + configured: readBoolean(parsedAccount.configured) ?? null, + linked: readBoolean(parsedAccount.linked) ?? null, + running: readBoolean(parsedAccount.running) ?? null, + connected: readBoolean(parsedAccount.connected) ?? null, + lastConnectedAt: readNumber(parsedAccount.lastConnectedAt) ?? null, + lastInboundAt: readNumber(parsedAccount.lastInboundAt) ?? null, + lastOutboundAt: readNumber(parsedAccount.lastOutboundAt) ?? null, + lastError: readString(parsedAccount.lastError) ?? null, + lastStartAt: readNumber(parsedAccount.lastStartAt) ?? null, + mode: readString(parsedAccount.mode) ?? null, + probe: parsedAccount.probe ?? null, + publicKey: readString(parsedAccount.publicKey) ?? null, + profile: parsedAccount.profile ?? 
null, + } + }) + } + + return { + channels, + channelAccounts, + channelOrder: order, + channelLabels: labels, + connected: true, + updatedAt: readNumber(parsed?.ts), + } +} + +async function loadChannelsViaRpc(probe = false): Promise<ChannelsSnapshot> { + const payload = await callOpenClawGateway( + 'channels.status', + { probe, timeoutMs: 8000 }, + probe ? 20000 : 15000, + ) + return { + ...transformGatewayChannels(payload), + connected: true, + } +} + +async function loadChannelsViaCli(probe = false): Promise<ChannelsSnapshot> { + const payload = await callOpenClawGateway( + 'channels.status', + { probe, timeoutMs: 8000 }, + probe ? 20000 : 15000, + ).catch(() => null) + + if (payload) { + return { + ...transformGatewayChannels(payload), + connected: true, + } + } + + const { runOpenClaw } = await import('@/lib/command') + const args = ['channels', 'status', '--json', '--timeout', '5000'] + if (probe) args.push('--probe') + const { stdout } = await runOpenClaw(args, { timeoutMs: probe ? 20000 : 15000 }) + return { + ...transformGatewayChannels(JSON.parse(stdout)), + connected: true, + } +} + +async function isGatewayReachable(): Promise<boolean> { + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 2000) + const res = await fetch(`${gatewayInternalUrl}/api/health`, { + headers: gatewayHeaders(), + signal: controller.signal, + }) + clearTimeout(timeout) + return res.ok + } catch { + return false + } +} + +/** + * GET /api/channels - Fetch channel status from the gateway + * Supports ?action=probe&channel= to probe a specific channel + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const { searchParams } = new URL(request.url) + const action = searchParams.get('action') + + // Probe a specific channel + if (action === 'probe') { + const channel = searchParams.get('channel') + if (!channel) { + return 
NextResponse.json({ error: 'channel parameter required' }, { status: 400 }) + } + + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 5000) + + const res = await fetch(`${gatewayInternalUrl}/api/channels/probe`, { + method: 'POST', + headers: gatewayHeaders(), + body: JSON.stringify({ channel }), + signal: controller.signal, + }) + clearTimeout(timeout) + + if (!res.ok) { + if (res.status === 404) { + return NextResponse.json(await loadChannelsViaRpc(true).catch(() => loadChannelsViaCli(true))) + } + throw new Error(`Gateway channel probe failed with status ${res.status}`) + } + + const data = await res.json() + return NextResponse.json(data) + } catch (err) { + try { + return NextResponse.json(await loadChannelsViaRpc(true).catch(() => loadChannelsViaCli(true))) + } catch (cliErr) { + logger.warn({ err, cliErr, channel }, 'Channel probe failed') + return NextResponse.json( + { ok: false, error: 'Gateway unreachable' }, + { status: 502 }, + ) + } + } + } + + // Default: fetch all channel statuses + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 5000) + + const res = await fetch(`${gatewayInternalUrl}/api/channels/status`, { + headers: gatewayHeaders(), + signal: controller.signal, + }) + clearTimeout(timeout) + + if (!res.ok) { + if (res.status === 404) { + return NextResponse.json(await loadChannelsViaRpc(false).catch(() => loadChannelsViaCli(false))) + } + throw new Error(`Gateway channel status failed with status ${res.status}`) + } + + const data = await res.json() + return NextResponse.json(transformGatewayChannels(data)) + } catch (err) { + try { + return NextResponse.json(await loadChannelsViaRpc(false).catch(() => loadChannelsViaCli(false))) + } catch (cliErr) { + logger.warn({ err, cliErr }, 'Gateway unreachable for channel status') + const reachable = await isGatewayReachable() + return NextResponse.json({ + channels: {}, + channelAccounts: {}, + 
channelOrder: [], + channelLabels: {}, + connected: reachable, + } satisfies ChannelsSnapshot) + } + } +} + +/** + * POST /api/channels - Platform-specific actions + * Body: { action: string, ...params } + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const body = await request.json().catch(() => null) + if (!body || !body.action) { + return NextResponse.json({ error: 'action required' }, { status: 400 }) + } + + const { action } = body + + try { + switch (action) { + case 'whatsapp-link': { + const force = body.force === true + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 30000) + const res = await fetch(`${gatewayInternalUrl}/api/channels/whatsapp/link`, { + method: 'POST', + headers: gatewayHeaders(), + body: JSON.stringify({ force }), + signal: controller.signal, + }) + clearTimeout(timeout) + if (res.ok) { + const data = await res.json() + return NextResponse.json(data) + } + if (res.status !== 404) { + const data = await res.json().catch(() => ({})) + return NextResponse.json(data, { status: res.status }) + } + } catch { + // Fallback to RPC below. 
+ } + return NextResponse.json( + await callOpenClawGateway('web.login.start', { force, timeoutMs: 30000 }, 32000) + ) + } + + case 'whatsapp-wait': { + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 120000) + const res = await fetch(`${gatewayInternalUrl}/api/channels/whatsapp/wait`, { + method: 'POST', + headers: gatewayHeaders(), + signal: controller.signal, + }) + clearTimeout(timeout) + if (res.ok) { + const data = await res.json() + return NextResponse.json(data) + } + if (res.status !== 404) { + const data = await res.json().catch(() => ({})) + return NextResponse.json(data, { status: res.status }) + } + } catch { + // Fallback to RPC below. + } + return NextResponse.json( + await callOpenClawGateway('web.login.wait', { timeoutMs: 120000 }, 122000) + ) + } + + case 'whatsapp-logout': { + try { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 10000) + const res = await fetch(`${gatewayInternalUrl}/api/channels/whatsapp/logout`, { + method: 'POST', + headers: gatewayHeaders(), + signal: controller.signal, + }) + clearTimeout(timeout) + if (res.ok) { + const data = await res.json() + return NextResponse.json(data) + } + if (res.status !== 404) { + const data = await res.json().catch(() => ({})) + return NextResponse.json(data, { status: res.status }) + } + } catch { + // Fallback to RPC below. 
+ } + return NextResponse.json( + await callOpenClawGateway('channels.logout', { channel: 'whatsapp' }, 12000) + ) + } + + case 'nostr-profile-save': { + const accountId = body.accountId || 'default' + const profile = body.profile + if (!profile) { + return NextResponse.json({ error: 'profile required' }, { status: 400 }) + } + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 10000) + const res = await fetch( + `${gatewayInternalUrl}/api/channels/nostr/${encodeURIComponent(accountId)}/profile`, + { + method: 'PUT', + headers: gatewayHeaders(), + body: JSON.stringify(profile), + signal: controller.signal, + }, + ) + clearTimeout(timeout) + const data = await res.json() + return NextResponse.json(data, { status: res.ok ? 200 : res.status }) + } + + case 'nostr-profile-import': { + const accountId = body.accountId || 'default' + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 15000) + const res = await fetch( + `${gatewayInternalUrl}/api/channels/nostr/${encodeURIComponent(accountId)}/profile/import`, + { + method: 'POST', + headers: gatewayHeaders(), + body: JSON.stringify({ autoMerge: true }), + signal: controller.signal, + }, + ) + clearTimeout(timeout) + const data = await res.json() + return NextResponse.json(data, { status: res.ok ? 
200 : res.status }) + } + + default: + return NextResponse.json({ error: `Unknown action: ${action}` }, { status: 400 }) + } + } catch (err) { + logger.warn({ err, action }, 'Channel action failed') + return NextResponse.json( + { ok: false, error: 'Gateway unreachable' }, + { status: 502 }, + ) + } +} diff --git a/src/app/api/chat/messages/route.ts b/src/app/api/chat/messages/route.ts index 0b28760..89a6f5f 100644 --- a/src/app/api/chat/messages/route.ts +++ b/src/app/api/chat/messages/route.ts @@ -5,6 +5,8 @@ import { getAllGatewaySessions } from '@/lib/sessions' import { eventBus } from '@/lib/event-bus' import { requireRole } from '@/lib/auth' import { logger } from '@/lib/logger' +import { scanForInjection, sanitizeForPrompt } from '@/lib/injection-guard' +import { callOpenClawGateway } from '@/lib/openclaw-gateway' type ForwardInfo = { attempted: boolean @@ -14,6 +16,19 @@ type ForwardInfo = { runId?: string } +type ToolEvent = { + name: string + input?: string + output?: string + status?: string +} + +type ChatAttachmentInput = { + name?: string + type?: string + dataUrl?: string +} + const COORDINATOR_AGENT = String(process.env.MC_COORDINATOR_AGENT || process.env.NEXT_PUBLIC_COORDINATOR_AGENT || 'coordinator').trim() || 'coordinator' @@ -31,6 +46,35 @@ function parseGatewayJson(raw: string): any | null { } } +function toGatewayAttachments(value: unknown): Array<{ type: 'image'; mimeType: string; fileName?: string; content: string }> | undefined { + if (!Array.isArray(value)) return undefined + + const attachments = value.flatMap((entry) => { + const file = entry as ChatAttachmentInput + if (!file || typeof file !== 'object' || typeof file.dataUrl !== 'string') return [] + const match = /^data:([^;]+);base64,(.+)$/.exec(file.dataUrl) + if (!match) return [] + if (!match[1].startsWith('image/')) return [] + return [{ + type: 'image' as const, + mimeType: match[1], + fileName: typeof file.name === 'string' ? 
file.name : undefined, + content: match[2], + }] + }) + + return attachments.length > 0 ? attachments : undefined +} + +function safeParseMetadata(raw: string | null | undefined): any | null { + if (!raw) return null + try { + return JSON.parse(raw) + } catch { + return null + } +} + function createChatReply( db: ReturnType, workspaceId: number, @@ -38,7 +82,7 @@ function createChatReply( fromAgent: string, toAgent: string, content: string, - messageType: 'text' | 'status' = 'status', + messageType: 'text' | 'status' | 'tool_call' = 'status', metadata: Record | null = null ) { const replyInsert = db @@ -62,7 +106,7 @@ function createChatReply( eventBus.broadcast('chat.message', { ...row, - metadata: row.metadata ? JSON.parse(row.metadata) : null, + metadata: safeParseMetadata(row.metadata), }) } @@ -91,9 +135,108 @@ function extractReplyText(waitPayload: any): string | null { } } + if (Array.isArray(waitPayload.output)) { + const parts: string[] = [] + for (const item of waitPayload.output) { + if (!item || typeof item !== 'object') continue + if (typeof item.text === 'string' && item.text.trim()) parts.push(item.text.trim()) + if (item.type === 'message' && Array.isArray(item.content)) { + for (const block of item.content) { + if (!block || typeof block !== 'object') continue + const blockType = String(block.type || '') + if ((blockType === 'text' || blockType === 'output_text' || blockType === 'input_text') && typeof block.text === 'string' && block.text.trim()) { + parts.push(block.text.trim()) + } + } + } + } + if (parts.length > 0) return parts.join('\n').slice(0, 8000) + } + return null } +function normalizeToolEvent(raw: any): ToolEvent | null { + if (!raw || typeof raw !== 'object') return null + const name = String(raw.name || raw.tool || raw.toolName || raw.function || raw.call || '').trim() + if (!name) return null + + const inputRaw = raw.input ?? raw.args ?? raw.arguments ?? raw.params + const outputRaw = raw.output ?? raw.result ?? 
raw.response + const statusRaw = + raw.status ?? + (raw.isError === true ? 'error' : undefined) ?? + (raw.ok === false ? 'error' : undefined) ?? + (raw.success === true ? 'ok' : undefined) + + const input = + typeof inputRaw === 'string' + ? inputRaw.slice(0, 2000) + : inputRaw !== undefined + ? JSON.stringify(inputRaw).slice(0, 2000) + : undefined + const output = + typeof outputRaw === 'string' + ? outputRaw.slice(0, 4000) + : outputRaw !== undefined + ? JSON.stringify(outputRaw).slice(0, 4000) + : undefined + const status = statusRaw !== undefined ? String(statusRaw).slice(0, 60) : undefined + return { name, input, output, status } +} + +function extractToolEvents(waitPayload: any): ToolEvent[] { + if (!waitPayload || typeof waitPayload !== 'object') return [] + + const candidates = [ + waitPayload.toolCalls, + waitPayload.tools, + waitPayload.calls, + waitPayload.events, + waitPayload.output?.toolCalls, + waitPayload.output?.tools, + waitPayload.output?.events, + ] + + const events: ToolEvent[] = [] + for (const list of candidates) { + if (!Array.isArray(list)) continue + for (const item of list) { + const evt = normalizeToolEvent(item) + if (evt) events.push(evt) + if (events.length >= 20) return events + } + } + + // OpenAI Responses-style output array + if (Array.isArray(waitPayload.output)) { + for (const item of waitPayload.output) { + if (!item || typeof item !== 'object') continue + const itemType = String(item.type || '').toLowerCase() + if (itemType === 'function_call' || itemType === 'tool_call') { + const evt = normalizeToolEvent({ + name: item.name || item.tool_name || item.toolName, + arguments: item.arguments || item.input, + output: item.output || item.result, + status: item.status, + }) + if (evt) events.push(evt) + } else if (itemType === 'message' && Array.isArray(item.content)) { + for (const block of item.content) { + const blockType = String(block?.type || '').toLowerCase() + if (blockType === 'tool_use' || blockType === 'tool_call' || 
blockType === 'function_call') { + const evt = normalizeToolEvent(block) + if (evt) events.push(evt) + } + } + } + if (events.length >= 20) return events + } + } + + return events +} + /** * GET /api/chat/messages - List messages with filters * Query params: conversation_id, from_agent, to_agent, limit, offset, since @@ -144,7 +287,7 @@ export async function GET(request: NextRequest) { const parsed = messages.map((msg) => ({ ...msg, - metadata: msg.metadata ? JSON.parse(msg.metadata) : null + metadata: safeParseMetadata(msg.metadata), })) // Get total count for pagination @@ -189,7 +332,11 @@ export async function POST(request: NextRequest) { const workspaceId = auth.user.workspace_id ?? 1 const body = await request.json() - const from = auth.user.display_name || auth.user.username || 'system' + const requestedFrom = typeof body.from === 'string' ? body.from.trim() : '' + const isCoordinatorOverride = requestedFrom.toLowerCase() === COORDINATOR_AGENT.toLowerCase() + const from = isCoordinatorOverride + ? COORDINATOR_AGENT + : (auth.user.display_name || auth.user.username || 'system') const to = body.to ? 
(body.to as string).trim() : null const content = (body.content || '').trim() const message_type = body.message_type || 'text' @@ -203,6 +350,21 @@ export async function POST(request: NextRequest) { ) } + // Scan content for injection when it will be forwarded to an agent + if (body.forward && to) { + const injectionReport = scanForInjection(content, { context: 'prompt' }) + if (!injectionReport.safe) { + const criticals = injectionReport.matches.filter(m => m.severity === 'critical') + if (criticals.length > 0) { + logger.warn({ to, rules: criticals.map(m => m.rule) }, 'Blocked chat message: injection detected') + return NextResponse.json( + { error: 'Message blocked: potentially unsafe content detected', injection: criticals.map(m => ({ rule: m.rule, description: m.description })) }, + { status: 422 } + ) + } + } + } + const stmt = db.prepare(` INSERT INTO messages (conversation_id, from_agent, to_agent, content, message_type, metadata, workspace_id) VALUES (?, ?, ?, ?, ?, ?, ?) @@ -253,7 +415,10 @@ export async function POST(request: NextRequest) { .prepare('SELECT * FROM agents WHERE lower(name) = lower(?) AND workspace_id = ?') .get(to, workspaceId) as any - let sessionKey: string | null = agent?.session_key || null + // Use explicit session key from caller if provided, then DB, then on-disk lookup + let sessionKey: string | null = typeof body.sessionKey === 'string' && body.sessionKey + ? 
body.sessionKey + : agent?.session_key || null // Fallback: derive session from on-disk gateway session stores if (!sessionKey) { @@ -302,32 +467,53 @@ export async function POST(request: NextRequest) { } } else { try { - const invokeParams: any = { - message: `Message from ${from}: ${content}`, - idempotencyKey: `mc-${messageId}-${Date.now()}`, - deliver: false, - } - if (sessionKey) invokeParams.sessionKey = sessionKey - else invokeParams.agentId = openclawAgentId + const idempotencyKey = `mc-${messageId}-${Date.now()}` - const invokeResult = await runOpenClaw( - [ - 'gateway', - 'call', - 'agent', - '--timeout', - '10000', - '--params', - JSON.stringify(invokeParams), - '--json', - ], - { timeoutMs: 12000 } - ) - const acceptedPayload = parseGatewayJson(invokeResult.stdout) - forwardInfo.delivered = true - forwardInfo.session = sessionKey || openclawAgentId || undefined - if (typeof acceptedPayload?.runId === 'string' && acceptedPayload.runId) { - forwardInfo.runId = acceptedPayload.runId + if (sessionKey) { + const acceptedPayload = await callOpenClawGateway( + 'chat.send', + { + sessionKey, + message: content, + idempotencyKey, + deliver: false, + attachments: toGatewayAttachments(body.attachments), + }, + 12000, + ) + const status = String(acceptedPayload?.status || '').toLowerCase() + forwardInfo.delivered = status === 'started' || status === 'ok' || status === 'in_flight' + forwardInfo.session = sessionKey + if (typeof acceptedPayload?.runId === 'string' && acceptedPayload.runId) { + forwardInfo.runId = acceptedPayload.runId + } + } else { + const invokeParams: any = { + message: `Message from ${from}: ${content}`, + idempotencyKey, + deliver: false, + } + invokeParams.agentId = openclawAgentId + + const invokeResult = await runOpenClaw( + [ + 'gateway', + 'call', + 'agent', + '--timeout', + '10000', + '--params', + JSON.stringify(invokeParams), + '--json', + ], + { timeoutMs: 12000 } + ) + const acceptedPayload = parseGatewayJson(invokeResult.stdout) + 
forwardInfo.delivered = true + forwardInfo.session = openclawAgentId || undefined + if (typeof acceptedPayload?.runId === 'string' && acceptedPayload.runId) { + forwardInfo.runId = acceptedPayload.runId + } } } catch (err) { // OpenClaw may return accepted JSON on stdout but still emit a late stderr warning. @@ -404,6 +590,29 @@ export async function POST(request: NextRequest) { const waitPayload = parseGatewayJson(waitResult.stdout) const waitStatus = String(waitPayload?.status || '').toLowerCase() + const toolEvents = extractToolEvents(waitPayload) + + if (toolEvents.length > 0) { + for (const evt of toolEvents) { + createChatReply( + db, + workspaceId, + conversation_id, + COORDINATOR_AGENT, + from, + evt.name, + 'tool_call', + { + event: 'tool_call', + toolName: evt.name, + input: evt.input || null, + output: evt.output || null, + status: evt.status || null, + runId: forwardInfo.runId || null, + } + ) + } + } if (waitStatus === 'error') { const reason = @@ -486,7 +695,10 @@ export async function POST(request: NextRequest) { const created = db.prepare('SELECT * FROM messages WHERE id = ? AND workspace_id = ?').get(messageId, workspaceId) as Message const parsedMessage = { ...created, - metadata: created.metadata ? 
JSON.parse(created.metadata) : null + metadata: { + ...(safeParseMetadata(created.metadata) || {}), + forwardInfo: forwardInfo || undefined, + }, } // Broadcast to SSE clients diff --git a/src/app/api/chat/session-prefs/route.ts b/src/app/api/chat/session-prefs/route.ts new file mode 100644 index 0000000..6e1d563 --- /dev/null +++ b/src/app/api/chat/session-prefs/route.ts @@ -0,0 +1,108 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { logger } from '@/lib/logger' + +const PREFS_KEY = 'chat.session_prefs.v1' +const ALLOWED_COLORS = new Set(['slate', 'blue', 'green', 'amber', 'red', 'purple', 'pink', 'teal']) + +type SessionPref = { + name?: string + color?: string +} + +type SessionPrefs = Record + +function loadPrefs(): SessionPrefs { + const db = getDatabase() + const row = db.prepare('SELECT value FROM settings WHERE key = ?').get(PREFS_KEY) as { value: string } | undefined + if (!row?.value) return {} + try { + const parsed = JSON.parse(row.value) + return parsed && typeof parsed === 'object' ? parsed : {} + } catch { + return {} + } +} + +function savePrefs(prefs: SessionPrefs, username: string) { + const db = getDatabase() + const now = Math.floor(Date.now() / 1000) + db.prepare(` + INSERT INTO settings (key, value, description, category, updated_by, updated_at) + VALUES (?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(key) DO UPDATE SET + value = excluded.value, + updated_by = excluded.updated_by, + updated_at = excluded.updated_at + `).run( + PREFS_KEY, + JSON.stringify(prefs), + 'Chat local session preferences (rename + color tags)', + 'chat', + username, + now, + ) +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + return NextResponse.json({ prefs: loadPrefs() }) + } catch (error) { + logger.error({ err: error }, 'GET /api/chat/session-prefs error') + return NextResponse.json({ error: 'Failed to load preferences' }, { status: 500 }) + } +} + +/** + * PATCH /api/chat/session-prefs + * Body: { key: "claude-code:", name?: string, color?: string | null } + */ +export async function PATCH(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const body = await request.json().catch(() => ({})) + const key = typeof body?.key === 'string' ? body.key.trim() : '' + if (!key || !/^[a-zA-Z0-9_-]+:[a-zA-Z0-9._:-]+$/.test(key)) { + return NextResponse.json({ error: 'Invalid key' }, { status: 400 }) + } + + const nextName = body?.name === null ? '' : (typeof body?.name === 'string' ? body.name.trim() : undefined) + const nextColor = body?.color === null ? '' : (typeof body?.color === 'string' ? body.color.trim().toLowerCase() : undefined) + + if (typeof nextName === 'string' && nextName.length > 80) { + return NextResponse.json({ error: 'name must be <= 80 chars' }, { status: 400 }) + } + if (typeof nextColor === 'string' && nextColor && !ALLOWED_COLORS.has(nextColor)) { + return NextResponse.json({ error: 'Invalid color' }, { status: 400 }) + } + + const prefs = loadPrefs() + const existing = prefs[key] || {} + const updated: SessionPref = { + ...existing, + ...(typeof nextName === 'string' ? 
{ name: nextName || undefined } : {}), + ...(typeof nextColor === 'string' ? { color: nextColor || undefined } : {}), + } + + if (!updated.name && !updated.color) { + delete prefs[key] + } else { + prefs[key] = updated + } + + savePrefs(prefs, auth.user.username) + + return NextResponse.json({ ok: true, pref: prefs[key] || null }) + } catch (error) { + logger.error({ err: error }, 'PATCH /api/chat/session-prefs error') + return NextResponse.json({ error: 'Failed to update preferences' }, { status: 500 }) + } +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/claude-tasks/route.ts b/src/app/api/claude-tasks/route.ts new file mode 100644 index 0000000..d8b73fc --- /dev/null +++ b/src/app/api/claude-tasks/route.ts @@ -0,0 +1,17 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getClaudeCodeTasks } from '@/lib/claude-tasks' + +/** + * GET /api/claude-tasks — Returns Claude Code teams and tasks + * Read-only bridge: MC reads from ~/.claude/tasks/ and ~/.claude/teams/ + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const force = request.nextUrl.searchParams.get('force') === 'true' + const result = getClaudeCodeTasks(force) + + return NextResponse.json(result) +} diff --git a/src/app/api/cleanup/route.ts b/src/app/api/cleanup/route.ts index 3cd13fc..8dcfe28 100644 --- a/src/app/api/cleanup/route.ts +++ b/src/app/api/cleanup/route.ts @@ -20,19 +20,22 @@ export async function GET(request: NextRequest) { if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 
1 const now = Math.floor(Date.now() / 1000) const ret = config.retention const preview = [] - for (const { table, column, days, label } of getRetentionTargets()) { + for (const { table, column, days, label, scoped } of getRetentionTargets()) { if (days <= 0) { preview.push({ table: label, retention_days: 0, stale_count: 0, note: 'Retention disabled (keep forever)' }) continue } const cutoff = now - days * 86400 try { - const row = db.prepare(`SELECT COUNT(*) as c FROM ${table} WHERE ${column} < ?`).get(cutoff) as any + const wsClause = scoped ? ' AND workspace_id = ?' : '' + const params: any[] = scoped ? [cutoff, workspaceId] : [cutoff] + const row = db.prepare(`SELECT COUNT(*) as c FROM ${table} WHERE ${column} < ?${wsClause}`).get(...params) as any preview.push({ table: label, retention_days: days, @@ -89,17 +92,20 @@ export async function POST(request: NextRequest) { const dryRun = body.dry_run === true const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 const now = Math.floor(Date.now() / 1000) const results: CleanupResult[] = [] let totalDeleted = 0 - for (const { table, column, days, label } of getRetentionTargets()) { + for (const { table, column, days, label, scoped } of getRetentionTargets()) { if (days <= 0) continue const cutoff = now - days * 86400 + const wsClause = scoped ? ' AND workspace_id = ?' : '' + const params: any[] = scoped ? 
[cutoff, workspaceId] : [cutoff] try { if (dryRun) { - const row = db.prepare(`SELECT COUNT(*) as c FROM ${table} WHERE ${column} < ?`).get(cutoff) as any + const row = db.prepare(`SELECT COUNT(*) as c FROM ${table} WHERE ${column} < ?${wsClause}`).get(...params) as any results.push({ table: label, deleted: row.c, @@ -108,7 +114,7 @@ export async function POST(request: NextRequest) { }) totalDeleted += row.c } else { - const res = db.prepare(`DELETE FROM ${table} WHERE ${column} < ?`).run(cutoff) + const res = db.prepare(`DELETE FROM ${table} WHERE ${column} < ?${wsClause}`).run(...params) results.push({ table: label, deleted: res.changes, @@ -183,9 +189,9 @@ export async function POST(request: NextRequest) { function getRetentionTargets() { const ret = config.retention return [ - { table: 'activities', column: 'created_at', days: ret.activities, label: 'Activities' }, - { table: 'audit_log', column: 'created_at', days: ret.auditLog, label: 'Audit Log' }, - { table: 'notifications', column: 'created_at', days: ret.notifications, label: 'Notifications' }, - { table: 'pipeline_runs', column: 'created_at', days: ret.pipelineRuns, label: 'Pipeline Runs' }, + { table: 'activities', column: 'created_at', days: ret.activities, label: 'Activities', scoped: true }, + { table: 'audit_log', column: 'created_at', days: ret.auditLog, label: 'Audit Log', scoped: false }, // instance-global, admin-only + { table: 'notifications', column: 'created_at', days: ret.notifications, label: 'Notifications', scoped: true }, + { table: 'pipeline_runs', column: 'created_at', days: ret.pipelineRuns, label: 'Pipeline Runs', scoped: true }, ] } diff --git a/src/app/api/cron/route.ts b/src/app/api/cron/route.ts index 69e10ee..8ca589a 100644 --- a/src/app/api/cron/route.ts +++ b/src/app/api/cron/route.ts @@ -188,6 +188,71 @@ export async function GET(request: NextRequest) { return NextResponse.json({ logs }) } + if (action === 'history') { + const jobId = searchParams.get('jobId') + if (!jobId) 
{ + return NextResponse.json({ error: 'Job ID required' }, { status: 400 }) + } + + const page = parseInt(searchParams.get('page') || '1', 10) + const query = searchParams.get('query') || '' + + // Try to load run history from the cron runs log file + const openclawStateDir = config.openclawStateDir + if (!openclawStateDir) { + return NextResponse.json({ entries: [], total: 0, hasMore: false }) + } + + try { + const runsPath = path.join(openclawStateDir, 'cron', 'runs.json') + const raw = await readFile(runsPath, 'utf-8') + const runsData = JSON.parse(raw) + let entries: any[] = Array.isArray(runsData.runs) ? runsData.runs : Array.isArray(runsData) ? runsData : [] + + // Filter to this job + entries = entries.filter((r: any) => r.jobId === jobId || r.id === jobId) + + // Apply search filter + if (query) { + const q = query.toLowerCase() + entries = entries.filter((r: any) => + (r.status || '').toLowerCase().includes(q) || + (r.error || '').toLowerCase().includes(q) || + (r.deliveryStatus || '').toLowerCase().includes(q) + ) + } + + // Sort by timestamp descending + entries.sort((a: any, b: any) => (b.timestamp || b.startedAtMs || 0) - (a.timestamp || a.startedAtMs || 0)) + + const pageSize = 20 + const start = (page - 1) * pageSize + const paged = entries.slice(start, start + pageSize) + + return NextResponse.json({ + entries: paged, + total: entries.length, + hasMore: start + pageSize < entries.length, + page, + }) + } catch { + // No runs file — fall back to state-based info + const cronFile = await loadCronFile() + const job = cronFile?.jobs.find(j => j.id === jobId || j.name === jobId) + const entries: any[] = [] + if (job?.state?.lastRunAtMs) { + entries.push({ + jobId: job.id, + status: job.state.lastStatus || 'unknown', + timestamp: job.state.lastRunAtMs, + durationMs: job.state.lastDurationMs, + error: job.state.lastError, + }) + } + return NextResponse.json({ entries, total: entries.length, hasMore: false, page: 1 }) + } + } + return NextResponse.json({ 
error: 'Invalid action' }, { status: 400 }) } catch (error) { logger.error({ err: error }, 'Cron API error') @@ -249,11 +314,14 @@ export async function POST(request: NextRequest) { } // For OpenClaw cron jobs, trigger via the openclaw CLI + const triggerMode = body.mode || 'force' const { runCommand } = await import('@/lib/command') try { - const { stdout, stderr } = await runCommand(config.openclawBin, [ - 'cron', 'trigger', job.id - ], { timeoutMs: 30000 }) + const args = ['cron', 'trigger', job.id] + if (triggerMode === 'due') { + args.push('--if-due') + } + const { stdout, stderr } = await runCommand(config.openclawBin, args, { timeoutMs: 30000 }) return NextResponse.json({ success: true, @@ -296,7 +364,7 @@ export async function POST(request: NextRequest) { } if (action === 'add') { - const { schedule, command, model, description } = body + const { schedule, command, model, description, staggerSeconds } = body const name = jobName || body.name if (!schedule || !command || !name) { return NextResponse.json( @@ -320,6 +388,9 @@ export async function POST(request: NextRequest) { schedule: { kind: 'cron', expr: schedule, + ...(typeof staggerSeconds === 'number' && staggerSeconds > 0 + ? 
{ staggerMs: staggerSeconds * 1000 } as any + : {}), }, payload: { kind: 'agentTurn', @@ -341,6 +412,49 @@ export async function POST(request: NextRequest) { return NextResponse.json({ success: true }) } + if (action === 'clone') { + const id = jobId || jobName + if (!id) { + return NextResponse.json({ error: 'Job ID required' }, { status: 400 }) + } + + const cronFile = await loadCronFile() + if (!cronFile) { + return NextResponse.json({ error: 'Cron file not found' }, { status: 404 }) + } + + const sourceJob = cronFile.jobs.find(j => j.id === id || j.name === id) + if (!sourceJob) { + return NextResponse.json({ error: 'Job not found' }, { status: 404 }) + } + + // Generate unique clone name + const existingNames = new Set(cronFile.jobs.map(j => j.name.toLowerCase())) + let cloneName = `${sourceJob.name} (copy)` + let counter = 2 + while (existingNames.has(cloneName.toLowerCase())) { + cloneName = `${sourceJob.name} (copy ${counter})` + counter++ + } + + const clonedJob: OpenClawCronJob = { + ...JSON.parse(JSON.stringify(sourceJob)), + id: `mc-${Date.now().toString(36)}`, + name: cloneName, + createdAtMs: Date.now(), + updatedAtMs: Date.now(), + state: {}, + } + + cronFile.jobs.push(clonedJob) + + if (!(await saveCronFile(cronFile))) { + return NextResponse.json({ error: 'Failed to save cron file' }, { status: 500 }) + } + + return NextResponse.json({ success: true, clonedName: cloneName }) + } + return NextResponse.json({ error: 'Invalid action' }, { status: 400 }) } catch (error) { logger.error({ err: error }, 'Cron management error') diff --git a/src/app/api/debug/route.ts b/src/app/api/debug/route.ts new file mode 100644 index 0000000..2e89c10 --- /dev/null +++ b/src/app/api/debug/route.ts @@ -0,0 +1,146 @@ +import { NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { logger } from '@/lib/logger' + +const GATEWAY_BASE = `http://${config.gatewayHost}:${config.gatewayPort}` + +async 
function gatewayFetch( + path: string, + options: { method?: string; body?: string; timeoutMs?: number } = {} +): Promise { + const { method = 'GET', body, timeoutMs = 5000 } = options + const controller = new AbortController() + const timer = setTimeout(() => controller.abort(), timeoutMs) + + try { + const res = await fetch(`${GATEWAY_BASE}${path}`, { + method, + signal: controller.signal, + headers: body ? { 'Content-Type': 'application/json' } : undefined, + body, + }) + return res + } finally { + clearTimeout(timer) + } +} + +export async function GET(request: Request) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const { searchParams } = new URL(request.url) + const action = searchParams.get('action') || 'status' + + try { + switch (action) { + case 'status': { + try { + const res = await gatewayFetch('/api/status') + const data = await res.json() + return NextResponse.json(data) + } catch (err) { + logger.warn({ err }, 'debug: gateway unreachable for status') + return NextResponse.json({ gatewayReachable: false }) + } + } + + case 'health': { + try { + const res = await gatewayFetch('/api/health') + const data = await res.json() + return NextResponse.json(data) + } catch (err) { + logger.warn({ err }, 'debug: gateway unreachable for health') + return NextResponse.json({ healthy: false, error: 'Gateway unreachable' }) + } + } + + case 'models': { + try { + const res = await gatewayFetch('/api/models') + const data = await res.json() + return NextResponse.json(data) + } catch (err) { + logger.warn({ err }, 'debug: gateway unreachable for models') + return NextResponse.json({ models: [] }) + } + } + + case 'heartbeat': { + const start = performance.now() + try { + const res = await gatewayFetch('/api/heartbeat', { timeoutMs: 3000 }) + const latencyMs = Math.round(performance.now() - start) + const ok = res.ok + return NextResponse.json({ ok, latencyMs, 
timestamp: Date.now() }) + } catch { + const latencyMs = Math.round(performance.now() - start) + return NextResponse.json({ ok: false, latencyMs, timestamp: Date.now() }) + } + } + + default: + return NextResponse.json({ error: `Unknown action: ${action}` }, { status: 400 }) + } + } catch (err) { + logger.error({ err }, 'debug: unexpected error') + return NextResponse.json({ error: 'Internal error' }, { status: 500 }) + } +} + +export async function POST(request: Request) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const { searchParams } = new URL(request.url) + const action = searchParams.get('action') + + if (action !== 'call') { + return NextResponse.json({ error: 'POST only supports action=call' }, { status: 400 }) + } + + let body: { method?: string; path?: string; body?: any } + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 }) + } + + const { method, path, body: callBody } = body + + if (!method || !['GET', 'POST'].includes(method)) { + return NextResponse.json({ error: 'method must be GET or POST' }, { status: 400 }) + } + + if (!path || typeof path !== 'string' || !path.startsWith('/api/')) { + return NextResponse.json({ error: 'path must start with /api/' }, { status: 400 }) + } + + try { + const res = await gatewayFetch(path, { + method, + body: callBody ? 
JSON.stringify(callBody) : undefined, + timeoutMs: 5000, + }) + + let responseBody: any + const contentType = res.headers.get('content-type') || '' + if (contentType.includes('application/json')) { + responseBody = await res.json() + } else { + responseBody = await res.text() + } + + return NextResponse.json({ + status: res.status, + statusText: res.statusText, + contentType, + body: responseBody, + }) + } catch (err) { + logger.warn({ err, path }, 'debug: gateway call failed') + return NextResponse.json({ error: 'Gateway unreachable', path }, { status: 502 }) + } +} diff --git a/src/app/api/diagnostics/route.ts b/src/app/api/diagnostics/route.ts new file mode 100644 index 0000000..658394c --- /dev/null +++ b/src/app/api/diagnostics/route.ts @@ -0,0 +1,211 @@ +import { NextRequest, NextResponse } from 'next/server' +import net from 'node:net' +import { existsSync, statSync } from 'node:fs' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { getDatabase } from '@/lib/db' +import { runOpenClaw } from '@/lib/command' +import { logger } from '@/lib/logger' +import { APP_VERSION } from '@/lib/version' + +const INSECURE_PASSWORDS = new Set([ + 'admin', + 'password', + 'change-me-on-first-login', + 'changeme', + 'testpass123', +]) + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const [version, security, database, agents, sessions, gateway] = await Promise.all([ + getVersionInfo(), + getSecurityInfo(), + getDatabaseInfo(), + getAgentInfo(), + getSessionInfo(), + getGatewayInfo(), + ]) + + return NextResponse.json({ + system: { + nodeVersion: process.version, + platform: process.platform, + arch: process.arch, + processMemory: process.memoryUsage(), + processUptime: process.uptime(), + isDocker: existsSync('/.dockerenv'), + }, + version, + security, + database, + agents, + 
sessions, + gateway, + retention: config.retention, + }) + } catch (error) { + logger.error({ err: error }, 'Diagnostics API error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +async function getVersionInfo() { + let openclaw: string | null = null + try { + const { stdout } = await runOpenClaw(['--version'], { timeoutMs: 3000 }) + openclaw = stdout.trim() + } catch { + // openclaw not available + } + return { app: APP_VERSION, openclaw } +} + +function getSecurityInfo() { + const checks: Array<{ name: string; pass: boolean; detail: string }> = [] + + const apiKey = process.env.API_KEY || '' + checks.push({ + name: 'API key configured', + pass: Boolean(apiKey) && apiKey !== 'generate-a-random-key', + detail: !apiKey ? 'API_KEY is not set' : apiKey === 'generate-a-random-key' ? 'API_KEY is default value' : 'API_KEY is set', + }) + + const authPass = process.env.AUTH_PASS || '' + checks.push({ + name: 'Auth password secure', + pass: Boolean(authPass) && !INSECURE_PASSWORDS.has(authPass), + detail: !authPass ? 'AUTH_PASS is not set' : INSECURE_PASSWORDS.has(authPass) ? 'AUTH_PASS is a known insecure password' : 'AUTH_PASS is not a common default', + }) + + const allowedHosts = process.env.MC_ALLOWED_HOSTS || '' + checks.push({ + name: 'Allowed hosts configured', + pass: Boolean(allowedHosts.trim()), + detail: allowedHosts.trim() ? 'MC_ALLOWED_HOSTS is configured' : 'MC_ALLOWED_HOSTS is not set', + }) + + const sameSite = process.env.MC_COOKIE_SAMESITE || '' + checks.push({ + name: 'Cookie SameSite strict', + pass: sameSite.toLowerCase() === 'strict', + detail: sameSite ? `MC_COOKIE_SAMESITE is '${sameSite}'` : 'MC_COOKIE_SAMESITE is not set', + }) + + const hsts = process.env.MC_ENABLE_HSTS || '' + checks.push({ + name: 'HSTS enabled', + pass: hsts === '1', + detail: hsts === '1' ? 
'HSTS is enabled' : 'MC_ENABLE_HSTS is not set to 1', + }) + + const rateLimitDisabled = process.env.MC_DISABLE_RATE_LIMIT || '' + checks.push({ + name: 'Rate limiting enabled', + pass: !rateLimitDisabled, + detail: rateLimitDisabled ? 'Rate limiting is disabled' : 'Rate limiting is active', + }) + + const gwHost = config.gatewayHost + checks.push({ + name: 'Gateway bound to localhost', + pass: gwHost === '127.0.0.1' || gwHost === 'localhost', + detail: `Gateway host is '${gwHost}'`, + }) + + const passing = checks.filter(c => c.pass).length + const score = Math.round((passing / checks.length) * 100) + + return { score, checks } +} + +function getDatabaseInfo() { + try { + const db = getDatabase() + + let sizeBytes = 0 + try { + sizeBytes = statSync(config.dbPath).size + } catch { + // ignore + } + + const journalRow = db.prepare('PRAGMA journal_mode').get() as { journal_mode: string } | undefined + const walMode = journalRow?.journal_mode === 'wal' + + let migrationVersion: string | null = null + try { + const row = db.prepare( + "SELECT name FROM sqlite_master WHERE type='table' AND name='migrations'" + ).get() as { name?: string } | undefined + if (row?.name) { + const latest = db.prepare( + 'SELECT version FROM migrations ORDER BY rowid DESC LIMIT 1' + ).get() as { version: string } | undefined + migrationVersion = latest?.version ?? 
null + } + } catch { + // migrations table may not exist + } + + return { sizeBytes, walMode, migrationVersion } + } catch (err) { + logger.error({ err }, 'Diagnostics: database info error') + return { sizeBytes: 0, walMode: false, migrationVersion: null } + } +} + +function getAgentInfo() { + try { + const db = getDatabase() + const rows = db.prepare( + 'SELECT status, COUNT(*) as count FROM agents GROUP BY status' + ).all() as Array<{ status: string; count: number }> + + const byStatus: Record = {} + let total = 0 + for (const row of rows) { + byStatus[row.status] = row.count + total += row.count + } + return { total, byStatus } + } catch { + return { total: 0, byStatus: {} } + } +} + +function getSessionInfo() { + try { + const db = getDatabase() + const totalRow = db.prepare('SELECT COUNT(*) as c FROM claude_sessions').get() as { c: number } | undefined + const activeRow = db.prepare( + "SELECT COUNT(*) as c FROM claude_sessions WHERE is_active = 1" + ).get() as { c: number } | undefined + return { active: activeRow?.c ?? 0, total: totalRow?.c ?? 
0 } + } catch { + return { active: 0, total: 0 } + } +} + +async function getGatewayInfo() { + const host = config.gatewayHost + const port = config.gatewayPort + const configured = Boolean(host && port) + + let reachable = false + if (configured) { + reachable = await new Promise((resolve) => { + const socket = new net.Socket() + socket.setTimeout(1500) + socket.once('connect', () => { socket.destroy(); resolve(true) }) + socket.once('timeout', () => { socket.destroy(); resolve(false) }) + socket.once('error', () => { socket.destroy(); resolve(false) }) + socket.connect(port, host) + }) + } + + return { configured, reachable, host, port } +} diff --git a/src/app/api/events/route.ts b/src/app/api/events/route.ts index d25b8c8..1e4c0a8 100644 --- a/src/app/api/events/route.ts +++ b/src/app/api/events/route.ts @@ -25,8 +25,11 @@ export async function GET(request: NextRequest) { encoder.encode(`data: ${JSON.stringify({ type: 'connected', data: null, timestamp: Date.now() })}\n\n`) ) - // Forward all server events to this SSE client + // Forward workspace-scoped server events to this SSE client + const userWorkspaceId = auth.user.workspace_id ?? 
1 const handler = (event: ServerEvent) => { + // Skip events from other workspaces (if event carries workspace_id) + if (event.data?.workspace_id && event.data.workspace_id !== userWorkspaceId) return try { controller.enqueue( encoder.encode(`data: ${JSON.stringify(event)}\n\n`) diff --git a/src/app/api/exec-approvals/route.ts b/src/app/api/exec-approvals/route.ts new file mode 100644 index 0000000..784145d --- /dev/null +++ b/src/app/api/exec-approvals/route.ts @@ -0,0 +1,210 @@ +import { NextRequest, NextResponse } from 'next/server' +import { createHash } from 'node:crypto' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { logger } from '@/lib/logger' +import path from 'node:path' + +function gatewayUrl(p: string): string { + return `http://${config.gatewayHost}:${config.gatewayPort}${p}` +} + +function execApprovalsPath(): string { + return path.join(config.openclawHome, 'exec-approvals.json') +} + +function computeHash(raw: string): string { + return createHash('sha256').update(raw, 'utf8').digest('hex') +} + +/** + * GET /api/exec-approvals - Fetch pending execution approval requests + * GET /api/exec-approvals?action=allowlist - Fetch per-agent allowlists + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const action = request.nextUrl.searchParams.get('action') + + if (action === 'allowlist') { + return getAllowlist() + } + + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 5000) + + try { + const res = await fetch(gatewayUrl('/api/exec-approvals'), { + signal: controller.signal, + headers: { 'Accept': 'application/json' }, + }) + clearTimeout(timeout) + + if (!res.ok) { + logger.warn({ status: res.status }, 'Gateway exec-approvals endpoint returned error') + return NextResponse.json({ approvals: [] }) + } + + const data 
= await res.json() + return NextResponse.json(data) + } catch (err: any) { + clearTimeout(timeout) + if (err.name === 'AbortError') { + logger.warn('Gateway exec-approvals request timed out') + } else { + logger.warn({ err }, 'Gateway exec-approvals unreachable') + } + return NextResponse.json({ approvals: [] }) + } +} + +async function getAllowlist(): Promise { + const filePath = execApprovalsPath() + try { + const { readFile } = require('fs/promises') + const raw = await readFile(filePath, 'utf-8') + const parsed = JSON.parse(raw) + const agents: Record = {} + if (parsed?.agents && typeof parsed.agents === 'object') { + for (const [agentId, agentConfig] of Object.entries(parsed.agents)) { + const cfg = agentConfig as any + if (Array.isArray(cfg?.allowlist)) { + agents[agentId] = cfg.allowlist.map((e: any) => ({ pattern: String(e?.pattern ?? '') })) + } else { + agents[agentId] = [] + } + } + } + return NextResponse.json({ agents, hash: computeHash(raw) }) + } catch (err: any) { + if (err.code === 'ENOENT') { + return NextResponse.json({ agents: {}, hash: computeHash('') }) + } + logger.warn({ err }, 'Failed to read exec-approvals config') + return NextResponse.json({ error: `Failed to read config: ${err.message}` }, { status: 500 }) + } +} + +/** + * PUT /api/exec-approvals - Save allowlist changes + * Body: { agents: Record, hash?: string } + */ +export async function PUT(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + let body: { agents: Record; hash?: string } + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 }) + } + + if (!body.agents || typeof body.agents !== 'object') { + return NextResponse.json({ error: 'Missing required field: agents' }, { status: 400 }) + } + + const filePath = execApprovalsPath() + try { + const { readFile, writeFile, mkdir } = 
require('fs/promises') + const { existsSync } = require('fs') + + let parsed: any = { version: 1, agents: {} } + try { + const raw = await readFile(filePath, 'utf-8') + parsed = JSON.parse(raw) + + if (body.hash) { + const serverHash = computeHash(raw) + if (body.hash !== serverHash) { + return NextResponse.json( + { error: 'Config has been modified. Please reload and try again.', code: 'CONFLICT' }, + { status: 409 }, + ) + } + } + } catch (err: any) { + if (err.code !== 'ENOENT') throw err + } + + if (!parsed.agents) parsed.agents = {} + + for (const [agentId, patterns] of Object.entries(body.agents)) { + if (!parsed.agents[agentId]) parsed.agents[agentId] = {} + if (patterns.length === 0) { + delete parsed.agents[agentId].allowlist + } else { + parsed.agents[agentId].allowlist = patterns.map((p: { pattern: string }) => ({ + pattern: String(p.pattern ?? ''), + })) + } + } + + const dir = path.dirname(filePath) + if (!existsSync(dir)) { + await mkdir(dir, { recursive: true }) + } + + const newRaw = JSON.stringify(parsed, null, 2) + '\n' + await writeFile(filePath, newRaw, { mode: 0o600 }) + + return NextResponse.json({ ok: true, hash: computeHash(newRaw) }) + } catch (err: any) { + logger.error({ err }, 'Failed to save exec-approvals config') + return NextResponse.json({ error: `Failed to save: ${err.message}` }, { status: 500 }) + } +} + +/** + * POST /api/exec-approvals - Respond to an execution approval request + * Body: { id: string, action: 'approve' | 'deny' | 'always_allow', reason?: string } + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + let body: { id: string; action: string; reason?: string } + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 }) + } + + if (!body.id || typeof body.id !== 'string') { + return NextResponse.json({ 
error: 'Missing required field: id' }, { status: 400 }) + } + + const validActions = ['approve', 'deny', 'always_allow'] + if (!validActions.includes(body.action)) { + return NextResponse.json({ error: `Invalid action. Must be one of: ${validActions.join(', ')}` }, { status: 400 }) + } + + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 5000) + + try { + const res = await fetch(gatewayUrl('/api/exec-approvals/respond'), { + method: 'POST', + signal: controller.signal, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + id: body.id, + action: body.action, + reason: body.reason, + }), + }) + clearTimeout(timeout) + + const data = await res.json() + return NextResponse.json(data, { status: res.status }) + } catch (err: any) { + clearTimeout(timeout) + if (err.name === 'AbortError') { + logger.error('Gateway exec-approvals respond request timed out') + return NextResponse.json({ error: 'Gateway request timed out' }, { status: 504 }) + } + logger.error({ err }, 'Gateway exec-approvals respond failed') + return NextResponse.json({ error: 'Gateway unreachable' }, { status: 502 }) + } +} diff --git a/src/app/api/export/route.ts b/src/app/api/export/route.ts index ff137df..b68ad10 100644 --- a/src/app/api/export/route.ts +++ b/src/app/api/export/route.ts @@ -53,6 +53,7 @@ export async function GET(request: NextRequest) { switch (type) { case 'audit': { + // audit_log is instance-global (no workspace_id column); export is admin-only so this is safe rows = db.prepare(`SELECT * FROM audit_log ${where} ORDER BY created_at DESC LIMIT ?`).all(...params, limit) headers = ['id', 'action', 'actor', 'actor_id', 'target_type', 'target_id', 'detail', 'ip_address', 'user_agent', 'created_at'] filename = 'audit-log' @@ -77,7 +78,10 @@ export async function GET(request: NextRequest) { break } case 'pipelines': { - rows = db.prepare(`SELECT pr.*, wp.name as pipeline_name FROM pipeline_runs pr LEFT JOIN 
workflow_pipelines wp ON pr.pipeline_id = wp.id ${where ? where.replace('created_at', 'pr.created_at') : ''} ORDER BY pr.created_at DESC LIMIT ?`).all(...params, limit) + conditions.unshift('pr.workspace_id = ?') + params.unshift(workspaceId) + const scopedWhere = conditions.length > 0 ? `WHERE ${conditions.map(c => c.replace(/^created_at/, 'pr.created_at')).join(' AND ')}` : '' + rows = db.prepare(`SELECT pr.*, wp.name as pipeline_name FROM pipeline_runs pr LEFT JOIN workflow_pipelines wp ON pr.pipeline_id = wp.id ${scopedWhere} ORDER BY pr.created_at DESC LIMIT ?`).all(...params, limit) headers = ['id', 'pipeline_id', 'pipeline_name', 'status', 'current_step', 'steps_snapshot', 'started_at', 'completed_at', 'triggered_by', 'created_at'] filename = 'pipeline-runs' break diff --git a/src/app/api/gateway-config/route.ts b/src/app/api/gateway-config/route.ts index cf339c5..96c8a82 100644 --- a/src/app/api/gateway-config/route.ts +++ b/src/app/api/gateway-config/route.ts @@ -1,22 +1,45 @@ import { NextRequest, NextResponse } from 'next/server' +import { createHash } from 'node:crypto' import { requireRole } from '@/lib/auth' import { logAuditEvent } from '@/lib/db' import { config } from '@/lib/config' import { validateBody, gatewayConfigUpdateSchema } from '@/lib/validation' import { mutationLimiter } from '@/lib/rate-limit' -import { parseJsonRelaxed } from '@/lib/json-relaxed' +import { getDetectedGatewayToken } from '@/lib/gateway-runtime' function getConfigPath(): string | null { return config.openclawConfigPath || null } +function gatewayUrl(path: string): string { + return `http://${config.gatewayHost}:${config.gatewayPort}${path}` +} + +function gatewayHeaders(): Record { + const token = getDetectedGatewayToken() + const headers: Record = { 'Content-Type': 'application/json' } + if (token) headers['Authorization'] = `Bearer ${token}` + return headers +} + +function computeHash(raw: string): string { + return createHash('sha256').update(raw, 
'utf8').digest('hex') +} + /** * GET /api/gateway-config - Read the gateway configuration + * GET /api/gateway-config?action=schema - Get the config JSON schema */ export async function GET(request: NextRequest) { const auth = requireRole(request, 'admin') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + const action = request.nextUrl.searchParams.get('action') + + if (action === 'schema') { + return getSchema() + } + const configPath = getConfigPath() if (!configPath) { return NextResponse.json({ error: 'OPENCLAW_CONFIG_PATH not configured' }, { status: 404 }) @@ -25,7 +48,8 @@ export async function GET(request: NextRequest) { try { const { readFile } = require('fs/promises') const raw = await readFile(configPath, 'utf-8') - const parsed = parseJsonRelaxed(raw) + const parsed = JSON.parse(raw) + const hash = computeHash(raw) // Redact sensitive fields for display const redacted = redactSensitive(JSON.parse(JSON.stringify(parsed))) @@ -34,6 +58,7 @@ export async function GET(request: NextRequest) { path: configPath, config: redacted, raw_size: raw.length, + hash, }) } catch (err: any) { if (err.code === 'ENOENT') { @@ -43,12 +68,38 @@ export async function GET(request: NextRequest) { } } +async function getSchema(): Promise { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 5000) + try { + const res = await fetch(gatewayUrl('/api/config/schema'), { + signal: controller.signal, + headers: gatewayHeaders(), + }) + clearTimeout(timeout) + if (!res.ok) { + return NextResponse.json( + { error: `Gateway returned ${res.status}` }, + { status: 502 }, + ) + } + const data = await res.json() + return NextResponse.json(data) + } catch (err: any) { + clearTimeout(timeout) + return NextResponse.json( + { error: err.name === 'AbortError' ? 
'Gateway timeout' : 'Gateway unreachable' }, + { status: 502 }, + ) + } +} + /** * PUT /api/gateway-config - Update specific config fields - * Body: { updates: { "path.to.key": value, ... } } + * PUT /api/gateway-config?action=apply - Hot-apply config via gateway RPC + * PUT /api/gateway-config?action=update - System update via gateway RPC * - * Uses dot-notation paths to set nested values. - * CRITICAL: Preserves gateway.auth.password and other sensitive fields. + * Body: { updates: { "path.to.key": value, ... }, hash?: string } */ export async function PUT(request: NextRequest) { const auth = requireRole(request, 'admin') @@ -57,6 +108,16 @@ export async function PUT(request: NextRequest) { const rateCheck = mutationLimiter(request) if (rateCheck) return rateCheck + const action = request.nextUrl.searchParams.get('action') + + if (action === 'apply') { + return applyConfig(request, auth) + } + + if (action === 'update') { + return updateSystem(request, auth) + } + const configPath = getConfigPath() if (!configPath) { return NextResponse.json({ error: 'OPENCLAW_CONFIG_PATH not configured' }, { status: 404 }) @@ -77,7 +138,30 @@ export async function PUT(request: NextRequest) { try { const { readFile, writeFile } = require('fs/promises') const raw = await readFile(configPath, 'utf-8') - const parsed = parseJsonRelaxed(raw) + + // Hash-based concurrency check + const clientHash = (body as any).hash + if (clientHash) { + const serverHash = computeHash(raw) + if (clientHash !== serverHash) { + return NextResponse.json( + { error: 'Config has been modified by another user. 
Please reload and try again.', code: 'CONFLICT' }, + { status: 409 }, + ) + } + } + + const parsed = JSON.parse(raw) + + for (const dotPath of Object.keys(body.updates)) { + const [rootKey] = dotPath.split('.') + if (!rootKey || !(rootKey in parsed)) { + return NextResponse.json( + { error: `Unknown config root: ${rootKey || dotPath}` }, + { status: 400 }, + ) + } + } // Apply updates via dot-notation const appliedKeys: string[] = [] @@ -87,7 +171,8 @@ export async function PUT(request: NextRequest) { } // Write back with pretty formatting - await writeFile(configPath, JSON.stringify(parsed, null, 2) + '\n') + const newRaw = JSON.stringify(parsed, null, 2) + '\n' + await writeFile(configPath, newRaw) const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' logAuditEvent({ @@ -98,12 +183,92 @@ export async function PUT(request: NextRequest) { ip_address: ipAddress, }) - return NextResponse.json({ updated: appliedKeys, count: appliedKeys.length }) + return NextResponse.json({ + updated: appliedKeys, + count: appliedKeys.length, + hash: computeHash(newRaw), + }) } catch (err: any) { return NextResponse.json({ error: `Failed to update config: ${err.message}` }, { status: 500 }) } } +async function applyConfig(request: NextRequest, auth: any): Promise { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 10000) + try { + const res = await fetch(gatewayUrl('/api/config/apply'), { + method: 'POST', + signal: controller.signal, + headers: gatewayHeaders(), + }) + clearTimeout(timeout) + + const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + logAuditEvent({ + action: 'gateway_config_apply', + actor: auth.user.username, + actor_id: auth.user.id, + detail: { status: res.status }, + ip_address: ipAddress, + }) + + if (!res.ok) { + const text = await res.text().catch(() => '') + return NextResponse.json( + { error: `Apply failed 
(${res.status}): ${text}` }, + { status: 502 }, + ) + } + const data = await res.json().catch(() => ({})) + return NextResponse.json({ ok: true, ...data }) + } catch (err: any) { + clearTimeout(timeout) + return NextResponse.json( + { error: err.name === 'AbortError' ? 'Gateway timeout' : 'Gateway unreachable' }, + { status: 502 }, + ) + } +} + +async function updateSystem(request: NextRequest, auth: any): Promise { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), 15000) + try { + const res = await fetch(gatewayUrl('/api/config/update'), { + method: 'POST', + signal: controller.signal, + headers: gatewayHeaders(), + }) + clearTimeout(timeout) + + const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + logAuditEvent({ + action: 'gateway_config_system_update', + actor: auth.user.username, + actor_id: auth.user.id, + detail: { status: res.status }, + ip_address: ipAddress, + }) + + if (!res.ok) { + const text = await res.text().catch(() => '') + return NextResponse.json( + { error: `Update failed (${res.status}): ${text}` }, + { status: 502 }, + ) + } + const data = await res.json().catch(() => ({})) + return NextResponse.json({ ok: true, ...data }) + } catch (err: any) { + clearTimeout(timeout) + return NextResponse.json( + { error: err.name === 'AbortError' ? 
'Gateway timeout' : 'Gateway unreachable' }, + { status: 502 }, + ) + } +} + /** Set a value in a nested object using dot-notation path */ function setNestedValue(obj: any, path: string, value: any) { const keys = path.split('.') @@ -124,7 +289,7 @@ function redactSensitive(obj: any, parentKey = ''): any { for (const key of Object.keys(obj)) { if (sensitiveKeys.some(sk => key.toLowerCase().includes(sk))) { if (typeof obj[key] === 'string' && obj[key].length > 0) { - obj[key] = '••••••••' + obj[key] = '--------' } } else if (typeof obj[key] === 'object' && obj[key] !== null) { redactSensitive(obj[key], key) diff --git a/src/app/api/gateways/connect/route.ts b/src/app/api/gateways/connect/route.ts index 6fdad8f..2ad9198 100644 --- a/src/app/api/gateways/connect/route.ts +++ b/src/app/api/gateways/connect/route.ts @@ -1,13 +1,126 @@ import { NextRequest, NextResponse } from 'next/server' +import { readFileSync } from 'node:fs' import { requireRole } from '@/lib/auth' import { getDatabase } from '@/lib/db' import { buildGatewayWebSocketUrl } from '@/lib/gateway-url' +import { getDetectedGatewayToken } from '@/lib/gateway-runtime' interface GatewayEntry { id: number host: string port: number token: string + is_primary: number +} + +function inferBrowserProtocol(request: NextRequest): 'http:' | 'https:' { + const forwardedProto = String(request.headers.get('x-forwarded-proto') || '').split(',')[0]?.trim().toLowerCase() + if (forwardedProto === 'https') return 'https:' + if (forwardedProto === 'http') return 'http:' + + const origin = request.headers.get('origin') || request.headers.get('referer') || '' + if (origin) { + try { + const parsed = new URL(origin) + if (parsed.protocol === 'https:') return 'https:' + if (parsed.protocol === 'http:') return 'http:' + } catch { + // ignore and continue fallback resolution + } + } + + if (request.nextUrl.protocol === 'https:') return 'https:' + return 'http:' +} + +const LOCALHOST_HOSTS = new Set(['127.0.0.1', 'localhost', 
'::1']) + +/** + * Detect whether Tailscale Serve is proxying a `/gw` route to the gateway. + * + * Checks in order: + * 1. `tailscale serve status --json` — look for a /gw handler (authoritative) + * 2. Fallback: `gateway.tailscale.mode === 'serve'` in openclaw.json (legacy) + */ +function detectTailscaleServe(): boolean { + // 1. Check live Tailscale Serve config for a /gw handler + try { + const { execFileSync } = require('node:child_process') + const raw = execFileSync('tailscale', ['serve', 'status', '--json'], { + timeout: 3000, + encoding: 'utf-8', + stdio: ['ignore', 'pipe', 'ignore'], + }) + const config = JSON.parse(raw) + const web = config?.Web + if (web) { + for (const host of Object.values(web) as any[]) { + if ((host as any)?.Handlers?.['/gw']) return true + } + } + } catch { + // tailscale CLI not available or not running — fall through + } + + // 2. Legacy: check openclaw.json config + const configPath = process.env.OPENCLAW_CONFIG_PATH || '' + if (!configPath) return false + try { + const raw = readFileSync(configPath, 'utf-8') + const config = JSON.parse(raw) + return config?.gateway?.tailscale?.mode === 'serve' + } catch { + return false + } +} + +/** Cache Tailscale Serve detection with 60-second TTL. */ +let _tailscaleServeCache: { value: boolean; expiresAt: number } | null = null +const TAILSCALE_CACHE_TTL_MS = 60_000 +function isTailscaleServe(): boolean { + const now = Date.now() + if (!_tailscaleServeCache || now > _tailscaleServeCache.expiresAt) { + _tailscaleServeCache = { value: detectTailscaleServe(), expiresAt: now + TAILSCALE_CACHE_TTL_MS } + } + return _tailscaleServeCache.value +} + +/** Extract the browser-facing hostname from the request. 
*/ +function getBrowserHostname(request: NextRequest): string { + const origin = request.headers.get('origin') || request.headers.get('referer') || '' + if (origin) { + try { return new URL(origin).hostname } catch { /* ignore */ } + } + const hostHeader = request.headers.get('host') || '' + return hostHeader.split(':')[0] +} + +/** + * When the gateway is on localhost but the browser is remote, resolve the + * correct WebSocket URL the browser should use. + * + * - Tailscale Serve mode: `wss:///gw` (Tailscale proxies /gw to localhost gateway) + * - Otherwise: rewrite host to dashboard hostname with the gateway port + */ +function resolveRemoteGatewayUrl( + gateway: { host: string; port: number }, + request: NextRequest, +): string | null { + const normalized = (gateway.host || '').toLowerCase().trim() + if (!LOCALHOST_HOSTS.has(normalized)) return null // remote host — use normal path + + const browserHost = getBrowserHostname(request) + if (!browserHost || LOCALHOST_HOSTS.has(browserHost.toLowerCase())) return null // local access + + // Browser is remote — determine the correct proxied URL + if (isTailscaleServe()) { + // Tailscale Serve proxies /gw → localhost:18789 with TLS + return `wss://${browserHost}/gw` + } + + // No Tailscale Serve — try direct connection to dashboard host on gateway port + const protocol = inferBrowserProtocol(request) === 'https:' ? 'wss' : 'ws' + return `${protocol}://${browserHost}:${gateway.port}` } function ensureTable(db: ReturnType) { @@ -35,7 +148,10 @@ function ensureTable(db: ReturnType) { * Resolves websocket URL and token for a selected gateway without exposing tokens in list payloads. */ export async function POST(request: NextRequest) { - const auth = requireRole(request, 'operator') + // Any authenticated dashboard user may initiate a gateway websocket connect. + // Restricting this to operator can cause startup fallback to connect without auth, + // which then fails as "device identity required". 
+ const auth = requireRole(request, 'viewer') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) const db = getDatabase() @@ -53,19 +169,32 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: 'id is required' }, { status: 400 }) } - const gateway = db.prepare('SELECT id, host, port, token FROM gateways WHERE id = ?').get(id) as GatewayEntry | undefined + const gateway = db.prepare('SELECT id, host, port, token, is_primary FROM gateways WHERE id = ?').get(id) as GatewayEntry | undefined if (!gateway) { return NextResponse.json({ error: 'Gateway not found' }, { status: 404 }) } - const ws_url = buildGatewayWebSocketUrl({ + // When gateway host is localhost but the browser is remote (e.g. Tailscale), + // resolve the correct browser-accessible WebSocket URL. + const remoteUrl = resolveRemoteGatewayUrl(gateway, request) + const ws_url = remoteUrl || buildGatewayWebSocketUrl({ host: gateway.host, port: gateway.port, - browserProtocol: request.nextUrl.protocol, + browserProtocol: inferBrowserProtocol(request), }) - const envToken = (process.env.NEXT_PUBLIC_GATEWAY_TOKEN || process.env.NEXT_PUBLIC_WS_TOKEN || '').trim() - const token = (gateway.token || '').trim() || envToken + const dbToken = (gateway.token || '').trim() + const detectedToken = gateway.is_primary === 1 ? getDetectedGatewayToken() : '' + const token = detectedToken || dbToken + + // Keep runtime DB aligned with detected OpenClaw gateway token for primary gateway. + if (gateway.is_primary === 1 && detectedToken && detectedToken !== dbToken) { + try { + db.prepare('UPDATE gateways SET token = ?, updated_at = (unixepoch()) WHERE id = ?').run(detectedToken, gateway.id) + } catch { + // Non-fatal: connect still succeeds with detected token even if persistence fails. 
+ } + } return NextResponse.json({ id: gateway.id, diff --git a/src/app/api/gateways/discover/route.ts b/src/app/api/gateways/discover/route.ts new file mode 100644 index 0000000..eafc24c --- /dev/null +++ b/src/app/api/gateways/discover/route.ts @@ -0,0 +1,98 @@ +import { NextRequest, NextResponse } from 'next/server' +import { readFileSync } from 'node:fs' +import { execFileSync } from 'node:child_process' +import { requireRole } from '@/lib/auth' + +interface DiscoveredGateway { + user: string + port: number + active: boolean + description: string +} + +/** + * GET /api/gateways/discover + * Discovers OpenClaw gateways via systemd services and port scanning. + * Does not require filesystem access to other users' configs. + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const discovered: DiscoveredGateway[] = [] + + // Parse systemd services for openclaw-gateway instances + try { + const output = execFileSync('systemctl', [ + 'list-units', '--type=service', '--plain', '--no-legend', '--no-pager', + ], { encoding: 'utf-8', timeout: 3000 }) + + const gwLines = output.split('\n').filter(l => l.includes('openclaw') && l.includes('gateway')) + + for (const line of gwLines) { + // e.g. 
"openclaw-gateway@quant.service loaded active running OpenClaw Gateway (quant)" + const parts = line.trim().split(/\s+/) + const serviceName = parts[0] || '' + const state = parts[2] || '' // active/inactive + const description = parts.slice(4).join(' ') // "OpenClaw Gateway (quant)" + + // Extract user from service name + let user = '' + const templateMatch = serviceName.match(/openclaw-gateway@(\w+)\.service/) + if (templateMatch) { + user = templateMatch[1] + } else { + // Custom service name like "openclaw-leads-gateway.service" + const customMatch = serviceName.match(/openclaw-(\w+)-gateway\.service/) + if (customMatch) user = customMatch[1] + } + if (!user) continue + + // Find the port by checking what openclaw-gateway processes are listening on + let port = 0 + try { + const configPath = `/home/${user}/.openclaw/openclaw.json` + const raw = readFileSync(configPath, 'utf-8') + const config = JSON.parse(raw) + if (typeof config?.gateway?.port === 'number') port = config.gateway.port + } catch { + // Can't read config — try to detect from ss output + } + + // If we couldn't read config, try finding port via ss for the service PID + if (!port) { + try { + const pidOutput = execFileSync('systemctl', [ + 'show', serviceName, '--property=ExecMainPID', '--value', + ], { encoding: 'utf-8', timeout: 2000 }).trim() + const pid = parseInt(pidOutput, 10) + if (pid > 0) { + const ssOutput = execFileSync('ss', ['-ltnp'], { + encoding: 'utf-8', timeout: 2000, + }) + const pidPattern = `pid=${pid},` + for (const ssLine of ssOutput.split('\n')) { + if (ssLine.includes(pidPattern)) { + const portMatch = ssLine.match(/:(\d+)\s/) + if (portMatch) { port = parseInt(portMatch[1], 10); break } + } + } + } + } catch { /* ignore */ } + } + + if (!port) continue + + discovered.push({ + user, + port, + active: state === 'active', + description: description.replace(/[()]/g, '').trim(), + }) + } + } catch { + // systemctl not available or failed — fall back silently + } + + return 
NextResponse.json({ gateways: discovered }) +} diff --git a/src/app/api/gateways/health/route.ts b/src/app/api/gateways/health/route.ts index 1025b36..6afaeca 100644 --- a/src/app/api/gateways/health/route.ts +++ b/src/app/api/gateways/health/route.ts @@ -46,20 +46,95 @@ function hasOpenClaw32ToolsProfileRisk(version: string | null): boolean { return minor >= 2 } -function isBlockedUrl(urlStr: string): boolean { +/** Check whether an IPv4 address falls within a CIDR block. */ +function ipv4InCidr(ip: string, cidr: string): boolean { + const [base, bits] = cidr.split('/') + const mask = ~((1 << (32 - Number(bits))) - 1) >>> 0 + const ipNum = ipv4ToNum(ip) + const baseNum = ipv4ToNum(base) + if (ipNum === null || baseNum === null) return false + return (ipNum & mask) === (baseNum & mask) +} + +function ipv4ToNum(ip: string): number | null { + const parts = ip.split('.') + if (parts.length !== 4) return null + let num = 0 + for (const p of parts) { + const n = Number(p) + if (!Number.isFinite(n) || n < 0 || n > 255) return null + num = (num << 8) | n + } + return num >>> 0 +} + +const BLOCKED_PRIVATE_CIDRS = [ + '10.0.0.0/8', + '172.16.0.0/12', + '192.168.0.0/16', + '169.254.0.0/16', + '127.0.0.0/8', +] + +const BLOCKED_HOSTNAMES = new Set([ + 'metadata.google.internal', + 'metadata.internal', + 'instance-data', +]) + +function isBlockedUrl(urlStr: string, userConfiguredHosts: Set): boolean { try { const url = new URL(urlStr) const hostname = url.hostname - // Block link-local / cloud metadata endpoints - if (hostname.startsWith('169.254.')) return true + + // Allow user-configured gateway hosts (operators intentionally target their own infra) + if (userConfiguredHosts.has(hostname)) return false + // Block well-known cloud metadata hostnames - if (hostname === 'metadata.google.internal') return true + if (BLOCKED_HOSTNAMES.has(hostname)) return true + + // Block private/reserved IPv4 ranges + if (/^\d{1,3}(\.\d{1,3}){3}$/.test(hostname)) { + for (const cidr of 
BLOCKED_PRIVATE_CIDRS) { + if (ipv4InCidr(hostname, cidr)) return true + } + } + return false } catch { return true // Block malformed URLs } } +function buildGatewayProbeUrl(host: string, port: number): string | null { + const rawHost = String(host || '').trim() + if (!rawHost) return null + + const hasProtocol = + rawHost.startsWith('ws://') || + rawHost.startsWith('wss://') || + rawHost.startsWith('http://') || + rawHost.startsWith('https://') + + if (hasProtocol) { + try { + const parsed = new URL(rawHost) + if (parsed.protocol === 'ws:') parsed.protocol = 'http:' + if (parsed.protocol === 'wss:') parsed.protocol = 'https:' + if (!parsed.port && Number.isFinite(port) && port > 0) { + parsed.port = String(port) + } + if (!parsed.pathname) parsed.pathname = '/' + return parsed.toString() + } catch { + return null + } + } + + if (!Number.isFinite(port) || port <= 0) return null + return `http://${rawHost}:${port}/` +} + /** * POST /api/gateways/health - Server-side health probe for all gateways * Probes gateways from the server where loopback addresses are reachable. @@ -71,6 +146,15 @@ export async function POST(request: NextRequest) { const db = getDatabase() const gateways = db.prepare("SELECT * FROM gateways ORDER BY is_primary DESC, name ASC").all() as GatewayEntry[] + // Build set of user-configured gateway hosts so the SSRF filter allows them + const configuredHosts = new Set() + for (const gw of gateways) { + const h = (gw.host || '').trim() + if (h) { + try { configuredHosts.add(new URL(h.includes('://') ? h : `http://${h}`).hostname) } catch { configuredHosts.add(h) } + } + } + // Prepare update statements once (avoids N+1) const updateOnlineStmt = db.prepare( "UPDATE gateways SET status = ?, latency = ?, last_seen = (unixepoch()), updated_at = (unixepoch()) WHERE id = ?" 
@@ -82,9 +166,13 @@ export async function POST(request: NextRequest) { const results: HealthResult[] = [] for (const gw of gateways) { - const probeUrl = "http://" + gw.host + ":" + gw.port + "/" + const probeUrl = buildGatewayProbeUrl(gw.host, gw.port) + if (!probeUrl) { + results.push({ id: gw.id, name: gw.name, status: 'error', latency: null, agents: [], sessions_count: 0, error: 'Invalid gateway address' }) + continue + } - if (isBlockedUrl(probeUrl)) { + if (isBlockedUrl(probeUrl, configuredHosts)) { results.push({ id: gw.id, name: gw.name, status: 'error', latency: null, agents: [], sessions_count: 0, error: 'Blocked URL' }) continue } @@ -106,8 +194,6 @@ export async function POST(request: NextRequest) { ? 'OpenClaw 2026.3.2+ defaults tools.profile=messaging; Mission Control should enforce coding profile when spawning.' : undefined - updateOnlineStmt.run(status, latency, gw.id) - results.push({ id: gw.id, name: gw.name, @@ -119,8 +205,6 @@ export async function POST(request: NextRequest) { compatibility_warning: compatibilityWarning, }) } catch (err: any) { - updateOfflineStmt.run("offline", gw.id) - results.push({ id: gw.id, name: gw.name, @@ -133,5 +217,16 @@ export async function POST(request: NextRequest) { } } + // Persist all probe results in a single transaction + db.transaction(() => { + for (const r of results) { + if (r.status === 'online' || r.status === 'error') { + updateOnlineStmt.run(r.status, r.latency, r.id) + } else { + updateOfflineStmt.run(r.status, r.id) + } + } + })() + return NextResponse.json({ results, probed_at: Date.now() }) } diff --git a/src/app/api/gateways/route.ts b/src/app/api/gateways/route.ts index fb4d4c7..b2d7cd3 100644 --- a/src/app/api/gateways/route.ts +++ b/src/app/api/gateways/route.ts @@ -1,6 +1,7 @@ import { NextRequest, NextResponse } from 'next/server' import { requireRole } from '@/lib/auth' import { getDatabase } from '@/lib/db' +import { getDetectedGatewayPort, getDetectedGatewayToken } from 
'@/lib/gateway-runtime' interface GatewayEntry { id: number @@ -54,11 +55,8 @@ export async function GET(request: NextRequest) { if (gateways.length === 0) { const name = String(process.env.MC_DEFAULT_GATEWAY_NAME || 'primary') const host = String(process.env.OPENCLAW_GATEWAY_HOST || '127.0.0.1') - const mainPort = parseInt(process.env.OPENCLAW_GATEWAY_PORT || process.env.GATEWAY_PORT || process.env.NEXT_PUBLIC_GATEWAY_PORT || '18789') - const mainToken = - process.env.OPENCLAW_GATEWAY_TOKEN || - process.env.GATEWAY_TOKEN || - '' + const mainPort = getDetectedGatewayPort() || parseInt(process.env.NEXT_PUBLIC_GATEWAY_PORT || '18789') + const mainToken = getDetectedGatewayToken() db.prepare(` INSERT INTO gateways (name, host, port, token, is_primary) VALUES (?, ?, ?, ?, 1) diff --git a/src/app/api/github/route.ts b/src/app/api/github/route.ts index 8f6bbea..c73eb0a 100644 --- a/src/app/api/github/route.ts +++ b/src/app/api/github/route.ts @@ -14,6 +14,7 @@ import { updateIssueState, type GitHubIssue, } from '@/lib/github' +import { initializeLabels, pullFromGitHub } from '@/lib/github-sync-engine' /** * GET /api/github?action=issues&repo=owner/repo&state=open&labels=bug @@ -83,6 +84,10 @@ export async function POST(request: NextRequest) { return await handleClose(body, auth.user.username, auth.user.workspace_id ?? 1) case 'status': return handleStatus(auth.user.workspace_id ?? 1) + case 'init-labels': + return await handleInitLabels(body, auth.user.workspace_id ?? 1) + case 'sync-project': + return await handleSyncProject(body, auth.user.username, auth.user.workspace_id ?? 
1) default: return NextResponse.json({ error: 'Unknown action' }, { status: 400 }) } @@ -417,6 +422,67 @@ async function handleGitHubStats() { }) } +// ── Init Labels: create MC labels on repo ──────────────────────── + +async function handleInitLabels( + body: { repo?: string }, + workspaceId: number +) { + const repo = body.repo || process.env.GITHUB_DEFAULT_REPO + if (!repo) { + return NextResponse.json({ error: 'repo is required' }, { status: 400 }) + } + + await initializeLabels(repo) + + // Mark project labels as initialized + const db = getDatabase() + db.prepare(` + UPDATE projects + SET github_labels_initialized = 1, updated_at = unixepoch() + WHERE github_repo = ? AND workspace_id = ? + `).run(repo, workspaceId) + + return NextResponse.json({ ok: true, repo }) +} + +// ── Sync Project: pull from GitHub for a project ───────────────── + +async function handleSyncProject( + body: { project_id?: number }, + actor: string, + workspaceId: number +) { + if (typeof body.project_id !== 'number') { + return NextResponse.json({ error: 'project_id is required' }, { status: 400 }) + } + + const db = getDatabase() + const project = db.prepare(` + SELECT id, github_repo, github_sync_enabled, github_default_branch + FROM projects + WHERE id = ? AND workspace_id = ? 
AND status = 'active' + `).get(body.project_id, workspaceId) as any | undefined + + if (!project) { + return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + } + if (!project.github_repo || !project.github_sync_enabled) { + return NextResponse.json({ error: 'GitHub sync not enabled for this project' }, { status: 400 }) + } + + const result = await pullFromGitHub(project, workspaceId) + + db_helpers.logActivity( + 'github_sync', 'project', project.id, actor, + `Manual sync: pulled ${result.pulled}, pushed ${result.pushed}`, + { repo: project.github_repo, ...result }, + workspaceId + ) + + return NextResponse.json({ ok: true, ...result }) +} + // ── Priority mapping helper ───────────────────────────────────── function mapPriority(labels: string[]): 'critical' | 'high' | 'medium' | 'low' { diff --git a/src/app/api/github/sync/route.ts b/src/app/api/github/sync/route.ts new file mode 100644 index 0000000..4545fee --- /dev/null +++ b/src/app/api/github/sync/route.ts @@ -0,0 +1,109 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { logger } from '@/lib/logger' +import { pullFromGitHub } from '@/lib/github-sync-engine' +import { getSyncPollerStatus } from '@/lib/github-sync-poller' + +/** + * GET /api/github/sync — sync status for all GitHub-linked projects. + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 
1 + + const syncs = db.prepare(` + SELECT + gs.project_id, + p.name as project_name, + p.github_repo, + MAX(gs.last_synced_at) as last_synced_at, + SUM(gs.changes_pushed) as total_pushed, + SUM(gs.changes_pulled) as total_pulled, + COUNT(*) as sync_count + FROM github_syncs gs + LEFT JOIN projects p ON p.id = gs.project_id AND p.workspace_id = gs.workspace_id + WHERE gs.workspace_id = ? AND gs.project_id IS NOT NULL + GROUP BY gs.project_id + ORDER BY last_synced_at DESC + `).all(workspaceId) + + const poller = getSyncPollerStatus() + + return NextResponse.json({ syncs, poller }) + } catch (error) { + logger.error({ err: error }, 'GET /api/github/sync error') + return NextResponse.json({ error: 'Failed to fetch sync status' }, { status: 500 }) + } +} + +/** + * POST /api/github/sync — trigger sync manually. + * Body: { action: 'trigger', project_id: number } or { action: 'trigger-all' } + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const body = await request.json() + const { action, project_id } = body + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + + if (action === 'trigger' && typeof project_id === 'number') { + const project = db.prepare(` + SELECT id, github_repo, github_sync_enabled, github_default_branch + FROM projects + WHERE id = ? AND workspace_id = ? 
AND status = 'active' + `).get(project_id, workspaceId) as any | undefined + + if (!project) { + return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + } + if (!project.github_repo || !project.github_sync_enabled) { + return NextResponse.json({ error: 'GitHub sync not enabled for this project' }, { status: 400 }) + } + + const result = await pullFromGitHub(project, workspaceId) + return NextResponse.json({ ok: true, ...result }) + } + + if (action === 'trigger-all') { + const projects = db.prepare(` + SELECT id, github_repo, github_sync_enabled, github_default_branch + FROM projects + WHERE github_sync_enabled = 1 AND github_repo IS NOT NULL AND workspace_id = ? AND status = 'active' + `).all(workspaceId) as any[] + + let totalPulled = 0 + let totalPushed = 0 + + for (const project of projects) { + try { + const result = await pullFromGitHub(project, workspaceId) + totalPulled += result.pulled + totalPushed += result.pushed + } catch (err) { + logger.error({ err, projectId: project.id }, 'Trigger-all: project sync failed') + } + } + + return NextResponse.json({ + ok: true, + projects_synced: projects.length, + pulled: totalPulled, + pushed: totalPushed, + }) + } + + return NextResponse.json({ error: 'Unknown action. 
Use trigger or trigger-all' }, { status: 400 }) + } catch (error) { + logger.error({ err: error }, 'POST /api/github/sync error') + return NextResponse.json({ error: 'Sync trigger failed' }, { status: 500 }) + } +} diff --git a/src/app/api/hermes/memory/route.ts b/src/app/api/hermes/memory/route.ts new file mode 100644 index 0000000..fe64178 --- /dev/null +++ b/src/app/api/hermes/memory/route.ts @@ -0,0 +1,16 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getHermesMemory } from '@/lib/hermes-memory' + +/** + * GET /api/hermes/memory — Returns Hermes memory file contents + * Read-only bridge: MC reads from ~/.hermes/memories/ + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const result = getHermesMemory() + + return NextResponse.json(result) +} diff --git a/src/app/api/hermes/route.ts b/src/app/api/hermes/route.ts new file mode 100644 index 0000000..5fbb26d --- /dev/null +++ b/src/app/api/hermes/route.ts @@ -0,0 +1,182 @@ +import { NextRequest, NextResponse } from 'next/server' +import { existsSync, mkdirSync, writeFileSync, rmSync } from 'node:fs' +import { join } from 'node:path' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { isHermesInstalled, isHermesGatewayRunning, scanHermesSessions } from '@/lib/hermes-sessions' +import { getHermesTasks } from '@/lib/hermes-tasks' +import { getHermesMemory } from '@/lib/hermes-memory' +import { logger } from '@/lib/logger' + +const HERMES_HOME = join(config.homeDir, '.hermes') +const HOOK_DIR = join(HERMES_HOME, 'hooks', 'mission-control') + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const installed = 
isHermesInstalled() + const gatewayRunning = installed ? isHermesGatewayRunning() : false + const hookInstalled = existsSync(join(HOOK_DIR, 'HOOK.yaml')) + const activeSessions = installed ? scanHermesSessions(50).filter(s => s.isActive).length : 0 + + const cronJobCount = installed ? getHermesTasks().cronJobs.length : 0 + const memoryEntries = installed ? getHermesMemory().agentMemoryEntries : 0 + + return NextResponse.json({ + installed, + gatewayRunning, + hookInstalled, + activeSessions, + cronJobCount, + memoryEntries, + hookDir: HOOK_DIR, + }) + } catch (err) { + logger.error({ err }, 'Hermes status check failed') + return NextResponse.json({ error: 'Failed to check hermes status' }, { status: 500 }) + } +} + +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const body = await request.json() + const { action } = body + + if (action === 'install-hook') { + if (!isHermesInstalled()) { + return NextResponse.json({ error: 'Hermes is not installed (~/.hermes/ not found)' }, { status: 400 }) + } + + mkdirSync(HOOK_DIR, { recursive: true }) + + // Write HOOK.yaml + writeFileSync(join(HOOK_DIR, 'HOOK.yaml'), HOOK_YAML, 'utf8') + + // Write handler.py + writeFileSync(join(HOOK_DIR, 'handler.py'), HANDLER_PY, 'utf8') + + logger.info('Installed Mission Control hook for Hermes Agent') + return NextResponse.json({ success: true, message: 'Hook installed', hookDir: HOOK_DIR }) + } + + if (action === 'uninstall-hook') { + if (existsSync(HOOK_DIR)) { + rmSync(HOOK_DIR, { recursive: true, force: true }) + } + + logger.info('Uninstalled Mission Control hook for Hermes Agent') + return NextResponse.json({ success: true, message: 'Hook uninstalled' }) + } + + return NextResponse.json({ error: 'Invalid action. 
Must be: install-hook, uninstall-hook' }, { status: 400 }) + } catch (err: any) { + logger.error({ err }, 'Hermes hook management failed') + return NextResponse.json({ error: err.message || 'Hook operation failed' }, { status: 500 }) + } +} + +// --------------------------------------------------------------------------- +// Hook file contents +// --------------------------------------------------------------------------- + +const HOOK_YAML = `name: mission-control +description: Reports agent telemetry to Mission Control +version: "1.0" +events: + - agent:start + - agent:end + - session:start +` + +const HANDLER_PY = `""" +Mission Control hook for Hermes Agent. +Reports session telemetry to the MC /api/sessions endpoint. + +Configuration (via ~/.hermes/.env or environment): + MC_URL - Mission Control base URL (default: http://localhost:3000) + MC_API_KEY - API key for authentication (optional) +""" + +import os +import logging +from datetime import datetime, timezone + +logger = logging.getLogger("hooks.mission-control") + +MC_URL = os.environ.get("MC_URL", "http://localhost:3000") +MC_API_KEY = os.environ.get("MC_API_KEY", "") + + +def _headers(): + h = {"Content-Type": "application/json"} + if MC_API_KEY: + h["X-Api-Key"] = MC_API_KEY + return h + + +async def handle_event(event_name: str, payload: dict) -> None: + """ + Called by the Hermes hook registry on matching events. + Fire-and-forget with a short timeout — never blocks the agent. 
+ """ + try: + import httpx + except ImportError: + logger.debug("httpx not available, skipping MC telemetry") + return + + try: + if event_name == "agent:start": + await _report_agent_start(payload) + elif event_name == "agent:end": + await _report_agent_end(payload) + elif event_name == "session:start": + await _report_session_start(payload) + except Exception as exc: + logger.debug("MC hook error (%s): %s", event_name, exc) + + +async def _report_agent_start(payload: dict) -> None: + import httpx + + data = { + "name": payload.get("agent_name", "hermes"), + "role": "Hermes Agent", + "status": "active", + "source": "hermes-hook", + } + async with httpx.AsyncClient(timeout=2.0) as client: + await client.post(f"{MC_URL}/api/agents", json=data, headers=_headers()) + + +async def _report_agent_end(payload: dict) -> None: + import httpx + + data = { + "name": payload.get("agent_name", "hermes"), + "status": "idle", + "source": "hermes-hook", + } + async with httpx.AsyncClient(timeout=2.0) as client: + await client.post(f"{MC_URL}/api/agents", json=data, headers=_headers()) + + +async def _report_session_start(payload: dict) -> None: + import httpx + + data = { + "event": "session:start", + "session_id": payload.get("session_id", ""), + "source": payload.get("source", "cli"), + "timestamp": datetime.now(timezone.utc).isoformat(), + } + async with httpx.AsyncClient(timeout=2.0) as client: + await client.post(f"{MC_URL}/api/hermes/events", json=data, headers=_headers()) +` + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/hermes/tasks/route.ts b/src/app/api/hermes/tasks/route.ts new file mode 100644 index 0000000..662bbad --- /dev/null +++ b/src/app/api/hermes/tasks/route.ts @@ -0,0 +1,17 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getHermesTasks } from '@/lib/hermes-tasks' + +/** + * GET /api/hermes/tasks — Returns Hermes cron jobs + * Read-only bridge: MC reads from ~/.hermes/cron/ + 
*/ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const force = request.nextUrl.searchParams.get('force') === 'true' + const result = getHermesTasks(force) + + return NextResponse.json(result) +} diff --git a/src/app/api/index/route.ts b/src/app/api/index/route.ts new file mode 100644 index 0000000..496ff7a --- /dev/null +++ b/src/app/api/index/route.ts @@ -0,0 +1,183 @@ +import { NextResponse } from 'next/server' + +const VERSION = '1.3.0' +export const revalidate = 300 + +interface Endpoint { + path: string + methods: string[] + description: string + tag: string + auth: string +} + +const endpoints: Endpoint[] = [ + // ── Tasks ───────────────────────────────────────── + { path: '/api/tasks', methods: ['GET', 'POST'], description: 'Task CRUD — list, create', tag: 'Tasks', auth: 'viewer/operator' }, + { path: '/api/tasks/:id', methods: ['GET', 'PATCH', 'DELETE'], description: 'Task detail — read, update, delete', tag: 'Tasks', auth: 'viewer/operator/admin' }, + { path: '/api/tasks/:id/comments', methods: ['GET', 'POST'], description: 'Task comments — list, add', tag: 'Tasks', auth: 'viewer/operator' }, + { path: '/api/tasks/:id/broadcast', methods: ['POST'], description: 'Broadcast task update via SSE', tag: 'Tasks', auth: 'operator' }, + { path: '/api/tasks/queue', methods: ['GET'], description: 'Task queue — next assignable tasks', tag: 'Tasks', auth: 'viewer' }, + { path: '/api/tasks/outcomes', methods: ['GET'], description: 'Task outcome analytics', tag: 'Tasks', auth: 'viewer' }, + { path: '/api/tasks/regression', methods: ['GET'], description: 'Task regression detection', tag: 'Tasks', auth: 'viewer' }, + + // ── Projects ────────────────────────────────────── + { path: '/api/workspaces', methods: ['GET'], description: 'Tenant-scoped workspace listing', tag: 'Projects', auth: 'viewer' }, + { path: 
'/api/projects', methods: ['GET', 'POST'], description: 'Project CRUD — list, create', tag: 'Projects', auth: 'viewer/operator' }, + { path: '/api/projects/:id', methods: ['GET', 'PATCH', 'DELETE'], description: 'Project detail — read, update, archive/delete', tag: 'Projects', auth: 'viewer/operator/admin' }, + { path: '/api/projects/:id/tasks', methods: ['GET'], description: 'Tasks scoped to project', tag: 'Projects', auth: 'viewer' }, + { path: '/api/projects/:id/agents', methods: ['GET', 'POST', 'DELETE'], description: 'Project agent assignments — list, assign, unassign', tag: 'Projects', auth: 'viewer/operator' }, + + // ── Agents ──────────────────────────────────────── + { path: '/api/agents', methods: ['GET', 'POST'], description: 'Agent CRUD — list, register', tag: 'Agents', auth: 'viewer/operator' }, + { path: '/api/agents/:id', methods: ['GET', 'PATCH', 'DELETE'], description: 'Agent detail — read, update, delete', tag: 'Agents', auth: 'viewer/operator/admin' }, + { path: '/api/agents/:id/heartbeat', methods: ['POST'], description: 'Agent heartbeat ping', tag: 'Agents', auth: 'operator' }, + { path: '/api/agents/:id/wake', methods: ['POST'], description: 'Wake idle agent', tag: 'Agents', auth: 'operator' }, + { path: '/api/agents/:id/soul', methods: ['GET', 'PUT'], description: 'Agent soul file — read, write', tag: 'Agents', auth: 'viewer/operator' }, + { path: '/api/agents/:id/memory', methods: ['GET'], description: 'Agent memory files', tag: 'Agents', auth: 'viewer' }, + { path: '/api/agents/:id/files', methods: ['GET'], description: 'Agent workspace files', tag: 'Agents', auth: 'viewer' }, + { path: '/api/agents/:id/diagnostics', methods: ['GET'], description: 'Agent diagnostics', tag: 'Agents', auth: 'viewer' }, + { path: '/api/agents/:id/attribution', methods: ['GET'], description: 'Agent token usage attribution', tag: 'Agents', auth: 'viewer' }, + { path: '/api/agents/sync', methods: ['POST'], description: 'Sync agents from gateway sessions', tag: 
'Agents', auth: 'operator' }, + { path: '/api/agents/comms', methods: ['GET'], description: 'Agent communication feed', tag: 'Agents', auth: 'viewer' }, + { path: '/api/agents/message', methods: ['POST'], description: 'Send message to agent', tag: 'Agents', auth: 'operator' }, + + // ── Chat ────────────────────────────────────────── + { path: '/api/chat/messages', methods: ['GET', 'POST'], description: 'Chat messages — list, send', tag: 'Chat', auth: 'viewer/operator' }, + { path: '/api/chat/messages/:id', methods: ['PATCH'], description: 'Mark chat message read', tag: 'Chat', auth: 'operator' }, + { path: '/api/chat/conversations', methods: ['GET'], description: 'List conversations', tag: 'Chat', auth: 'viewer' }, + { path: '/api/chat/session-prefs', methods: ['GET', 'PATCH'], description: 'Local session chat preferences (rename + color)', tag: 'Chat', auth: 'viewer/operator' }, + + // ── Sessions ────────────────────────────────────── + { path: '/api/sessions', methods: ['GET'], description: 'List gateway sessions', tag: 'Sessions', auth: 'viewer' }, + { path: '/api/sessions/:id/control', methods: ['POST'], description: 'Session control (stop, message)', tag: 'Sessions', auth: 'operator' }, + { path: '/api/sessions/continue', methods: ['POST'], description: 'Continue a local Claude/Codex session with a prompt', tag: 'Sessions', auth: 'operator' }, + { path: '/api/sessions/transcript', methods: ['GET'], description: 'Read local Claude/Codex session transcript snippets', tag: 'Sessions', auth: 'viewer' }, + { path: '/api/claude/sessions', methods: ['GET'], description: 'Claude CLI session scanner', tag: 'Sessions', auth: 'viewer' }, + + // ── Activities & Notifications ──────────────────── + { path: '/api/activities', methods: ['GET'], description: 'Activity feed', tag: 'Activities', auth: 'viewer' }, + { path: '/api/notifications', methods: ['GET', 'PATCH'], description: 'Notifications — list, mark read', tag: 'Notifications', auth: 'viewer/operator' }, + { path: 
'/api/notifications/deliver', methods: ['POST'], description: 'Deliver notification', tag: 'Notifications', auth: 'operator' }, + + // ── Quality & Standup ───────────────────────────── + { path: '/api/quality-review', methods: ['GET', 'POST'], description: 'Quality review gate', tag: 'Quality', auth: 'viewer/operator' }, + { path: '/api/standup', methods: ['GET', 'POST'], description: 'Daily standup reports', tag: 'Standup', auth: 'viewer/operator' }, + + // ── Workflows & Pipelines ───────────────────────── + { path: '/api/workflows', methods: ['GET', 'POST', 'PUT', 'DELETE'], description: 'Workflow templates CRUD', tag: 'Workflows', auth: 'viewer/operator' }, + { path: '/api/pipelines', methods: ['GET', 'POST', 'DELETE'], description: 'Pipeline CRUD', tag: 'Pipelines', auth: 'viewer/operator' }, + { path: '/api/pipelines/run', methods: ['POST'], description: 'Execute pipeline', tag: 'Pipelines', auth: 'operator' }, + + // ── Webhooks ────────────────────────────────────── + { path: '/api/webhooks', methods: ['GET', 'POST', 'PATCH', 'DELETE'], description: 'Webhook CRUD', tag: 'Webhooks', auth: 'viewer/operator' }, + { path: '/api/webhooks/deliveries', methods: ['GET'], description: 'Webhook delivery history', tag: 'Webhooks', auth: 'viewer' }, + { path: '/api/webhooks/retry', methods: ['POST'], description: 'Retry webhook delivery', tag: 'Webhooks', auth: 'operator' }, + { path: '/api/webhooks/test', methods: ['POST'], description: 'Send test webhook', tag: 'Webhooks', auth: 'operator' }, + { path: '/api/webhooks/verify-docs', methods: ['GET'], description: 'Webhook verification docs', tag: 'Webhooks', auth: 'public' }, + + // ── Alerts ──────────────────────────────────────── + { path: '/api/alerts', methods: ['GET', 'POST', 'PATCH', 'DELETE'], description: 'Alert rules CRUD', tag: 'Alerts', auth: 'viewer/operator' }, + + // ── Auth ────────────────────────────────────────── + { path: '/api/auth/login', methods: ['POST'], description: 'User login', tag: 'Auth', 
auth: 'public' }, + { path: '/api/auth/logout', methods: ['POST'], description: 'User logout', tag: 'Auth', auth: 'authenticated' }, + { path: '/api/auth/me', methods: ['GET'], description: 'Current user info', tag: 'Auth', auth: 'authenticated' }, + { path: '/api/auth/users', methods: ['GET', 'POST', 'PATCH', 'DELETE'], description: 'User management', tag: 'Auth', auth: 'admin' }, + { path: '/api/auth/google', methods: ['POST'], description: 'Google OAuth callback', tag: 'Auth', auth: 'public' }, + { path: '/api/auth/access-requests', methods: ['GET', 'PATCH'], description: 'Access request approvals', tag: 'Auth', auth: 'admin' }, + + // ── Tokens & Costs ──────────────────────────────── + { path: '/api/tokens', methods: ['GET', 'POST'], description: 'Token usage tracking', tag: 'Tokens', auth: 'viewer/operator' }, + + // ── Cron & Scheduler ────────────────────────────── + { path: '/api/cron', methods: ['GET', 'POST', 'PATCH', 'DELETE'], description: 'Cron job management', tag: 'Cron', auth: 'viewer/operator' }, + { path: '/api/scheduler', methods: ['POST'], description: 'Scheduler tick (internal)', tag: 'Cron', auth: 'operator' }, + + // ── Spawn ───────────────────────────────────────── + { path: '/api/spawn', methods: ['POST'], description: 'Spawn agent subprocess', tag: 'Spawn', auth: 'operator' }, + + // ── Memory ──────────────────────────────────────── + { path: '/api/memory', methods: ['GET', 'POST', 'PUT', 'DELETE'], description: 'Memory browser — list, read, write, delete', tag: 'Memory', auth: 'viewer/operator' }, + + // ── Search & Mentions ───────────────────────────── + { path: '/api/search', methods: ['GET'], description: 'Full-text search across entities', tag: 'Search', auth: 'viewer' }, + { path: '/api/mentions', methods: ['GET'], description: 'Autocomplete for @mentions', tag: 'Search', auth: 'viewer' }, + + // ── Logs ────────────────────────────────────────── + { path: '/api/logs', methods: ['GET'], description: 'Application logs', tag: 
'Logs', auth: 'viewer' }, + + // ── Settings ────────────────────────────────────── + { path: '/api/settings', methods: ['GET', 'PATCH'], description: 'System settings', tag: 'Settings', auth: 'viewer/admin' }, + { path: '/api/integrations', methods: ['GET', 'PATCH'], description: 'Integration configuration', tag: 'Settings', auth: 'viewer/admin' }, + { path: '/api/skills', methods: ['GET', 'POST', 'PUT', 'DELETE'], description: 'Installed skills index and disk CRUD', tag: 'Settings', auth: 'viewer/operator' }, + + // ── Gateway ─────────────────────────────────────── + { path: '/api/gateways', methods: ['GET', 'POST', 'PATCH', 'DELETE'], description: 'Gateway management', tag: 'Gateway', auth: 'admin' }, + { path: '/api/gateways/connect', methods: ['POST'], description: 'Connect to gateway WebSocket', tag: 'Gateway', auth: 'operator' }, + { path: '/api/gateways/health', methods: ['GET'], description: 'Gateway health check', tag: 'Gateway', auth: 'viewer' }, + { path: '/api/gateway-config', methods: ['GET', 'PATCH'], description: 'Gateway configuration', tag: 'Gateway', auth: 'admin' }, + { path: '/api/connect', methods: ['POST'], description: 'WebSocket connection info', tag: 'Gateway', auth: 'operator' }, + + // ── GitHub ──────────────────────────────────────── + { path: '/api/github', methods: ['GET', 'POST'], description: 'GitHub issue sync', tag: 'GitHub', auth: 'viewer/operator' }, + + // ── Super Admin ─────────────────────────────────── + { path: '/api/super/tenants', methods: ['GET', 'POST', 'PATCH', 'DELETE'], description: 'Tenant management', tag: 'Super Admin', auth: 'admin' }, + { path: '/api/super/tenants/:id/decommission', methods: ['POST'], description: 'Decommission tenant', tag: 'Super Admin', auth: 'admin' }, + { path: '/api/super/provision-jobs', methods: ['GET', 'POST'], description: 'Provision job management', tag: 'Super Admin', auth: 'admin' }, + { path: '/api/super/provision-jobs/:id', methods: ['GET'], description: 'Provision job detail', 
tag: 'Super Admin', auth: 'admin' }, + { path: '/api/super/provision-jobs/:id/run', methods: ['POST'], description: 'Execute provision job', tag: 'Super Admin', auth: 'admin' }, + { path: '/api/super/os-users', methods: ['GET'], description: 'OS user listing', tag: 'Super Admin', auth: 'admin' }, + + // ── System ──────────────────────────────────────── + { path: '/api/status', methods: ['GET'], description: 'System status & capabilities', tag: 'System', auth: 'public' }, + { path: '/api/audit', methods: ['GET'], description: 'Audit trail', tag: 'System', auth: 'admin' }, + { path: '/api/backup', methods: ['POST'], description: 'Database backup', tag: 'System', auth: 'admin' }, + { path: '/api/cleanup', methods: ['POST'], description: 'Database cleanup', tag: 'System', auth: 'admin' }, + { path: '/api/export', methods: ['GET'], description: 'Data export', tag: 'System', auth: 'viewer' }, + { path: '/api/workload', methods: ['GET'], description: 'Agent workload stats', tag: 'System', auth: 'viewer' }, + { path: '/api/releases/check', methods: ['GET'], description: 'Check for updates', tag: 'System', auth: 'public' }, + { path: '/api/openclaw/version', methods: ['GET'], description: 'Installed OpenClaw version and latest release metadata', tag: 'System', auth: 'public' }, + { path: '/api/openclaw/update', methods: ['POST'], description: 'Update OpenClaw to the latest stable release', tag: 'System', auth: 'admin' }, + { path: '/api/openclaw/doctor', methods: ['GET', 'POST'], description: 'Inspect and fix OpenClaw configuration drift', tag: 'System', auth: 'admin' }, + + // ── Local ───────────────────────────────────────── + { path: '/api/local/flight-deck', methods: ['GET'], description: 'Local flight deck status', tag: 'Local', auth: 'viewer' }, + { path: '/api/local/agents-doc', methods: ['GET'], description: 'Local AGENTS.md discovery and content', tag: 'Local', auth: 'viewer' }, + { path: '/api/local/terminal', methods: ['POST'], description: 'Local terminal 
command', tag: 'Local', auth: 'admin' }, + + // ── Docs ────────────────────────────────────────── + { path: '/api/docs', methods: ['GET'], description: 'OpenAPI spec (JSON)', tag: 'Docs', auth: 'public' }, + { path: '/api/docs/tree', methods: ['GET'], description: 'Documentation tree', tag: 'Docs', auth: 'public' }, + { path: '/api/docs/content', methods: ['GET'], description: 'Documentation page content', tag: 'Docs', auth: 'public' }, + { path: '/api/docs/search', methods: ['GET'], description: 'Documentation search', tag: 'Docs', auth: 'public' }, + + // ── Discovery ───────────────────────────────────── + { path: '/api/index', methods: ['GET'], description: 'API endpoint catalog (this endpoint)', tag: 'Discovery', auth: 'public' }, +] + +const payload = { + version: VERSION, + generated_at: new Date().toISOString(), + total_endpoints: endpoints.length, + endpoints, + event_stream: { + path: '/api/events', + protocol: 'SSE', + description: 'Real-time server-sent events for tasks, agents, chat, and activity updates', + }, + docs: { + openapi: '/api/docs', + tree: '/api/docs/tree', + search: '/api/docs/search', + }, +} + +export async function GET() { + return NextResponse.json(payload, { + headers: { + 'Cache-Control': 'public, s-maxage=300, stale-while-revalidate=600', + }, + }) +} diff --git a/src/app/api/integrations/route.ts b/src/app/api/integrations/route.ts index 921d1c9..6f7713e 100644 --- a/src/app/api/integrations/route.ts +++ b/src/app/api/integrations/route.ts @@ -4,23 +4,42 @@ import { logAuditEvent } from '@/lib/db' import { config } from '@/lib/config' import { join } from 'path' import { readFile, writeFile, rename } from 'fs/promises' +import { existsSync } from 'fs' +import os from 'os' import { execFileSync } from 'child_process' import { validateBody, integrationActionSchema } from '@/lib/validation' import { mutationLimiter } from '@/lib/rate-limit' +import { detectProviderSubscriptions } from '@/lib/provider-subscriptions' +import { 
getPluginIntegrations, getPluginCategories } from '@/lib/plugins' +import type { PluginIntegrationDef } from '@/lib/plugins' // --------------------------------------------------------------------------- // Integration registry // --------------------------------------------------------------------------- +type BuiltinCategory = 'ai' | 'search' | 'social' | 'messaging' | 'devtools' | 'security' | 'infra' | 'productivity' | 'browser' + interface IntegrationDef { id: string name: string - category: 'ai' | 'search' | 'social' | 'messaging' | 'devtools' | 'security' | 'infra' + category: string envVars: string[] vaultItem?: string // 1Password item name testable?: boolean + recommendation?: string } +interface IntegrationProbeSnapshot { + opAvailable: boolean + xint: { installed: boolean; oauthConfigured: boolean; envConfigured: boolean } + ollamaInstalled: boolean + ollamaReachable: boolean + gwsInstalled: boolean +} + +let integrationProbeCache: { ts: number; value: IntegrationProbeSnapshot } | null = null +const INTEGRATION_PROBE_TTL_MS = 5000 + const INTEGRATIONS: IntegrationDef[] = [ // AI Providers { id: 'anthropic', name: 'Anthropic', category: 'ai', envVars: ['ANTHROPIC_API_KEY'], vaultItem: 'openclaw-anthropic-api-key', testable: true }, @@ -34,7 +53,13 @@ const INTEGRATIONS: IntegrationDef[] = [ { id: 'brave', name: 'Brave Search', category: 'search', envVars: ['BRAVE_API_KEY'], vaultItem: 'openclaw-brave-api-key' }, // Social - { id: 'x_twitter', name: 'X / Twitter', category: 'social', envVars: ['X_COOKIES_PATH'] }, + { + id: 'x_twitter', + name: 'X / Twitter', + category: 'social', + envVars: ['X_COOKIES_PATH'], + recommendation: 'Recommended: use xint CLI as default (`xint auth`) instead of manual cookies path.', + }, { id: 'linkedin', name: 'LinkedIn', category: 'social', envVars: ['LINKEDIN_ACCESS_TOKEN'] }, // Messaging — add entries here for each Telegram bot you run @@ -43,11 +68,24 @@ const INTEGRATIONS: IntegrationDef[] = [ // Dev Tools { id: 
'github', name: 'GitHub', category: 'devtools', envVars: ['GITHUB_TOKEN'], vaultItem: 'openclaw-github-token', testable: true }, + // Productivity + { + id: 'google_workspace', + name: 'Google Workspace', + category: 'productivity', + envVars: ['GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE'], + testable: true, + recommendation: 'Install: npm i -g @googleworkspace/cli — then run `gws auth login` or set a service account credentials file.', + }, + // Security { id: 'onepassword', name: '1Password', category: 'security', envVars: ['OP_SERVICE_ACCOUNT_TOKEN'] }, // Infrastructure { id: 'gateway', name: 'Gateway Auth', category: 'infra', envVars: ['OPENCLAW_GATEWAY_TOKEN'], vaultItem: 'openclaw-openclaw-gateway-token' }, + + // Browser Automation + { id: 'hyperbrowser', name: 'Hyperbrowser', category: 'browser', envVars: ['HYPERBROWSER_API_KEY'], testable: true, recommendation: 'Cloud browser automation for AI agents. Get a key at hyperbrowser.ai' }, ] // Category metadata @@ -59,6 +97,8 @@ const CATEGORIES: Record = { devtools: { label: 'Dev Tools', order: 4 }, security: { label: 'Security', order: 5 }, infra: { label: 'Infrastructure', order: 6 }, + productivity: { label: 'Productivity', order: 7 }, + browser: { label: 'Browser Automation', order: 8 }, } // Vars that must never be written via this API @@ -142,6 +182,95 @@ function isVarBlocked(key: string): boolean { return BLOCKED_PREFIXES.some(p => key.startsWith(p)) } +function getEffectiveEnvValue(envMap: Map, key: string): string { + const fromFile = envMap.get(key) + if (typeof fromFile === 'string' && fromFile.length > 0) return fromFile + const fromProcess = process.env[key] + if (typeof fromProcess === 'string' && fromProcess.length > 0) return fromProcess + return '' +} + +function isPathLikeEnvVar(key: string): boolean { + return key.endsWith('_PATH') || key.endsWith('_FILE') +} + +function isConfiguredValue(key: string, value: string): boolean { + if (!value || value.length === 0) return false + if 
(isPathLikeEnvVar(key)) { + try { + return existsSync(value) + } catch { + return false + } + } + return true +} + +function checkOpAuthenticated(opEnv?: NodeJS.ProcessEnv): boolean { + try { + execFileSync('op', ['whoami', '--format', 'json'], { + stdio: 'pipe', + timeout: 3000, + env: opEnv || process.env, + }) + return true + } catch { + return false + } +} + +function checkCommandAvailable(command: string): boolean { + try { + execFileSync('which', [command], { stdio: 'pipe', timeout: 3000 }) + return true + } catch { + return false + } +} + +function checkXintState(): { installed: boolean; oauthConfigured: boolean; envConfigured: boolean } { + const installed = checkCommandAvailable('xint') + const oauthPath = join(os.homedir(), '.xint', 'data', 'oauth-tokens.json') + const envPath = join(os.homedir(), '.xint', '.env') + const oauthConfigured = existsSync(oauthPath) + const envConfigured = existsSync(envPath) + return { installed, oauthConfigured, envConfigured } +} + +function resolveOllamaBaseUrl(): string { + const raw = String(process.env.OLLAMA_HOST || '').trim() + if (!raw) return 'http://127.0.0.1:11434' + if (raw.startsWith('http://') || raw.startsWith('https://')) return raw + return `http://${raw}` +} + +async function checkOllamaReachable(): Promise { + try { + const base = resolveOllamaBaseUrl().replace(/\/+$/, '') + const res = await fetch(`${base}/api/tags`, { signal: AbortSignal.timeout(1200) }) + return res.ok + } catch { + return false + } +} + +async function getIntegrationProbeSnapshot(): Promise { + const now = Date.now() + if (integrationProbeCache && (now - integrationProbeCache.ts) < INTEGRATION_PROBE_TTL_MS) { + return integrationProbeCache.value + } + + const value: IntegrationProbeSnapshot = { + opAvailable: checkOpAvailable(), + xint: checkXintState(), + ollamaInstalled: checkCommandAvailable('ollama'), + ollamaReachable: await checkOllamaReachable(), + gwsInstalled: checkCommandAvailable('gws'), + } + integrationProbeCache = { ts: 
now, value } + return value +} + // Uses execFileSync (no shell) to avoid command injection function checkOpAvailable(): boolean { try { @@ -194,16 +323,44 @@ export async function GET(request: NextRequest) { } } - const opAvailable = checkOpAvailable() + const probe = await getIntegrationProbeSnapshot() + const { opAvailable, xint, ollamaInstalled, ollamaReachable, gwsInstalled } = probe + const providerSubscriptions = detectProviderSubscriptions() - const integrations = INTEGRATIONS.map(def => { + // Merge plugin integrations and categories + const pluginIntegrations = getPluginIntegrations() + const allIntegrations: IntegrationDef[] = [...INTEGRATIONS] + const pluginIntegrationMap = new Map() + for (const pi of pluginIntegrations) { + if (!allIntegrations.some(i => i.id === pi.id)) { + allIntegrations.push({ + id: pi.id, + name: pi.name, + category: pi.category, + envVars: pi.envVars, + vaultItem: pi.vaultItem, + testable: pi.testable, + recommendation: pi.recommendation, + }) + } + pluginIntegrationMap.set(pi.id, pi) + } + + const allCategories = { ...CATEGORIES } + for (const pc of getPluginCategories()) { + if (!(pc.id in allCategories)) { + allCategories[pc.id] = { label: pc.label, order: pc.order } + } + } + + const integrations = allIntegrations.map(def => { const vars: Record = {} let allSet = true let anySet = false for (const envVar of def.envVars) { - const val = envMap.get(envVar) - if (val && val.length > 0) { + const val = getEffectiveEnvValue(envMap, envVar) + if (isConfiguredValue(envVar, val)) { vars[envVar] = { redacted: redactValue(val), set: true } anySet = true } else { @@ -212,23 +369,90 @@ export async function GET(request: NextRequest) { } } + if (def.id === 'onepassword' && !anySet && opAvailable) { + const opEnv = { ...process.env } + const fileToken = envMap.get('OP_SERVICE_ACCOUNT_TOKEN') + if (fileToken) opEnv.OP_SERVICE_ACCOUNT_TOKEN = fileToken + if (checkOpAuthenticated(opEnv)) { + vars.OP_SERVICE_ACCOUNT_TOKEN = { + redacted: 
fileToken ? redactValue(fileToken) : 'op session', + set: true, + } + allSet = true + anySet = true + } + } + + // Support OAuth/subscription-based auth for providers that may not expose API keys. + if ((def.id === 'anthropic' || def.id === 'openai') && !anySet) { + const sub = providerSubscriptions.active[def.id] + if (sub) { + const primaryVar = def.envVars[0] + vars[primaryVar] = { + redacted: `${sub.type} (${sub.source})`, + set: true, + } + allSet = true + anySet = true + } + } + + // Local Ollama can be available without API key-based auth. + if (def.id === 'ollama' && !anySet) { + const primaryVar = def.envVars[0] + if (ollamaReachable) { + vars[primaryVar] = { redacted: 'local daemon', set: true } + allSet = true + anySet = true + } else if (ollamaInstalled) { + vars[primaryVar] = { redacted: 'installed (daemon not reachable)', set: true } + allSet = false + anySet = true + } + } + + // Google Workspace CLI detection + if (def.id === 'google_workspace' && !anySet) { + const primaryVar = def.envVars[0] + if (gwsInstalled) { + vars[primaryVar] = { redacted: 'gws CLI installed (run `gws auth login`)', set: true } + allSet = false + anySet = true + } + } + + // X integration should default to xint auth when present. + if (def.id === 'x_twitter' && !anySet) { + const primaryVar = def.envVars[0] + if (xint.oauthConfigured) { + vars[primaryVar] = { redacted: 'xint oauth', set: true } + allSet = true + anySet = true + } else if (xint.installed || xint.envConfigured) { + vars[primaryVar] = { redacted: 'xint installed (run `xint auth`)', set: true } + allSet = false + anySet = true + } + } + const status = allSet && anySet ? 'connected' : anySet ? 'partial' : 'not_configured' return { id: def.id, name: def.name, category: def.category, - categoryLabel: CATEGORIES[def.category]?.label ?? def.category, + categoryLabel: allCategories[def.category]?.label ?? def.category, envVars: vars, status, vaultItem: def.vaultItem ?? null, testable: def.testable ?? 
false, + recommendation: def.recommendation ?? null, } }) return NextResponse.json({ integrations, - categories: Object.entries(CATEGORIES) + categories: Object.entries(allCategories) .sort(([, a], [, b]) => a.order - b.order) .map(([id, meta]) => ({ id, label: meta.label })), opAvailable, @@ -377,7 +601,22 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: 'integrationId required' }, { status: 400 }) } - const integration = INTEGRATIONS.find(i => i.id === body.integrationId) + let integration: IntegrationDef | undefined = INTEGRATIONS.find(i => i.id === body.integrationId) + if (!integration) { + // Check plugin integrations + const pi = getPluginIntegrations().find(i => i.id === body.integrationId) + if (pi) { + integration = { + id: pi.id, + name: pi.name, + category: pi.category, + envVars: pi.envVars, + vaultItem: pi.vaultItem, + testable: pi.testable, + recommendation: pi.recommendation, + } + } + } if (!integration) { return NextResponse.json({ error: `Unknown integration: ${body.integrationId}` }, { status: 404 }) } @@ -418,10 +657,11 @@ async function handleTest( try { let result: { ok: boolean; detail: string } + const providerSubscriptions = detectProviderSubscriptions() switch (integration.id) { case 'telegram': { - const token = envMap.get(integration.envVars[0]) + const token = getEffectiveEnvValue(envMap, integration.envVars[0]) if (!token) return NextResponse.json({ ok: false, detail: 'Token not set' }) const res = await fetch(`https://api.telegram.org/bot${token}/getMe`, { signal: AbortSignal.timeout(5000) }) const data = await res.json() @@ -432,7 +672,7 @@ async function handleTest( } case 'github': { - const token = envMap.get('GITHUB_TOKEN') + const token = getEffectiveEnvValue(envMap, 'GITHUB_TOKEN') if (!token) return NextResponse.json({ ok: false, detail: 'Token not set' }) const res = await fetch('https://api.github.com/user', { headers: { Authorization: `Bearer ${token}`, 'User-Agent': 
'MissionControl/1.0' }, @@ -448,8 +688,12 @@ async function handleTest( } case 'anthropic': { - const key = envMap.get('ANTHROPIC_API_KEY') - if (!key) return NextResponse.json({ ok: false, detail: 'API key not set' }) + const key = getEffectiveEnvValue(envMap, 'ANTHROPIC_API_KEY') + if (!key) { + const sub = providerSubscriptions.active.anthropic + if (sub) return NextResponse.json({ ok: true, detail: `OAuth/subscription detected: ${sub.type}` }) + return NextResponse.json({ ok: false, detail: 'API key not set' }) + } const res = await fetch('https://api.anthropic.com/v1/models', { method: 'GET', headers: { 'x-api-key': key, 'anthropic-version': '2023-06-01' }, @@ -462,8 +706,12 @@ async function handleTest( } case 'openai': { - const key = envMap.get('OPENAI_API_KEY') - if (!key) return NextResponse.json({ ok: false, detail: 'API key not set' }) + const key = getEffectiveEnvValue(envMap, 'OPENAI_API_KEY') + if (!key) { + const sub = providerSubscriptions.active.openai + if (sub) return NextResponse.json({ ok: true, detail: `OAuth/subscription detected: ${sub.type}` }) + return NextResponse.json({ ok: false, detail: 'API key not set' }) + } const res = await fetch('https://api.openai.com/v1/models', { headers: { Authorization: `Bearer ${key}` }, signal: AbortSignal.timeout(5000), @@ -475,7 +723,7 @@ async function handleTest( } case 'openrouter': { - const key = envMap.get('OPENROUTER_API_KEY') + const key = getEffectiveEnvValue(envMap, 'OPENROUTER_API_KEY') if (!key) return NextResponse.json({ ok: false, detail: 'API key not set' }) const res = await fetch('https://openrouter.ai/api/v1/models', { headers: { Authorization: `Bearer ${key}` }, @@ -487,8 +735,70 @@ async function handleTest( break } - default: - return NextResponse.json({ error: 'Test not implemented for this integration' }, { status: 400 }) + case 'hyperbrowser': { + const key = getEffectiveEnvValue(envMap, 'HYPERBROWSER_API_KEY') + if (!key) return NextResponse.json({ ok: false, detail: 'API key 
not set' }) + const res = await fetch('https://app.hyperbrowser.ai/api/v2/sessions', { + headers: { 'x-api-key': key }, + signal: AbortSignal.timeout(5000), + }) + result = res.ok + ? { ok: true, detail: 'API key valid' } + : { ok: false, detail: `HTTP ${res.status}` } + break + } + + case 'google_workspace': { + const credsFile = getEffectiveEnvValue(envMap, 'GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE') + const gwsAvail = checkCommandAvailable('gws') + if (!gwsAvail) { + result = { ok: false, detail: 'gws CLI not installed — run: npm i -g @googleworkspace/cli' } + break + } + try { + const env: NodeJS.ProcessEnv = { ...process.env } + if (credsFile) env.GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE = credsFile + execFileSync('gws', ['auth', 'status'], { + timeout: 10000, + stdio: ['pipe', 'pipe', 'pipe'], + env, + }) + result = { ok: true, detail: 'Authenticated' } + } catch (err: any) { + const stderr = err.stderr?.toString() || '' + result = { ok: false, detail: stderr.slice(0, 120) || 'Not authenticated — run `gws auth login`' } + } + break + } + + default: { + // Check plugin testHandler first + const pluginDef = getPluginIntegrations().find(pi => pi.id === integration.id) + if (pluginDef?.testHandler) { + result = await pluginDef.testHandler(envMap) + break + } + + // Generic connectivity test: attempt a HEAD request to known base URLs + const baseUrls: Record = { + nvidia: 'https://api.nvidia.com', + moonshot: 'https://api.moonshot.cn', + brave: 'https://api.search.brave.com', + linkedin: 'https://api.linkedin.com', + ollama: resolveOllamaBaseUrl(), + gateway: String(process.env.OPENCLAW_GATEWAY_URL || '').trim() || '', + } + const url = baseUrls[integration.id] + if (url) { + const res = await fetch(url, { method: 'HEAD', signal: AbortSignal.timeout(5000) }) + result = res.ok || res.status < 500 + ? 
{ ok: true, detail: `Reachable (HTTP ${res.status})` } + : { ok: false, detail: `Unreachable (HTTP ${res.status})` } + } else { + return NextResponse.json({ ok: false, detail: 'No test available — configure the integration URL to enable testing' }) + } + break + } } const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' diff --git a/src/app/api/local/agents-doc/route.ts b/src/app/api/local/agents-doc/route.ts new file mode 100644 index 0000000..f8e7ad6 --- /dev/null +++ b/src/app/api/local/agents-doc/route.ts @@ -0,0 +1,53 @@ +import { NextRequest, NextResponse } from 'next/server' +import { access, readFile } from 'node:fs/promises' +import { constants } from 'node:fs' +import { join } from 'node:path' +import { homedir } from 'node:os' +import { requireRole } from '@/lib/auth' + +async function findFirstReadable(paths: string[]): Promise { + for (const p of paths) { + try { + await access(p, constants.R_OK) + return p + } catch { + // Try next candidate + } + } + return null +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const cwd = process.cwd() + const home = homedir() + const candidates = [ + join(cwd, 'AGENTS.md'), + join(cwd, 'agents.md'), + join(home, '.codex', 'AGENTS.md'), + join(home, '.agents', 'AGENTS.md'), + join(home, '.config', 'codex', 'AGENTS.md'), + ] + + const found = await findFirstReadable(candidates) + if (!found) { + return NextResponse.json({ + found: false, + path: null, + content: null, + candidates, + }) + } + + const content = await readFile(found, 'utf8') + return NextResponse.json({ + found: true, + path: found, + content, + candidates, + }) +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/memory/context/route.ts b/src/app/api/memory/context/route.ts new file mode 100644 index 0000000..bfc6da1 --- /dev/null +++ 
b/src/app/api/memory/context/route.ts @@ -0,0 +1,32 @@ +import { NextRequest, NextResponse } from 'next/server' +import { config } from '@/lib/config' +import { requireRole } from '@/lib/auth' +import { readLimiter } from '@/lib/rate-limit' +import { generateContextPayload } from '@/lib/memory-utils' +import { logger } from '@/lib/logger' + +const MEMORY_PATH = config.memoryDir + +/** + * Context injection endpoint — generates a payload for agent session start. + * Returns workspace tree, recent files, health summary, and maintenance signals. + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const limited = readLimiter(request) + if (limited) return limited + + if (!MEMORY_PATH) { + return NextResponse.json({ error: 'Memory directory not configured' }, { status: 500 }) + } + + try { + const payload = await generateContextPayload(MEMORY_PATH) + return NextResponse.json(payload) + } catch (err) { + logger.error({ err }, 'Memory context API error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/memory/graph/route.ts b/src/app/api/memory/graph/route.ts new file mode 100644 index 0000000..2e49c41 --- /dev/null +++ b/src/app/api/memory/graph/route.ts @@ -0,0 +1,115 @@ +import { NextRequest, NextResponse } from 'next/server' +import { existsSync, readdirSync, statSync } from 'fs' +import path from 'path' +import Database from 'better-sqlite3' +import { config } from '@/lib/config' +import { requireRole } from '@/lib/auth' +import { readLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' + +interface AgentFileInfo { + path: string + chunks: number + textSize: number +} + +interface AgentGraphData { + name: string + dbSize: number + totalChunks: number + totalFiles: number + files: AgentFileInfo[] +} + +const memoryDbDir = 
config.openclawStateDir + ? path.join(config.openclawStateDir, 'memory') + : '' + +function getAgentData(dbPath: string, agentName: string): AgentGraphData | null { + try { + const dbStat = statSync(dbPath) + const db = new Database(dbPath, { readonly: true, fileMustExist: true }) + + let files: AgentFileInfo[] = [] + let totalChunks = 0 + let totalFiles = 0 + + try { + // Check if chunks table exists + const tableCheck = db + .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='chunks'") + .get() as { name: string } | undefined + + if (tableCheck) { + // Use COUNT only — skip SUM(LENGTH(text)) which forces a full data scan + const rows = db + .prepare( + 'SELECT path, COUNT(*) as chunks FROM chunks GROUP BY path ORDER BY chunks DESC' + ) + .all() as Array<{ path: string; chunks: number }> + + files = rows.map((r) => ({ + path: r.path || '(unknown)', + chunks: r.chunks, + textSize: 0, + })) + + totalChunks = files.reduce((sum, f) => sum + f.chunks, 0) + totalFiles = files.length + } + } finally { + db.close() + } + + return { + name: agentName, + dbSize: dbStat.size, + totalChunks, + totalFiles, + files, + } + } catch (err) { + logger.warn(`Failed to read memory DB for agent "${agentName}": ${err}`) + return null + } +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const limited = readLimiter(request) + if (limited) return limited + + if (!memoryDbDir || !existsSync(memoryDbDir)) { + return NextResponse.json( + { error: 'Memory directory not available', agents: [] }, + { status: 404 } + ) + } + + const agentFilter = request.nextUrl.searchParams.get('agent') || 'all' + + try { + const entries = readdirSync(memoryDbDir).filter((f) => f.endsWith('.sqlite')) + const agents: AgentGraphData[] = [] + + for (const entry of entries) { + const agentName = entry.replace('.sqlite', '') + + if (agentFilter !== 
'all' && agentName !== agentFilter) continue + + const dbPath = path.join(memoryDbDir, entry) + const data = getAgentData(dbPath, agentName) + if (data) agents.push(data) + } + + // Sort by total chunks descending + agents.sort((a, b) => b.totalChunks - a.totalChunks) + + return NextResponse.json({ agents }) + } catch (err) { + logger.error(`Failed to build memory graph data: ${err}`) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/memory/health/route.ts b/src/app/api/memory/health/route.ts new file mode 100644 index 0000000..e9937f3 --- /dev/null +++ b/src/app/api/memory/health/route.ts @@ -0,0 +1,28 @@ +import { NextRequest, NextResponse } from 'next/server' +import { config } from '@/lib/config' +import { requireRole } from '@/lib/auth' +import { readLimiter } from '@/lib/rate-limit' +import { runHealthDiagnostics } from '@/lib/memory-utils' +import { logger } from '@/lib/logger' + +const MEMORY_PATH = config.memoryDir + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const limited = readLimiter(request) + if (limited) return limited + + if (!MEMORY_PATH) { + return NextResponse.json({ error: 'Memory directory not configured' }, { status: 500 }) + } + + try { + const report = await runHealthDiagnostics(MEMORY_PATH) + return NextResponse.json(report) + } catch (err) { + logger.error({ err }, 'Memory health API error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/memory/links/route.ts b/src/app/api/memory/links/route.ts new file mode 100644 index 0000000..7197421 --- /dev/null +++ b/src/app/api/memory/links/route.ts @@ -0,0 +1,74 @@ +import { NextRequest, NextResponse } from 'next/server' +import { config } from '@/lib/config' +import { requireRole } from '@/lib/auth' +import { readLimiter 
} from '@/lib/rate-limit' +import { buildLinkGraph, extractWikiLinks } from '@/lib/memory-utils' +import { readFile } from 'fs/promises' +import { join, basename, extname } from 'path' +import { logger } from '@/lib/logger' + +const MEMORY_PATH = config.memoryDir + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const limited = readLimiter(request) + if (limited) return limited + + if (!MEMORY_PATH) { + return NextResponse.json({ error: 'Memory directory not configured' }, { status: 500 }) + } + + const { searchParams } = new URL(request.url) + const filePath = searchParams.get('file') + + try { + if (filePath) { + // Return links for a specific file + const fullPath = join(MEMORY_PATH, filePath) + // NOTE(review): prefix-only traversal check — startsWith without a trailing separator also admits sibling paths (e.g. "/data/memory-backup" passes for "/data/memory"); prefer resolveWithin() as used in /api/memory/route.ts + if (!fullPath.startsWith(MEMORY_PATH)) { + return NextResponse.json({ error: 'Invalid path' }, { status: 400 }) + } + const content = await readFile(fullPath, 'utf-8') + const links = extractWikiLinks(content) + + // Also find backlinks from the full graph + const graph = await buildLinkGraph(MEMORY_PATH) + const node = graph.nodes[filePath] + const incoming = node?.incoming ?? [] + const outgoing = node?.outgoing ??
[] + + return NextResponse.json({ + file: filePath, + wikiLinks: links, + outgoing, + incoming, + }) + } + + // Return full link graph + const graph = await buildLinkGraph(MEMORY_PATH) + + // Serialize for the frontend (strip wikiLinks detail for the full graph) + const nodes = Object.values(graph.nodes).map((n) => ({ + path: n.path, + name: n.name, + outgoing: n.outgoing, + incoming: n.incoming, + linkCount: n.outgoing.length + n.incoming.length, + hasSchema: n.schema !== null, + })) + + return NextResponse.json({ + nodes, + totalFiles: graph.totalFiles, + totalLinks: graph.totalLinks, + orphans: graph.orphans, + }) + } catch (err) { + logger.error({ err }, 'Memory links API error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/memory/process/route.ts b/src/app/api/memory/process/route.ts new file mode 100644 index 0000000..6802a70 --- /dev/null +++ b/src/app/api/memory/process/route.ts @@ -0,0 +1,59 @@ +import { NextRequest, NextResponse } from 'next/server' +import { config } from '@/lib/config' +import { requireRole } from '@/lib/auth' +import { mutationLimiter } from '@/lib/rate-limit' +import { reflectPass, reweavePass, generateMOCs } from '@/lib/memory-utils' +import { logger } from '@/lib/logger' + +const MEMORY_PATH = config.memoryDir + +/** + * Processing pipeline endpoint — runs knowledge maintenance operations. 
+ * Actions: reflect, reweave, generate-moc + * + * These mirror Ars Contexta's 6 Rs processing pipeline, adapted for MC: + * - reflect: Find connection opportunities between files + * - reweave: Identify stale files needing updates from newer linked files + * - generate-moc: Auto-generate Maps of Content from file clusters + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + if (!MEMORY_PATH) { + return NextResponse.json({ error: 'Memory directory not configured' }, { status: 500 }) + } + + try { + const body = await request.json() + const { action } = body + + if (action === 'reflect') { + const result = await reflectPass(MEMORY_PATH) + return NextResponse.json(result) + } + + if (action === 'reweave') { + const result = await reweavePass(MEMORY_PATH) + return NextResponse.json(result) + } + + if (action === 'generate-moc') { + const mocs = await generateMOCs(MEMORY_PATH) + return NextResponse.json({ + action: 'generate-moc', + groups: mocs, + totalGroups: mocs.length, + totalEntries: mocs.reduce((s, g) => s + g.entries.length, 0), + }) + } + + return NextResponse.json({ error: 'Invalid action. 
Use: reflect, reweave, generate-moc' }, { status: 400 }) + } catch (err) { + logger.error({ err }, 'Memory process API error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/memory/route.ts b/src/app/api/memory/route.ts index a7fddce..2e246a2 100644 --- a/src/app/api/memory/route.ts +++ b/src/app/api/memory/route.ts @@ -3,10 +3,12 @@ import { readdir, readFile, stat, lstat, realpath, writeFile, mkdir, unlink } fr import { existsSync, mkdirSync } from 'fs' import { join, dirname, sep } from 'path' import { config } from '@/lib/config' +import { db_helpers } from '@/lib/db' import { resolveWithin } from '@/lib/paths' import { requireRole } from '@/lib/auth' import { readLimiter, mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +import { validateSchema, extractWikiLinks } from '@/lib/memory-utils' const MEMORY_PATH = config.memoryDir const MEMORY_ALLOWED_PREFIXES = (config.memoryAllowedPrefixes || []).map((p) => p.replace(/\\/g, '/')) @@ -85,7 +87,11 @@ async function resolveSafeMemoryPath(baseDir: string, relativePath: string): Pro return fullPath } -async function buildFileTree(dirPath: string, relativePath: string = ''): Promise { +async function buildFileTree( + dirPath: string, + relativePath: string = '', + maxDepth: number = Number.POSITIVE_INFINITY, +): Promise { try { const items = await readdir(dirPath, { withFileTypes: true }) const files: MemoryFile[] = [] @@ -101,7 +107,10 @@ async function buildFileTree(dirPath: string, relativePath: string = ''): Promis const stats = await stat(itemPath) if (item.isDirectory()) { - const children = await buildFileTree(itemPath, itemRelativePath) + const children = + maxDepth > 0 + ? 
await buildFileTree(itemPath, itemRelativePath, maxDepth - 1) + : undefined files.push({ path: itemRelativePath, name: item.name, @@ -147,12 +156,26 @@ export async function GET(request: NextRequest) { const { searchParams } = new URL(request.url) const path = searchParams.get('path') const action = searchParams.get('action') + const depthParam = Number.parseInt(searchParams.get('depth') || '', 10) + const maxDepth = Number.isFinite(depthParam) ? Math.max(0, Math.min(depthParam, 8)) : Number.POSITIVE_INFINITY if (action === 'tree') { // Return the file tree if (!MEMORY_PATH) { return NextResponse.json({ tree: [] }) } + if (path) { + if (!isPathAllowed(path)) { + return NextResponse.json({ error: 'Path not allowed' }, { status: 403 }) + } + const fullPath = await resolveSafeMemoryPath(MEMORY_PATH, path) + const stats = await stat(fullPath).catch(() => null) + if (!stats?.isDirectory()) { + return NextResponse.json({ error: 'Directory not found' }, { status: 404 }) + } + const tree = await buildFileTree(fullPath, path, maxDepth) + return NextResponse.json({ tree }) + } if (MEMORY_ALLOWED_PREFIXES.length) { const tree: MemoryFile[] = [] for (const prefix of MEMORY_ALLOWED_PREFIXES) { @@ -167,7 +190,7 @@ export async function GET(request: NextRequest) { name: folder, type: 'directory', modified: stats.mtime.getTime(), - children: await buildFileTree(fullPath, folder), + children: await buildFileTree(fullPath, folder, maxDepth), }) } catch { // Skip unreadable roots @@ -175,7 +198,7 @@ export async function GET(request: NextRequest) { } return NextResponse.json({ tree }) } - const tree = await buildFileTree(MEMORY_PATH) + const tree = await buildFileTree(MEMORY_PATH, '', maxDepth) return NextResponse.json({ tree }) } @@ -192,12 +215,19 @@ export async function GET(request: NextRequest) { try { const content = await readFile(fullPath, 'utf-8') const stats = await stat(fullPath) - + + // Extract wiki-links and schema validation for .md files + const isMarkdown = 
path.endsWith('.md') + const wikiLinks = isMarkdown ? extractWikiLinks(content) : [] + const schemaResult = isMarkdown ? validateSchema(content) : null + return NextResponse.json({ content, size: stats.size, modified: stats.mtime.getTime(), - path + path, + wikiLinks, + schema: schemaResult, }) } catch (error) { return NextResponse.json({ error: 'File not found' }, { status: 404 }) @@ -321,8 +351,19 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: 'Content is required for save action' }, { status: 400 }) } + // Validate schema if present (warn but don't block save) + const schemaResult = path.endsWith('.md') ? validateSchema(content) : null + const schemaWarnings = schemaResult?.errors ?? [] + await writeFile(fullPath, content, 'utf-8') - return NextResponse.json({ success: true, message: 'File saved successfully' }) + try { + db_helpers.logActivity('memory_file_saved', 'memory', 0, auth.user.username || 'unknown', `Updated ${path}`, { path, size: content.length }) + } catch { /* best-effort */ } + return NextResponse.json({ + success: true, + message: 'File saved successfully', + schemaWarnings, + }) } if (action === 'create') { @@ -345,6 +386,9 @@ export async function POST(request: NextRequest) { } await writeFile(fullPath, content || '', 'utf-8') + try { + db_helpers.logActivity('memory_file_created', 'memory', 0, auth.user.username || 'unknown', `Created ${path}`, { path }) + } catch { /* best-effort */ } return NextResponse.json({ success: true, message: 'File created successfully' }) } @@ -387,6 +431,9 @@ export async function DELETE(request: NextRequest) { } await unlink(fullPath) + try { + db_helpers.logActivity('memory_file_deleted', 'memory', 0, auth.user.username || 'unknown', `Deleted ${path}`, { path }) + } catch { /* best-effort */ } return NextResponse.json({ success: true, message: 'File deleted successfully' }) } diff --git a/src/app/api/nodes/route.ts b/src/app/api/nodes/route.ts new file mode 100644 index 
0000000..c01beb5 --- /dev/null +++ b/src/app/api/nodes/route.ts @@ -0,0 +1,132 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { logger } from '@/lib/logger' + +const GATEWAY_TIMEOUT = 5000 + +function gatewayUrl(path: string): string { + return `http://${config.gatewayHost}:${config.gatewayPort}${path}` +} + +async function fetchGateway(path: string, init?: RequestInit): Promise { + const controller = new AbortController() + const timeout = setTimeout(() => controller.abort(), GATEWAY_TIMEOUT) + try { + return await fetch(gatewayUrl(path), { + ...init, + signal: controller.signal, + }) + } finally { + clearTimeout(timeout) + } +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const action = request.nextUrl.searchParams.get('action') || 'list' + + if (action === 'list') { + try { + const res = await fetchGateway('/api/presence') + if (!res.ok) { + logger.warn({ status: res.status }, 'Gateway presence endpoint returned non-OK') + return NextResponse.json({ nodes: [], connected: false }) + } + const data = await res.json() + return NextResponse.json(data) + } catch (err) { + logger.warn({ err }, 'Gateway unreachable for presence listing') + return NextResponse.json({ nodes: [], connected: false }) + } + } + + if (action === 'devices') { + try { + const res = await fetchGateway('/api/devices') + if (!res.ok) { + logger.warn({ status: res.status }, 'Gateway devices endpoint returned non-OK') + return NextResponse.json({ devices: [] }) + } + const data = await res.json() + return NextResponse.json(data) + } catch (err) { + logger.warn({ err }, 'Gateway unreachable for device listing') + return NextResponse.json({ devices: [] }) + } + } + + return NextResponse.json({ error: `Unknown action: ${action}` }, { status: 400 }) 
+} + +const VALID_DEVICE_ACTIONS = ['approve', 'reject', 'rotate-token', 'revoke-token'] as const +type DeviceAction = (typeof VALID_DEVICE_ACTIONS)[number] + +const ACTION_RPC_MAP: Record = { + 'approve': { method: 'device.pair.approve', paramKey: 'requestId' }, + 'reject': { method: 'device.pair.reject', paramKey: 'requestId' }, + 'rotate-token': { method: 'device.token.rotate', paramKey: 'deviceId' }, + 'revoke-token': { method: 'device.token.revoke', paramKey: 'deviceId' }, +} + +/** + * POST /api/nodes - Device management actions + * Body: { action: DeviceAction, requestId?: string, deviceId?: string, role?: string, scopes?: string[] } + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + let body: Record + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 }) + } + + const action = body.action as string + if (!action || !VALID_DEVICE_ACTIONS.includes(action as DeviceAction)) { + return NextResponse.json( + { error: `Invalid action. 
Must be one of: ${VALID_DEVICE_ACTIONS.join(', ')}` }, + { status: 400 }, + ) + } + + const spec = ACTION_RPC_MAP[action as DeviceAction] + + // Validate required param + const id = body[spec.paramKey] as string | undefined + if (!id || typeof id !== 'string') { + return NextResponse.json({ error: `Missing required field: ${spec.paramKey}` }, { status: 400 }) + } + + // Build RPC params + const params: Record = { [spec.paramKey]: id } + if ((action === 'rotate-token' || action === 'revoke-token') && body.role) { + params.role = body.role + } + if (action === 'rotate-token' && Array.isArray(body.scopes)) { + params.scopes = body.scopes + } + + try { + const res = await fetchGateway('/api/rpc', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ method: spec.method, params }), + }) + + const data = await res.json() + return NextResponse.json(data, { status: res.status }) + } catch (err: unknown) { + const name = err instanceof Error ? err.name : '' + if (name === 'AbortError') { + logger.error('Gateway device action request timed out') + return NextResponse.json({ error: 'Gateway request timed out' }, { status: 504 }) + } + logger.error({ err }, 'Gateway device action failed') + return NextResponse.json({ error: 'Gateway unreachable' }, { status: 502 }) + } +} diff --git a/src/app/api/onboarding/route.ts b/src/app/api/onboarding/route.ts new file mode 100644 index 0000000..5af5b13 --- /dev/null +++ b/src/app/api/onboarding/route.ts @@ -0,0 +1,150 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getDatabase } from '@/lib/db' +import { logger } from '@/lib/logger' +import { nextIncompleteStepIndex, parseCompletedSteps, shouldShowOnboarding, markStepCompleted } from '@/lib/onboarding-state' + +const ONBOARDING_STEPS = [ + { id: 'welcome', title: 'Welcome' }, + { id: 'interface-mode', title: 'Interface' }, + { id: 'gateway-link', title: 'Gateway' }, + { id: 
'credentials', title: 'Credentials' }, +] as const + +const ONBOARDING_SETTING_KEYS = { + completed: 'onboarding.completed', + completedAt: 'onboarding.completed_at', + skipped: 'onboarding.skipped', + completedSteps: 'onboarding.completed_steps', + checklistDismissed: 'onboarding.checklist_dismissed', +} as const + +type OnboardingSettingKey = typeof ONBOARDING_SETTING_KEYS[keyof typeof ONBOARDING_SETTING_KEYS] + +function scopedOnboardingKey(key: OnboardingSettingKey, username: string): string { + return `user.${username}.${key}` +} + +function getOnboardingSetting(key: string): string { + try { + const db = getDatabase() + const row = db.prepare('SELECT value FROM settings WHERE key = ?').get(key) as { value: string } | undefined + return row?.value ?? '' + } catch { + return '' + } +} + +function setOnboardingSetting(key: string, value: string, actor: string) { + const db = getDatabase() + db.prepare(` + INSERT INTO settings (key, value, description, category, updated_by, updated_at) + VALUES (?, ?, ?, 'onboarding', ?, unixepoch()) + ON CONFLICT(key) DO UPDATE SET + value = excluded.value, + updated_by = excluded.updated_by, + updated_at = unixepoch() + `).run(key, value, `Onboarding: ${key}`, actor) +} + +function readUserOnboardingSetting(key: OnboardingSettingKey, username: string): string { + const scopedValue = getOnboardingSetting(scopedOnboardingKey(key, username)) + if (scopedValue !== '') return scopedValue + return getOnboardingSetting(key) +} + +function writeUserOnboardingSetting(key: OnboardingSettingKey, value: string, actor: string) { + setOnboardingSetting(scopedOnboardingKey(key, actor), value, actor) +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const completed = readUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completed, auth.user.username) === 'true' + const skipped = 
readUserOnboardingSetting(ONBOARDING_SETTING_KEYS.skipped, auth.user.username) === 'true' + const checklistDismissed = readUserOnboardingSetting(ONBOARDING_SETTING_KEYS.checklistDismissed, auth.user.username) === 'true' + const completedStepsRaw = readUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedSteps, auth.user.username) + const completedSteps = parseCompletedSteps(completedStepsRaw, ONBOARDING_STEPS) + + const isAdmin = auth.user.role === 'admin' + const showOnboarding = shouldShowOnboarding({ completed, skipped, isAdmin }) + + const steps = ONBOARDING_STEPS.map((s) => ({ + ...s, + completed: completedSteps.includes(s.id), + })) + + const currentStep = nextIncompleteStepIndex(ONBOARDING_STEPS, completedSteps) + + return NextResponse.json({ + showOnboarding, + completed, + skipped, + checklistDismissed, + isAdmin, + currentStep: currentStep === -1 ? steps.length - 1 : currentStep, + steps, + }) + } catch (error) { + logger.error({ err: error }, 'Onboarding GET error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const body = await request.json() + const { action, step } = body as { action: string; step?: string } + + switch (action) { + case 'complete_step': { + if (!step) return NextResponse.json({ error: 'step is required' }, { status: 400 }) + const valid = ONBOARDING_STEPS.some(s => s.id === step) + if (!valid) return NextResponse.json({ error: 'Invalid step' }, { status: 400 }) + + const raw = readUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedSteps, auth.user.username) + const parsed = parseCompletedSteps(raw, ONBOARDING_STEPS) + const steps = markStepCompleted(parsed, step, ONBOARDING_STEPS) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedSteps, JSON.stringify(steps), 
auth.user.username) + return NextResponse.json({ ok: true, completedSteps: steps }) + } + + case 'complete': { + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completed, 'true', auth.user.username) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedAt, String(Date.now()), auth.user.username) + return NextResponse.json({ ok: true }) + } + + case 'skip': { + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.skipped, 'true', auth.user.username) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedAt, String(Date.now()), auth.user.username) + return NextResponse.json({ ok: true }) + } + + case 'dismiss_checklist': { + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.checklistDismissed, 'true', auth.user.username) + return NextResponse.json({ ok: true }) + } + + case 'reset': { + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completed, 'false', auth.user.username) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedAt, '', auth.user.username) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.skipped, 'false', auth.user.username) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.completedSteps, '[]', auth.user.username) + writeUserOnboardingSetting(ONBOARDING_SETTING_KEYS.checklistDismissed, 'false', auth.user.username) + return NextResponse.json({ ok: true }) + } + + default: + return NextResponse.json({ error: 'Invalid action' }, { status: 400 }) + } + } catch (error) { + logger.error({ err: error }, 'Onboarding POST error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/openclaw/doctor/route.ts b/src/app/api/openclaw/doctor/route.ts new file mode 100644 index 0000000..67f8e10 --- /dev/null +++ b/src/app/api/openclaw/doctor/route.ts @@ -0,0 +1,127 @@ +import { NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { runOpenClaw } from '@/lib/command' +import { config } from '@/lib/config' +import { getDatabase } from '@/lib/db' 
+import { logger } from '@/lib/logger' +import { archiveOrphanTranscriptsForStateDir } from '@/lib/openclaw-doctor-fix' +import { parseOpenClawDoctorOutput } from '@/lib/openclaw-doctor' + +function getCommandDetail(error: unknown): { detail: string; code: number | null } { + const err = error as { + stdout?: string + stderr?: string + message?: string + code?: number | null + } + + return { + detail: [err?.stdout, err?.stderr, err?.message].filter(Boolean).join('\n').trim(), + code: typeof err?.code === 'number' ? err.code : null, + } +} + +function isMissingOpenClaw(detail: string): boolean { + return /enoent|not installed|not reachable|command not found/i.test(detail) +} + +export async function GET(request: Request) { + const auth = requireRole(request, 'admin') + if ('error' in auth) { + return NextResponse.json({ error: auth.error }, { status: auth.status }) + } + + try { + const result = await runOpenClaw(['doctor'], { timeoutMs: 15000 }) + return NextResponse.json(parseOpenClawDoctorOutput(`${result.stdout}\n${result.stderr}`, result.code ?? 0, { + stateDir: config.openclawStateDir, + }), { + headers: { 'Cache-Control': 'no-store' }, + }) + } catch (error) { + const { detail, code } = getCommandDetail(error) + if (isMissingOpenClaw(detail)) { + return NextResponse.json({ error: 'OpenClaw is not installed or not reachable' }, { status: 400 }) + } + + return NextResponse.json(parseOpenClawDoctorOutput(detail, code ?? 1, { + stateDir: config.openclawStateDir, + }), { + headers: { 'Cache-Control': 'no-store' }, + }) + } +} + +export async function POST(request: Request) { + const auth = requireRole(request, 'admin') + if ('error' in auth) { + return NextResponse.json({ error: auth.error }, { status: auth.status }) + } + + try { + const progress: Array<{ step: string; detail: string }> = [] + + const fixResult = await runOpenClaw(['doctor', '--fix'], { timeoutMs: 120000 }) + progress.push({ step: 'doctor', detail: 'Applied OpenClaw doctor config fixes.' 
}) + + try { + await runOpenClaw(['sessions', 'cleanup', '--all-agents', '--enforce', '--fix-missing'], { timeoutMs: 120000 }) + progress.push({ step: 'sessions', detail: 'Pruned missing transcript entries from session stores.' }) + } catch (error) { + const { detail } = getCommandDetail(error) + progress.push({ step: 'sessions', detail: detail || 'Session cleanup skipped.' }) + } + + const orphanFix = archiveOrphanTranscriptsForStateDir(config.openclawStateDir) + progress.push({ + step: 'orphans', + detail: + orphanFix.archivedOrphans > 0 + ? `Archived ${orphanFix.archivedOrphans} orphan transcript file(s) across ${orphanFix.storesScanned} session store(s).` + : `No orphan transcript files found across ${orphanFix.storesScanned} session store(s).`, + }) + + const postFix = await runOpenClaw(['doctor'], { timeoutMs: 15000 }) + const status = parseOpenClawDoctorOutput(`${postFix.stdout}\n${postFix.stderr}`, postFix.code ?? 0, { + stateDir: config.openclawStateDir, + }) + + try { + const db = getDatabase() + db.prepare( + 'INSERT INTO audit_log (action, actor, detail) VALUES (?, ?, ?)' + ).run( + 'openclaw.doctor.fix', + auth.user.username, + JSON.stringify({ level: status.level, healthy: status.healthy, issues: status.issues }) + ) + } catch { + // Non-critical. + } + + return NextResponse.json({ + success: true, + output: `${fixResult.stdout}\n${fixResult.stderr}`.trim(), + progress, + status, + }) + } catch (error) { + const { detail, code } = getCommandDetail(error) + if (isMissingOpenClaw(detail)) { + return NextResponse.json({ error: 'OpenClaw is not installed or not reachable' }, { status: 400 }) + } + + logger.error({ err: error }, 'OpenClaw doctor fix failed') + + return NextResponse.json( + { + error: 'OpenClaw doctor fix failed', + detail, + status: parseOpenClawDoctorOutput(detail, code ?? 
1, { + stateDir: config.openclawStateDir, + }), + }, + { status: 500 } + ) + } +} diff --git a/src/app/api/openclaw/update/route.ts b/src/app/api/openclaw/update/route.ts new file mode 100644 index 0000000..1fb321e --- /dev/null +++ b/src/app/api/openclaw/update/route.ts @@ -0,0 +1,71 @@ +import { NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { runOpenClaw } from '@/lib/command' +import { getDatabase } from '@/lib/db' +import { logger } from '@/lib/logger' + +export async function POST(request: Request) { + const auth = requireRole(request, 'admin') + if ('error' in auth) { + return NextResponse.json({ error: auth.error }, { status: auth.status }) + } + + let installedBefore: string | null = null + + try { + const vResult = await runOpenClaw(['--version'], { timeoutMs: 3000 }) + const match = vResult.stdout.match(/(\d+\.\d+\.\d+)/) + if (match) installedBefore = match[1] + } catch { + return NextResponse.json( + { error: 'OpenClaw is not installed or not reachable' }, + { status: 400 } + ) + } + + try { + const result = await runOpenClaw(['update', '--channel', 'stable'], { + timeoutMs: 5 * 60 * 1000, + }) + + // Read new version after update + let installedAfter: string | null = null + try { + const vResult = await runOpenClaw(['--version'], { timeoutMs: 3000 }) + const match = vResult.stdout.match(/(\d+\.\d+\.\d+)/) + if (match) installedAfter = match[1] + } catch { /* keep null */ } + + // Audit log + try { + const db = getDatabase() + db.prepare( + 'INSERT INTO audit_log (action, actor, detail) VALUES (?, ?, ?)' + ).run( + 'openclaw.update', + auth.user.username, + JSON.stringify({ previousVersion: installedBefore, newVersion: installedAfter }) + ) + } catch { /* non-critical */ } + + return NextResponse.json({ + success: true, + previousVersion: installedBefore, + newVersion: installedAfter, + output: result.stdout, + }) + } catch (err: any) { + const detail = + err?.stderr?.toString?.()?.trim() || + 
err?.stdout?.toString?.()?.trim() || + err?.message || + 'Unknown error during OpenClaw update' + + logger.error({ err }, 'OpenClaw update failed') + + return NextResponse.json( + { error: 'OpenClaw update failed', detail }, + { status: 500 } + ) + } +} diff --git a/src/app/api/openclaw/version/route.ts b/src/app/api/openclaw/version/route.ts new file mode 100644 index 0000000..c95b2ca --- /dev/null +++ b/src/app/api/openclaw/version/route.ts @@ -0,0 +1,77 @@ +import { NextResponse } from 'next/server' +import { runOpenClaw } from '@/lib/command' + +const GITHUB_RELEASES_URL = + 'https://api.github.com/repos/openclaw/openclaw/releases/latest' + +function compareSemver(a: string, b: string): number { + const pa = a.replace(/^v/, '').split('.').map(Number) + const pb = b.replace(/^v/, '').split('.').map(Number) + for (let i = 0; i < Math.max(pa.length, pb.length); i++) { + const na = pa[i] ?? 0 + const nb = pb[i] ?? 0 + if (na > nb) return 1 + if (na < nb) return -1 + } + return 0 +} + +const headers = { 'Cache-Control': 'public, max-age=3600' } + +export async function GET() { + let installed: string | null = null + + try { + const result = await runOpenClaw(['--version'], { timeoutMs: 3000 }) + const match = result.stdout.match(/(\d+\.\d+\.\d+)/) + if (match) installed = match[1] + } catch { + // OpenClaw not installed or not reachable + return NextResponse.json( + { installed: null, latest: null, updateAvailable: false }, + { headers } + ) + } + + if (!installed) { + return NextResponse.json( + { installed: null, latest: null, updateAvailable: false }, + { headers } + ) + } + + try { + const res = await fetch(GITHUB_RELEASES_URL, { + headers: { Accept: 'application/vnd.github+json' }, + next: { revalidate: 3600 }, + }) + + if (!res.ok) { + return NextResponse.json( + { installed, latest: null, updateAvailable: false }, + { headers } + ) + } + + const release = await res.json() + const latest = (release.tag_name ?? 
'').replace(/^v/, '') + const updateAvailable = compareSemver(latest, installed) > 0 + + return NextResponse.json( + { + installed, + latest, + updateAvailable, + releaseUrl: release.html_url ?? '', + releaseNotes: release.body ?? '', + updateCommand: 'openclaw update --channel stable', + }, + { headers } + ) + } catch { + return NextResponse.json( + { installed, latest: null, updateAvailable: false }, + { headers } + ) + } +} diff --git a/src/app/api/projects/[id]/agents/route.ts b/src/app/api/projects/[id]/agents/route.ts new file mode 100644 index 0000000..7a684e4 --- /dev/null +++ b/src/app/api/projects/[id]/agents/route.ts @@ -0,0 +1,179 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { mutationLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' +import { + ensureTenantWorkspaceAccess, + ForbiddenError +} from '@/lib/workspaces' + +function toProjectId(raw: string): number { + const id = Number.parseInt(raw, 10) + return Number.isFinite(id) ? id : NaN +} + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 
1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]/agents', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) + const { id } = await params + const projectId = toProjectId(id) + if (Number.isNaN(projectId)) return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? AND p.workspace_id = ? AND w.tenant_id = ? + LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + // Verify project belongs to workspace + const project = db.prepare(`SELECT id FROM projects WHERE id = ? AND workspace_id = ?`).get(projectId, workspaceId) + if (!project) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + const assignments = db.prepare(` + SELECT id, project_id, agent_name, role, assigned_at + FROM project_agent_assignments + WHERE project_id = ? 
+ ORDER BY assigned_at ASC + `).all(projectId) + + return NextResponse.json({ assignments }) + } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } + logger.error({ err: error }, 'GET /api/projects/[id]/agents error') + return NextResponse.json({ error: 'Failed to fetch agent assignments' }, { status: 500 }) + } +} + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + try { + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]/agents', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) + const { id } = await params + const projectId = toProjectId(id) + if (Number.isNaN(projectId)) return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? AND p.workspace_id = ? AND w.tenant_id = ? + LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + const project = db.prepare(`SELECT id FROM projects WHERE id = ? 
AND workspace_id = ?`).get(projectId, workspaceId) + if (!project) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + const body = await request.json() + const agentName = String(body?.agent_name || '').trim() + const role = String(body?.role || 'member').trim() + + if (!agentName) return NextResponse.json({ error: 'agent_name is required' }, { status: 400 }) + + db.prepare(` + INSERT OR IGNORE INTO project_agent_assignments (project_id, agent_name, role) + VALUES (?, ?, ?) + `).run(projectId, agentName, role) + + return NextResponse.json({ success: true }, { status: 201 }) + } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } + logger.error({ err: error }, 'POST /api/projects/[id]/agents error') + return NextResponse.json({ error: 'Failed to assign agent' }, { status: 500 }) + } +} + +export async function DELETE( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + try { + const db = getDatabase() + const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 
1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]/agents', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) + const { id } = await params + const projectId = toProjectId(id) + if (Number.isNaN(projectId)) return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? AND p.workspace_id = ? AND w.tenant_id = ? + LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + const project = db.prepare(`SELECT id FROM projects WHERE id = ? AND workspace_id = ?`).get(projectId, workspaceId) + if (!project) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + const agentName = new URL(request.url).searchParams.get('agent_name') + if (!agentName) return NextResponse.json({ error: 'agent_name query parameter is required' }, { status: 400 }) + + db.prepare(` + DELETE FROM project_agent_assignments + WHERE project_id = ? AND agent_name = ? 
+ `).run(projectId, agentName) + + return NextResponse.json({ success: true }) + } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } + logger.error({ err: error }, 'DELETE /api/projects/[id]/agents error') + return NextResponse.json({ error: 'Failed to unassign agent' }, { status: 500 }) + } +} diff --git a/src/app/api/projects/[id]/route.ts b/src/app/api/projects/[id]/route.ts index 3fe1832..a1cdfd0 100644 --- a/src/app/api/projects/[id]/route.ts +++ b/src/app/api/projects/[id]/route.ts @@ -3,6 +3,10 @@ import { getDatabase } from '@/lib/db' import { requireRole } from '@/lib/auth' import { mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +import { + ensureTenantWorkspaceAccess, + ForbiddenError +} from '@/lib/workspaces' function normalizePrefix(input: string): string { const normalized = input.trim().toUpperCase().replace(/[^A-Z0-9]/g, '') @@ -24,19 +28,48 @@ export async function GET( try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) const { id } = await params const projectId = toProjectId(id) if (Number.isNaN(projectId)) return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? AND p.workspace_id = ? AND w.tenant_id = ? 
+ LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) - const project = db.prepare(` - SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, created_at, updated_at - FROM projects - WHERE id = ? AND workspace_id = ? - `).get(projectId, workspaceId) - if (!project) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + const row = db.prepare(` + SELECT p.id, p.workspace_id, p.name, p.slug, p.description, p.ticket_prefix, p.ticket_counter, p.status, + p.github_repo, p.deadline, p.color, p.github_sync_enabled, p.github_labels_initialized, p.github_default_branch, p.created_at, p.updated_at, + (SELECT COUNT(*) FROM tasks t WHERE t.project_id = p.id) as task_count, + (SELECT GROUP_CONCAT(paa.agent_name) FROM project_agent_assignments paa WHERE paa.project_id = p.id) as assigned_agents_csv + FROM projects p + WHERE p.id = ? AND p.workspace_id = ? + `).get(projectId, workspaceId) as Record | undefined + if (!row) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) + + const project = { + ...row, + assigned_agents: row.assigned_agents_csv ? String(row.assigned_agents_csv).split(',') : [], + assigned_agents_csv: undefined, + } return NextResponse.json({ project }) } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } logger.error({ err: error }, 'GET /api/projects/[id] error') return NextResponse.json({ error: 'Failed to fetch project' }, { status: 500 }) } @@ -55,9 +88,26 @@ export async function PATCH( try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 
1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) const { id } = await params const projectId = toProjectId(id) if (Number.isNaN(projectId)) return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? AND p.workspace_id = ? AND w.tenant_id = ? + LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) const current = db.prepare(`SELECT * FROM projects WHERE id = ? AND workspace_id = ?`).get(projectId, workspaceId) as any if (!current) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) @@ -99,6 +149,30 @@ export async function PATCH( updates.push('status = ?') paramsList.push(status) } + if (body?.github_repo !== undefined) { + updates.push('github_repo = ?') + paramsList.push(typeof body.github_repo === 'string' ? body.github_repo.trim() || null : null) + } + if (body?.deadline !== undefined) { + updates.push('deadline = ?') + paramsList.push(typeof body.deadline === 'number' ? body.deadline : null) + } + if (body?.color !== undefined) { + updates.push('color = ?') + paramsList.push(typeof body.color === 'string' ? body.color.trim() || null : null) + } + if (body?.github_sync_enabled !== undefined) { + updates.push('github_sync_enabled = ?') + paramsList.push(body.github_sync_enabled ? 1 : 0) + } + if (body?.github_default_branch !== undefined) { + updates.push('github_default_branch = ?') + paramsList.push(typeof body.github_default_branch === 'string' ? 
body.github_default_branch.trim() || 'main' : 'main') + } + if (body?.github_labels_initialized !== undefined) { + updates.push('github_labels_initialized = ?') + paramsList.push(body.github_labels_initialized ? 1 : 0) + } if (updates.length === 0) return NextResponse.json({ error: 'No fields to update' }, { status: 400 }) @@ -110,13 +184,17 @@ export async function PATCH( `).run(...paramsList, projectId, workspaceId) const project = db.prepare(` - SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, created_at, updated_at + SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, + github_repo, deadline, color, github_sync_enabled, github_labels_initialized, github_default_branch, created_at, updated_at FROM projects WHERE id = ? AND workspace_id = ? `).get(projectId, workspaceId) return NextResponse.json({ project }) } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } logger.error({ err: error }, 'PATCH /api/projects/[id] error') return NextResponse.json({ error: 'Failed to update project' }, { status: 500 }) } @@ -135,9 +213,26 @@ export async function DELETE( try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) const { id } = await params const projectId = toProjectId(id) if (Number.isNaN(projectId)) return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? 
AND p.workspace_id = ? AND w.tenant_id = ? + LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) const current = db.prepare(`SELECT * FROM projects WHERE id = ? AND workspace_id = ?`).get(projectId, workspaceId) as any if (!current) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) @@ -171,6 +266,9 @@ export async function DELETE( return NextResponse.json({ success: true, mode: 'delete' }) } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } logger.error({ err: error }, 'DELETE /api/projects/[id] error') return NextResponse.json({ error: 'Failed to delete project' }, { status: 500 }) } diff --git a/src/app/api/projects/[id]/tasks/route.ts b/src/app/api/projects/[id]/tasks/route.ts index ada6891..2168968 100644 --- a/src/app/api/projects/[id]/tasks/route.ts +++ b/src/app/api/projects/[id]/tasks/route.ts @@ -2,6 +2,10 @@ import { NextRequest, NextResponse } from 'next/server' import { getDatabase } from '@/lib/db' import { requireRole } from '@/lib/auth' import { logger } from '@/lib/logger' +import { + ensureTenantWorkspaceAccess, + ForbiddenError +} from '@/lib/workspaces' function formatTicketRef(prefix?: string | null, num?: number | null): string | undefined { if (!prefix || typeof num !== 'number' || !Number.isFinite(num) || num <= 0) return undefined @@ -18,11 +22,28 @@ export async function GET( try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 
1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects/[id]/tasks', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) const { id } = await params const projectId = Number.parseInt(id, 10) if (!Number.isFinite(projectId)) { return NextResponse.json({ error: 'Invalid project ID' }, { status: 400 }) } + const projectScope = db.prepare(` + SELECT p.id + FROM projects p + JOIN workspaces w ON w.id = p.workspace_id + WHERE p.id = ? AND p.workspace_id = ? AND w.tenant_id = ? + LIMIT 1 + `).get(projectId, workspaceId, tenantId) + if (!projectScope) return NextResponse.json({ error: 'Project not found' }, { status: 404 }) const project = db.prepare(` SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, created_at, updated_at @@ -49,6 +70,9 @@ export async function GET( })) }) } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } logger.error({ err: error }, 'GET /api/projects/[id]/tasks error') return NextResponse.json({ error: 'Failed to fetch project tasks' }, { status: 500 }) } diff --git a/src/app/api/projects/route.ts b/src/app/api/projects/route.ts index 177fb83..09e6710 100644 --- a/src/app/api/projects/route.ts +++ b/src/app/api/projects/route.ts @@ -3,6 +3,7 @@ import { getDatabase } from '@/lib/db' import { requireRole } from '@/lib/auth' import { mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +import { ensureTenantWorkspaceAccess, ForbiddenError } from '@/lib/workspaces' function slugify(input: string): string { return input @@ -25,18 +26,39 @@ export async function GET(request: NextRequest) { try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 
1 + const tenantId = auth.user.tenant_id ?? 1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) const includeArchived = new URL(request.url).searchParams.get('includeArchived') === '1' - const projects = db.prepare(` - SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, created_at, updated_at - FROM projects - WHERE workspace_id = ? - ${includeArchived ? '' : "AND status = 'active'"} - ORDER BY name COLLATE NOCASE ASC - `).all(workspaceId) + const rows = db.prepare(` + SELECT p.id, p.workspace_id, p.name, p.slug, p.description, p.ticket_prefix, p.ticket_counter, p.status, + p.github_repo, p.deadline, p.color, p.github_sync_enabled, p.github_labels_initialized, p.github_default_branch, p.created_at, p.updated_at, + (SELECT COUNT(*) FROM tasks t WHERE t.project_id = p.id) as task_count, + (SELECT GROUP_CONCAT(paa.agent_name) FROM project_agent_assignments paa WHERE paa.project_id = p.id) as assigned_agents_csv + FROM projects p + WHERE p.workspace_id = ? + ${includeArchived ? '' : "AND p.status = 'active'"} + ORDER BY p.name COLLATE NOCASE ASC + `).all(workspaceId) as Array> + + const projects = rows.map(row => ({ + ...row, + assigned_agents: row.assigned_agents_csv ? 
String(row.assigned_agents_csv).split(',') : [], + assigned_agents_csv: undefined, + })) return NextResponse.json({ projects }) } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } logger.error({ err: error }, 'GET /api/projects error') return NextResponse.json({ error: 'Failed to fetch projects' }, { status: 500 }) } @@ -52,12 +74,24 @@ export async function POST(request: NextRequest) { try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1 + const tenantId = auth.user.tenant_id ?? 1 + const forwardedFor = (request.headers.get('x-forwarded-for') || '').split(',')[0]?.trim() || null + ensureTenantWorkspaceAccess(db, tenantId, workspaceId, { + actor: auth.user.username, + actorId: auth.user.id, + route: '/api/projects', + ipAddress: forwardedFor, + userAgent: request.headers.get('user-agent'), + }) const body = await request.json() const name = String(body?.name || '').trim() const description = typeof body?.description === 'string' ? body.description.trim() : '' const prefixInput = String(body?.ticket_prefix || body?.ticketPrefix || '').trim() const slugInput = String(body?.slug || '').trim() + const githubRepo = typeof body?.github_repo === 'string' ? body.github_repo.trim() || null : null + const deadline = typeof body?.deadline === 'number' ? body.deadline : null + const color = typeof body?.color === 'string' ? 
body.color.trim() || null : null if (!name) return NextResponse.json({ error: 'Project name is required' }, { status: 400 }) @@ -76,18 +110,22 @@ export async function POST(request: NextRequest) { } const result = db.prepare(` - INSERT INTO projects (workspace_id, name, slug, description, ticket_prefix, status, created_at, updated_at) - VALUES (?, ?, ?, ?, ?, 'active', unixepoch(), unixepoch()) - `).run(workspaceId, name, slug, description || null, ticketPrefix) + INSERT INTO projects (workspace_id, name, slug, description, ticket_prefix, github_repo, deadline, color, status, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active', unixepoch(), unixepoch()) + `).run(workspaceId, name, slug, description || null, ticketPrefix, githubRepo, deadline, color) const project = db.prepare(` - SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, created_at, updated_at + SELECT id, workspace_id, name, slug, description, ticket_prefix, ticket_counter, status, + github_repo, deadline, color, github_sync_enabled, github_labels_initialized, github_default_branch, created_at, updated_at FROM projects WHERE id = ? 
`).get(Number(result.lastInsertRowid)) return NextResponse.json({ project }, { status: 201 }) } catch (error) { + if (error instanceof ForbiddenError) { + return NextResponse.json({ error: error.message }, { status: error.status }) + } logger.error({ err: error }, 'POST /api/projects error') return NextResponse.json({ error: 'Failed to create project' }, { status: 500 }) } diff --git a/src/app/api/quality-review/route.ts b/src/app/api/quality-review/route.ts index d07bead..1455517 100644 --- a/src/app/api/quality-review/route.ts +++ b/src/app/api/quality-review/route.ts @@ -104,13 +104,24 @@ export async function POST(request: NextRequest) { workspaceId ) - // Auto-advance task to 'done' when aegis approves - if (status === 'approved' && reviewer === 'aegis') { + // Auto-advance task based on review outcome + if (status === 'approved') { db.prepare('UPDATE tasks SET status = ?, updated_at = unixepoch() WHERE id = ? AND workspace_id = ?') .run('done', taskId, workspaceId) eventBus.broadcast('task.status_changed', { id: taskId, status: 'done', + previous_status: 'review', + updated_at: Math.floor(Date.now() / 1000), + }) + } else if (status === 'rejected') { + // Rejected: push back to in_progress with the rejection notes as error_message + db.prepare('UPDATE tasks SET status = ?, error_message = ?, updated_at = unixepoch() WHERE id = ? 
AND workspace_id = ?') + .run('in_progress', `Quality review rejected by ${reviewer}: ${notes}`, taskId, workspaceId) + eventBus.broadcast('task.status_changed', { + id: taskId, + status: 'in_progress', + previous_status: 'review', updated_at: Math.floor(Date.now() / 1000), }) } diff --git a/src/app/api/releases/check/route.ts b/src/app/api/releases/check/route.ts index d5e202d..3eedbeb 100644 --- a/src/app/api/releases/check/route.ts +++ b/src/app/api/releases/check/route.ts @@ -1,4 +1,5 @@ import { NextResponse } from 'next/server' +import { existsSync } from 'node:fs' import { APP_VERSION } from '@/lib/version' const GITHUB_RELEASES_URL = @@ -35,6 +36,8 @@ export async function GET() { const latestVersion = (release.tag_name ?? '').replace(/^v/, '') const updateAvailable = compareSemver(latestVersion, APP_VERSION) > 0 + const deploymentMode = existsSync('/.dockerenv') ? 'docker' : 'bare-metal' + return NextResponse.json( { updateAvailable, @@ -42,6 +45,7 @@ export async function GET() { latestVersion, releaseUrl: release.html_url ?? '', releaseNotes: release.body ?? 
'', + deploymentMode, }, { headers: { 'Cache-Control': 'public, max-age=3600' } } ) diff --git a/src/app/api/releases/update/route.ts b/src/app/api/releases/update/route.ts new file mode 100644 index 0000000..98f223b --- /dev/null +++ b/src/app/api/releases/update/route.ts @@ -0,0 +1,135 @@ +import { NextResponse } from 'next/server' +import { execFileSync } from 'child_process' +import { readFileSync } from 'fs' +import { join } from 'path' +import { requireRole } from '@/lib/auth' +import { getDatabase } from '@/lib/db' +import { APP_VERSION } from '@/lib/version' + +const UPDATE_TIMEOUT = 5 * 60 * 1000 // 5 minutes +const MAX_BUFFER = 10 * 1024 * 1024 // 10 MB + +const EXEC_OPTS = { + timeout: UPDATE_TIMEOUT, + maxBuffer: MAX_BUFFER, + encoding: 'utf-8' as const, +} + +function git(args: string[], cwd: string): string { + return execFileSync('git', args, { ...EXEC_OPTS, cwd }).trim() +} + +function pnpm(args: string[], cwd: string): string { + return execFileSync('pnpm', args, { ...EXEC_OPTS, cwd }).trim() +} + +export async function POST(request: Request) { + const auth = requireRole(request, 'admin') + if (auth.error) { + return NextResponse.json({ error: auth.error }, { status: auth.status }) + } + + const user = auth.user! + const cwd = process.cwd() + const steps: { step: string; output: string }[] = [] + + try { + // Parse target version from request body + const body = await request.json().catch(() => ({})) + const targetVersion: string | undefined = body.targetVersion + if (!targetVersion) { + return NextResponse.json( + { error: 'Missing targetVersion in request body' }, + { status: 400 } + ) + } + + // Normalize to tag format (e.g. "1.2.0" -> "v1.2.0") + const tag = targetVersion.startsWith('v') ? targetVersion : `v${targetVersion}` + + // 1. Check for uncommitted changes + const status = git(['status', '--porcelain'], cwd) + if (status) { + return NextResponse.json( + { + error: 'Working tree has uncommitted changes. 
Please commit or stash them before updating.', + dirty: true, + files: status.split('\n').slice(0, 20), + }, + { status: 409 } + ) + } + + // 2. Fetch tags and release code from origin + const fetchOut = git(['fetch', 'origin', '--tags', '--force'], cwd) + steps.push({ step: 'git fetch', output: fetchOut || 'OK' }) + + // 3. Verify the tag exists + try { + git(['rev-parse', '--verify', `refs/tags/${tag}`], cwd) + } catch { + return NextResponse.json( + { error: `Release tag ${tag} not found in remote` }, + { status: 404 } + ) + } + + // 4. Checkout the release tag + const checkoutOut = git(['checkout', tag], cwd) + steps.push({ step: `git checkout ${tag}`, output: checkoutOut }) + + // 5. Install dependencies + const installOut = pnpm(['install', '--frozen-lockfile'], cwd) + steps.push({ step: 'pnpm install', output: installOut }) + + // 6. Build + const buildOut = pnpm(['build'], cwd) + steps.push({ step: 'pnpm build', output: buildOut }) + + // 7. Read new version from package.json + const newPkg = JSON.parse(readFileSync(join(cwd, 'package.json'), 'utf-8')) + const newVersion: string = newPkg.version ?? targetVersion + + // 8. 
Log to audit_log + try { + const db = getDatabase() + db.prepare( + 'INSERT INTO audit_log (action, actor, detail) VALUES (?, ?, ?)' + ).run( + 'system.update', + user.username, + JSON.stringify({ + previousVersion: APP_VERSION, + newVersion, + tag, + }) + ) + } catch { + // Non-critical -- don't fail the update if audit logging fails + } + + return NextResponse.json({ + success: true, + previousVersion: APP_VERSION, + newVersion, + tag, + steps, + restartRequired: true, + }) + } catch (err: any) { + const message = + err?.stderr?.toString?.()?.trim() || + err?.stdout?.toString?.()?.trim() || + err?.message || + 'Unknown error during update' + + return NextResponse.json( + { + error: 'Update failed', + detail: message, + steps, + }, + { status: 500 } + ) + } +} diff --git a/src/app/api/schedule-parse/route.ts b/src/app/api/schedule-parse/route.ts new file mode 100644 index 0000000..7172c3e --- /dev/null +++ b/src/app/api/schedule-parse/route.ts @@ -0,0 +1,20 @@ +import { NextRequest, NextResponse } from 'next/server' +import { parseNaturalSchedule } from '@/lib/schedule-parser' + +/** + * GET /api/schedule-parse?input=every+morning+at+9am + * Returns { cronExpr, humanReadable } or { error } + */ +export async function GET(request: NextRequest) { + const input = request.nextUrl.searchParams.get('input') + if (!input) { + return NextResponse.json({ error: 'Missing input parameter' }, { status: 400 }) + } + + const result = parseNaturalSchedule(input) + if (!result) { + return NextResponse.json({ error: 'Could not parse schedule expression' }, { status: 400 }) + } + + return NextResponse.json(result) +} diff --git a/src/app/api/search/route.ts b/src/app/api/search/route.ts index f97ef63..c03f9d5 100644 --- a/src/app/api/search/route.ts +++ b/src/app/api/search/route.ts @@ -103,8 +103,8 @@ export async function GET(request: NextRequest) { } catch { /* table might not exist */ } } - // Search audit log - if (!typeFilter || typeFilter === 'audit') { + // Search audit log 
(admin-only — audit_log is instance-global) + if ((!typeFilter || typeFilter === 'audit') && auth.user.role === 'admin') { try { const audits = db.prepare(` SELECT id, action, actor, detail, created_at @@ -130,9 +130,9 @@ export async function GET(request: NextRequest) { try { const messages = db.prepare(` SELECT id, from_agent, to_agent, content, conversation_id, created_at - FROM messages WHERE content LIKE ? OR from_agent LIKE ? + FROM messages WHERE workspace_id = ? AND (content LIKE ? OR from_agent LIKE ?) ORDER BY created_at DESC LIMIT ? - `).all(likeQ, likeQ, limit) as any[] + `).all(workspaceId, likeQ, likeQ, limit) as any[] for (const m of messages) { results.push({ type: 'message', @@ -152,9 +152,9 @@ export async function GET(request: NextRequest) { try { const webhooks = db.prepare(` SELECT id, name, url, events, created_at - FROM webhooks WHERE name LIKE ? OR url LIKE ? + FROM webhooks WHERE workspace_id = ? AND (name LIKE ? OR url LIKE ?) ORDER BY created_at DESC LIMIT ? - `).all(likeQ, likeQ, limit) as any[] + `).all(workspaceId, likeQ, likeQ, limit) as any[] for (const w of webhooks) { results.push({ type: 'webhook', @@ -173,9 +173,9 @@ export async function GET(request: NextRequest) { try { const pipelines = db.prepare(` SELECT id, name, description, created_at - FROM workflow_pipelines WHERE name LIKE ? OR description LIKE ? + FROM workflow_pipelines WHERE workspace_id = ? AND (name LIKE ? OR description LIKE ?) ORDER BY created_at DESC LIMIT ? 
- `).all(likeQ, likeQ, limit) as any[] + `).all(workspaceId, likeQ, likeQ, limit) as any[] for (const p of pipelines) { results.push({ type: 'pipeline', diff --git a/src/app/api/security-audit/route.ts b/src/app/api/security-audit/route.ts new file mode 100644 index 0000000..4169d52 --- /dev/null +++ b/src/app/api/security-audit/route.ts @@ -0,0 +1,214 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase } from '@/lib/db' +import { requireRole } from '@/lib/auth' +import { readLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' +import { getSecurityPosture } from '@/lib/security-events' +import { getMcpCallStats } from '@/lib/mcp-audit' +import { runSecurityScan } from '@/lib/security-scan' + +type Timeframe = 'hour' | 'day' | 'week' | 'month' + +const TIMEFRAME_SECONDS: Record = { + hour: 3600, + day: 86400, + week: 7 * 86400, + month: 30 * 86400, +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = readLimiter(request) + if (rateCheck) return rateCheck + + try { + const { searchParams } = new URL(request.url) + const timeframe = (searchParams.get('timeframe') || 'day') as Timeframe + const eventTypeFilter = searchParams.get('event_type') + const severityFilter = searchParams.get('severity') + const agentFilter = searchParams.get('agent') + const workspaceId = auth.user.workspace_id ?? 
1 + + const seconds = TIMEFRAME_SECONDS[timeframe] || TIMEFRAME_SECONDS.day + const since = Math.floor(Date.now() / 1000) - seconds + const db = getDatabase() + + // Infrastructure scan (same as onboarding security scan) + const scan = runSecurityScan() + + // Event-based posture (incidents, trust scores) + const eventPosture = getSecurityPosture(workspaceId) + + // Blend: weighted average — 70% infrastructure config, 30% event history + const blendedScore = Math.round(scan.score * 0.7 + eventPosture.score * 0.3) + const level = blendedScore >= 90 ? 'hardened' + : blendedScore >= 70 ? 'secure' + : blendedScore >= 40 ? 'needs-attention' + : 'at-risk' + + // Auth events + const authEventsQuery = db.prepare(` + SELECT event_type, severity, agent_name, detail, ip_address, created_at + FROM security_events + WHERE workspace_id = ? AND created_at > ? + AND event_type IN ('auth.failure', 'auth.token_rotation', 'auth.access_denied') + ORDER BY created_at DESC + LIMIT 50 + `).all(workspaceId, since) as any[] + + const loginFailures = authEventsQuery.filter(e => e.event_type === 'auth.failure').length + const tokenRotations = authEventsQuery.filter(e => e.event_type === 'auth.token_rotation').length + const accessDenials = authEventsQuery.filter(e => e.event_type === 'auth.access_denied').length + + // Agent trust + const agents = db.prepare(` + SELECT agent_name, trust_score, last_anomaly_at, + auth_failures + injection_attempts + rate_limit_hits + secret_exposures as anomalies + FROM agent_trust_scores + WHERE workspace_id = ? + ORDER BY trust_score ASC + `).all(workspaceId) as any[] + + const flaggedCount = agents.filter((a: any) => a.trust_score < 0.8).length + + // Secret exposures + const secretEvents = db.prepare(` + SELECT event_type, severity, agent_name, detail, created_at + FROM security_events + WHERE workspace_id = ? AND created_at > ? 
AND event_type = 'secret.exposure' + ORDER BY created_at DESC + LIMIT 20 + `).all(workspaceId, since) as any[] + + // MCP audit summary + const mcpTotals = db.prepare(` + SELECT + COUNT(*) as total_calls, + COUNT(DISTINCT tool_name) as unique_tools, + SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as failures + FROM mcp_call_log + WHERE workspace_id = ? AND created_at > ? + `).get(workspaceId, since) as any + + const topTools = db.prepare(` + SELECT tool_name, COUNT(*) as count + FROM mcp_call_log + WHERE workspace_id = ? AND created_at > ? + GROUP BY tool_name + ORDER BY count DESC + LIMIT 10 + `).all(workspaceId, since) as any[] + + const totalCalls = mcpTotals?.total_calls ?? 0 + const failureRate = totalCalls > 0 + ? Math.round(((mcpTotals?.failures ?? 0) / totalCalls) * 10000) / 100 + : 0 + + // Rate limit hits + const rateLimitEvents = db.prepare(` + SELECT COUNT(*) as total + FROM security_events + WHERE workspace_id = ? AND created_at > ? AND event_type = 'rate_limit.hit' + `).get(workspaceId, since) as any + + const rateLimitByIp = db.prepare(` + SELECT ip_address, COUNT(*) as count + FROM security_events + WHERE workspace_id = ? AND created_at > ? AND event_type = 'rate_limit.hit' AND ip_address IS NOT NULL + GROUP BY ip_address + ORDER BY count DESC + LIMIT 10 + `).all(workspaceId, since) as any[] + + // Injection attempts + const injectionEvents = db.prepare(` + SELECT event_type, severity, agent_name, detail, ip_address, created_at + FROM security_events + WHERE workspace_id = ? AND created_at > ? AND event_type = 'injection.attempt' + ORDER BY created_at DESC + LIMIT 20 + `).all(workspaceId, since) as any[] + + // Timeline (bucketed by hour) + const bucketSize = timeframe === 'hour' ? 
300 : 3600 + let timelineQuery = ` + SELECT + (created_at / ${bucketSize}) * ${bucketSize} as bucket, + COUNT(*) as event_count, + MAX(CASE WHEN severity = 'critical' THEN 3 WHEN severity = 'warning' THEN 2 ELSE 1 END) as max_severity + FROM security_events + WHERE workspace_id = ? AND created_at > ? + ` + const timelineParams: any[] = [workspaceId, since] + + if (eventTypeFilter) { + timelineQuery += ' AND event_type = ?' + timelineParams.push(eventTypeFilter) + } + if (severityFilter) { + timelineQuery += ' AND severity = ?' + timelineParams.push(severityFilter) + } + if (agentFilter) { + timelineQuery += ' AND agent_name = ?' + timelineParams.push(agentFilter) + } + + timelineQuery += ' GROUP BY bucket ORDER BY bucket ASC' + + const timeline = db.prepare(timelineQuery).all(...timelineParams) as any[] + + const severityMap: Record = { 3: 'critical', 2: 'warning', 1: 'info' } + + return NextResponse.json({ + posture: { score: blendedScore, level }, + scan: { + score: scan.score, + overall: scan.overall, + categories: scan.categories, + }, + authEvents: { + loginFailures, + tokenRotations, + accessDenials, + recentEvents: authEventsQuery.slice(0, 10), + }, + agentTrust: { + agents: agents.map((a: any) => ({ + name: a.agent_name, + score: Math.round(a.trust_score * 100) / 100, + anomalies: a.anomalies, + })), + flaggedCount, + }, + secretExposures: { + total: secretEvents.length, + recent: secretEvents.slice(0, 5), + }, + mcpAudit: { + totalCalls, + uniqueTools: mcpTotals?.unique_tools ?? 0, + failureRate, + topTools: topTools.map((t: any) => ({ name: t.tool_name, count: t.count })), + }, + rateLimits: { + totalHits: rateLimitEvents?.total ?? 
0, + byIp: rateLimitByIp.map((r: any) => ({ ip: r.ip_address, count: r.count })), + }, + injectionAttempts: { + total: injectionEvents.length, + recent: injectionEvents.slice(0, 5), + }, + timeline: timeline.map((t: any) => ({ + timestamp: t.bucket, + eventCount: t.event_count, + severity: severityMap[t.max_severity] || 'info', + })), + }) + } catch (error) { + logger.error({ err: error }, 'GET /api/security-audit error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/security-scan/agent/route.ts b/src/app/api/security-scan/agent/route.ts new file mode 100644 index 0000000..7d4a3b6 --- /dev/null +++ b/src/app/api/security-scan/agent/route.ts @@ -0,0 +1,192 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { logger } from '@/lib/logger' +import { runSecurityScan, FIX_SAFETY, type CheckSeverity, type FixSafety, type Check } from '@/lib/security-scan' + +type FixScope = 'safe' | 'safe+restart' | 'all' + +interface AgentScanFixRequest { + action: 'scan' | 'fix' | 'scan-and-fix' + fixScope?: FixScope + ids?: string[] + force?: boolean + dryRun?: boolean +} + +function isFixableInScope(checkId: string, scope: FixScope, force: boolean): boolean { + const safety = FIX_SAFETY[checkId] + if (!safety) return false + if (safety === 'safe') return true + if (safety === 'requires-restart' && (scope === 'safe+restart' || scope === 'all')) return true + if (safety === 'requires-review' && scope === 'all' && force) return true + return false +} + +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + let body: AgentScanFixRequest + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 }) + } + + const { action, fixScope = 'safe+restart', ids, force = 
false, dryRun = false } = body + + if (!action || !['scan', 'fix', 'scan-and-fix'].includes(action)) { + return NextResponse.json({ error: 'action must be "scan", "fix", or "scan-and-fix"' }, { status: 400 }) + } + + try { + // Always scan first + const scanResult = runSecurityScan() + const allChecks = Object.values(scanResult.categories).flatMap(c => c.checks) + const failingChecks = allChecks.filter(c => c.status !== 'pass') + + const scanResponse = { + overall: scanResult.overall, + score: scanResult.score, + failingChecks: failingChecks.map(c => ({ + id: c.id, + name: c.name, + status: c.status, + severity: c.severity ?? 'medium' as CheckSeverity, + detail: c.detail, + fix: c.fix, + fixSafety: FIX_SAFETY[c.id] ?? c.fixSafety ?? ('manual-only' as FixSafety), + autoFixable: isFixableInScope(c.id, fixScope, force), + })), + passingCount: allChecks.length - failingChecks.length, + totalCount: allChecks.length, + categories: Object.fromEntries( + Object.entries(scanResult.categories).map(([key, cat]) => [ + key, + { score: cat.score, failCount: cat.checks.filter(c => c.status !== 'pass').length }, + ]) + ), + } + + if (action === 'scan') { + const criticalCount = failingChecks.filter(c => c.severity === 'critical').length + const highCount = failingChecks.filter(c => c.severity === 'high').length + return NextResponse.json({ + scan: scanResponse, + summary: `Security score: ${scanResult.score}/100 (${scanResult.overall}). ${failingChecks.length} issue(s): ${criticalCount} critical, ${highCount} high.`, + }) + } + + // Fix or scan-and-fix + const targetIds = ids ? 
new Set(ids) : null + const checksToFix = failingChecks.filter(c => { + if (targetIds && !targetIds.has(c.id)) return false + return isFixableInScope(c.id, fixScope, force) + }) + + const skipped: Array<{ id: string; reason: string }> = [] + const requiresManual: Array<{ id: string; name: string; instructions: string }> = [] + + // Identify skipped and manual checks + for (const c of failingChecks) { + if (targetIds && !targetIds.has(c.id)) continue + const safety = FIX_SAFETY[c.id] ?? c.fixSafety + if (!safety || safety === 'manual-only') { + requiresManual.push({ id: c.id, name: c.name, instructions: c.fix }) + } else if (!isFixableInScope(c.id, fixScope, force)) { + const reason = safety === 'requires-review' && !force + ? 'requires-review: set force=true to apply' + : safety === 'requires-restart' && fixScope === 'safe' + ? 'requires-restart: use fixScope "safe+restart" or "all"' + : `fix safety level "${safety}" not in scope "${fixScope}"` + skipped.push({ id: c.id, reason }) + } + } + + if (dryRun) { + return NextResponse.json({ + scan: scanResponse, + fixes: { + applied: checksToFix.map(c => ({ + id: c.id, + name: c.name, + fixed: false, + detail: `[dry-run] Would apply fix: ${c.fix}`, + fixSafety: FIX_SAFETY[c.id], + })), + skipped, + requiresRestart: checksToFix.some(c => FIX_SAFETY[c.id] === 'requires-restart'), + requiresManual, + }, + summary: `Dry run: ${checksToFix.length} fix(es) would be applied, ${skipped.length} skipped, ${requiresManual.length} require manual action.`, + }) + } + + // Actually apply fixes by calling the fix endpoint logic + const fixIds = checksToFix.map(c => c.id) + let fixResponse: any = { fixed: 0, failed: 0, results: [] } + + if (fixIds.length > 0) { + // Import and call the fix route handler internally + const fixUrl = new URL('/api/security-scan/fix', request.url) + const fixReq = new NextRequest(fixUrl, { + method: 'POST', + headers: request.headers, + body: JSON.stringify({ ids: fixIds }), + }) + + // Dynamically import 
to avoid circular deps + const { POST: fixHandler } = await import('../fix/route') + const fixRes = await fixHandler(fixReq) + fixResponse = await fixRes.json() + } + + const applied = (fixResponse.results || []).map((r: any) => ({ + ...r, + fixSafety: FIX_SAFETY[r.id], + })) + + const requiresRestart = applied.some((r: any) => r.fixed && FIX_SAFETY[r.id] === 'requires-restart') + + logger.info({ action, fixScope, force, dryRun, applied: applied.length, skipped: skipped.length }, 'Agent security scan+fix') + + // Re-scan after fixes to get updated score + const postFixScan = fixIds.length > 0 ? runSecurityScan() : scanResult + + return NextResponse.json({ + scan: { + ...scanResponse, + score: postFixScan.score, + overall: postFixScan.overall, + }, + fixes: { + applied, + skipped, + requiresRestart, + requiresManual, + }, + summary: buildSummary(applied, skipped, requiresManual, requiresRestart, postFixScan.score, postFixScan.overall), + }) + } catch (error) { + logger.error({ err: error }, 'Agent security scan error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +function buildSummary( + applied: any[], + skipped: any[], + requiresManual: any[], + requiresRestart: boolean, + score: number, + overall: string, +): string { + const parts: string[] = [] + const fixedCount = applied.filter((r: any) => r.fixed).length + if (fixedCount > 0) parts.push(`${fixedCount} issue(s) fixed`) + if (skipped.length > 0) parts.push(`${skipped.length} skipped`) + if (requiresManual.length > 0) parts.push(`${requiresManual.length} require manual action`) + if (requiresRestart) parts.push('server restart recommended') + parts.push(`score: ${score}/100 (${overall})`) + return parts.join('. ') + '.' 
+} diff --git a/src/app/api/security-scan/fix/route.ts b/src/app/api/security-scan/fix/route.ts new file mode 100644 index 0000000..67031a9 --- /dev/null +++ b/src/app/api/security-scan/fix/route.ts @@ -0,0 +1,377 @@ +import { NextRequest, NextResponse } from 'next/server' +import { existsSync, readFileSync, writeFileSync, chmodSync, statSync } from 'node:fs' +import { execFileSync } from 'node:child_process' +import path from 'node:path' +import crypto from 'node:crypto' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { getDatabase } from '@/lib/db' +import { logger } from '@/lib/logger' +import { FIX_SAFETY, runSecurityScan, type FixSafety } from '@/lib/security-scan' + +export interface FixResult { + id: string + name: string + fixed: boolean + detail: string + fixSafety?: FixSafety +} + +function shouldMutateRuntimeEnv() { + return process.env.MISSION_CONTROL_TEST_MODE !== '1' +} + +function normalizeHostname(raw: string): string { + return raw.trim().replace(/^\[|\]$/g, '').split(':')[0].replace(/\.$/, '').toLowerCase() +} + +function parseForwardedHost(forwarded: string | null): string[] { + if (!forwarded) return [] + const hosts: string[] = [] + for (const part of forwarded.split(',')) { + const match = /(?:^|;)\s*host="?([^";]+)"?/i.exec(part) + if (match?.[1]) hosts.push(match[1]) + } + return hosts +} + +function getRequestHostCandidates(request: NextRequest): string[] { + const rawCandidates = [ + ...(request.headers.get('x-forwarded-host') || '').split(','), + ...(request.headers.get('x-original-host') || '').split(','), + ...(request.headers.get('x-forwarded-server') || '').split(','), + ...parseForwardedHost(request.headers.get('forwarded')), + request.headers.get('host') || '', + request.nextUrl.host || '', + request.nextUrl.hostname || '', + ] + + return [...new Set(rawCandidates.map(normalizeHostname).filter(Boolean))] +} + +function getFailingChecks() { + return Object.values(runSecurityScan().categories) 
+ .flatMap((category) => category.checks) + .filter((check) => check.status !== 'pass') +} + +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + // Optional: pass { ids: ["check_id"] } to fix only specific issues + let targetIds: Set<string> | null = null + try { + const body = await request.json() + if (Array.isArray(body?.ids) && body.ids.length > 0) { + targetIds = new Set(body.ids as string[]) + } + } catch { /* no body = fix all */ } + + const shouldFix = (id: string) => !targetIds || targetIds.has(id) + + const results: FixResult[] = [] + const envPaths = [ + path.join(process.cwd(), '.env'), + path.join(process.cwd(), '.env.local'), + ] + + function readEnv(filePath: string): string { + try { return readFileSync(filePath, 'utf-8') } catch { return '' } + } + + function setEnvVar(key: string, value: string) { + let targetPath = envPaths[0] + for (const filePath of envPaths) { + const content = readEnv(filePath) + if (new RegExp(`^${key}=.*$`, 'm').test(content)) { + targetPath = filePath + break + } + } + + let content = readEnv(targetPath) + const regex = new RegExp(`^${key}=.*$`, 'm') + if (regex.test(content)) { + content = content.replace(regex, `${key}=${value}`) + } else { + content = content.trimEnd() + `\n${key}=${value}\n` + } + writeFileSync(targetPath, content, 'utf-8') + if (shouldMutateRuntimeEnv()) { + process.env[key] = value + } + } + + function unsetEnvVar(key: string) { + const regex = new RegExp(`^${key}=.*\n?`, 'm') + for (const filePath of envPaths) { + let content = readEnv(filePath) + if (regex.test(content)) { + content = content.replace(regex, '') + writeFileSync(filePath, content, 'utf-8') + } + } + if (shouldMutateRuntimeEnv()) { + delete process.env[key] + } + } + + // 1.
Fix .env file permissions + const envPath = envPaths[0] + if (shouldFix('env_permissions') && existsSync(envPath)) { + try { + const stat = statSync(envPath) + const mode = (stat.mode & 0o777).toString(8) + if (mode !== '600') { + chmodSync(envPath, 0o600) + results.push({ id: 'env_permissions', name: '.env file permissions', fixed: true, detail: `Changed from ${mode} to 600`, fixSafety: FIX_SAFETY['env_permissions'] }) + } else { + results.push({ id: 'env_permissions', name: '.env file permissions', fixed: true, detail: 'Already 600', fixSafety: FIX_SAFETY['env_permissions'] }) + } + } catch (e: any) { + results.push({ id: 'env_permissions', name: '.env file permissions', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['env_permissions'] }) + } + } + + // 2. Fix MC_ALLOWED_HOSTS if not set + const allowedHosts = (process.env.MC_ALLOWED_HOSTS || '').trim() + const allowAny = process.env.MC_ALLOW_ANY_HOST + if (shouldFix('allowed_hosts') && (!allowedHosts || allowAny === '1' || allowAny === 'true')) { + try { + if (allowAny) { + unsetEnvVar('MC_ALLOW_ANY_HOST') + } + const preservedHosts = new Set([ + 'localhost', + '127.0.0.1', + ...allowedHosts.split(',').map((host) => normalizeHostname(host)).filter(Boolean), + ...getRequestHostCandidates(request), + ]) + const mergedHosts = Array.from(preservedHosts) + setEnvVar('MC_ALLOWED_HOSTS', mergedHosts.join(',')) + results.push({ id: 'allowed_hosts', name: 'Host allowlist', fixed: true, detail: `Set MC_ALLOWED_HOSTS=${mergedHosts.join(',')}`, fixSafety: FIX_SAFETY['allowed_hosts'] }) + } catch (e: any) { + results.push({ id: 'allowed_hosts', name: 'Host allowlist', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['allowed_hosts'] }) + } + } + + // 3. 
Fix MC_ENABLE_HSTS + if (shouldFix('hsts_enabled') && process.env.MC_ENABLE_HSTS !== '1') { + try { + setEnvVar('MC_ENABLE_HSTS', '1') + results.push({ id: 'hsts_enabled', name: 'HSTS enabled', fixed: true, detail: 'Set MC_ENABLE_HSTS=1', fixSafety: FIX_SAFETY['hsts_enabled'] }) + } catch (e: any) { + results.push({ id: 'hsts_enabled', name: 'HSTS', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['hsts_enabled'] }) + } + } + + // 4. Fix MC_COOKIE_SECURE + const cookieSecure = process.env.MC_COOKIE_SECURE + if (shouldFix('cookie_secure') && cookieSecure !== '1' && cookieSecure !== 'true') { + try { + setEnvVar('MC_COOKIE_SECURE', '1') + results.push({ id: 'cookie_secure', name: 'Secure cookies', fixed: true, detail: 'Set MC_COOKIE_SECURE=1', fixSafety: FIX_SAFETY['cookie_secure'] }) + } catch (e: any) { + results.push({ id: 'cookie_secure', name: 'Secure cookies', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['cookie_secure'] }) + } + } + + // 4b. Re-enable runtime rate limiting + const rateLimitDisabled = process.env.MC_DISABLE_RATE_LIMIT + if (shouldFix('rate_limiting') && rateLimitDisabled) { + try { + unsetEnvVar('MC_DISABLE_RATE_LIMIT') + results.push({ id: 'rate_limiting', name: 'Rate limiting active', fixed: true, detail: 'Removed MC_DISABLE_RATE_LIMIT', fixSafety: FIX_SAFETY['rate_limiting'] }) + } catch (e: any) { + results.push({ id: 'rate_limiting', name: 'Rate limiting active', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['rate_limiting'] }) + } + } + + // 5. 
Fix API_KEY if it's a known default + const apiKey = process.env.API_KEY || '' + if (shouldFix('api_key_set') && (!apiKey || apiKey === 'generate-a-random-key')) { + try { + const newKey = crypto.randomBytes(32).toString('hex') + setEnvVar('API_KEY', newKey) + results.push({ id: 'api_key_set', name: 'API key', fixed: true, detail: 'Generated new random API key', fixSafety: FIX_SAFETY['api_key_set'] }) + } catch (e: any) { + results.push({ id: 'api_key_set', name: 'API key', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['api_key_set'] }) + } + } + + // 6. Fix OpenClaw config + const ocFixIds = ['config_permissions', 'gateway_auth', 'gateway_bind', 'elevated_disabled', 'dm_isolation', 'exec_restricted', 'control_ui_device_auth', 'control_ui_insecure_auth', 'fs_workspace_only', 'log_redaction'] + const configPath = config.openclawConfigPath + if (ocFixIds.some(id => shouldFix(id)) && configPath && existsSync(configPath)) { + let ocConfig: any + try { + ocConfig = JSON.parse(readFileSync(configPath, 'utf-8')) + } catch { ocConfig = null } + + if (ocConfig) { + let configChanged = false + + // Fix config file permissions + if (shouldFix('config_permissions')) try { + const stat = statSync(configPath) + const mode = (stat.mode & 0o777).toString(8) + if (mode !== '600') { + chmodSync(configPath, 0o600) + results.push({ id: 'config_permissions', name: 'OpenClaw config permissions', fixed: true, detail: `Changed from ${mode} to 600`, fixSafety: FIX_SAFETY['config_permissions'] }) + } + } catch (e: any) { + results.push({ id: 'config_permissions', name: 'OpenClaw config permissions', fixed: false, detail: e.message, fixSafety: FIX_SAFETY['config_permissions'] }) + } + + // Fix gateway auth + if (shouldFix('gateway_auth')) { + if (!ocConfig.gateway) ocConfig.gateway = {} + if (!ocConfig.gateway.auth) ocConfig.gateway.auth = {} + if (ocConfig.gateway.auth.mode !== 'token') { + ocConfig.gateway.auth.mode = 'token' + if (!ocConfig.gateway.auth.token) { + 
ocConfig.gateway.auth.token = crypto.randomBytes(32).toString('hex') + } + configChanged = true + results.push({ id: 'gateway_auth', name: 'Gateway authentication', fixed: true, detail: 'Set auth.mode to "token" with generated token', fixSafety: FIX_SAFETY['gateway_auth'] }) + } + } + + // Fix gateway bind + if (shouldFix('gateway_bind')) { + if (!ocConfig.gateway) ocConfig.gateway = {} + if (ocConfig.gateway.bind !== 'loopback' && ocConfig.gateway.bind !== '127.0.0.1') { + ocConfig.gateway.bind = 'loopback' + configChanged = true + results.push({ id: 'gateway_bind', name: 'Gateway bind address', fixed: true, detail: 'Set bind to "loopback"', fixSafety: FIX_SAFETY['gateway_bind'] }) + } + } + + // Fix elevated mode + if (shouldFix('elevated_disabled')) { + if (!ocConfig.elevated) ocConfig.elevated = {} + if (ocConfig.elevated.enabled === true) { + ocConfig.elevated.enabled = false + configChanged = true + results.push({ id: 'elevated_disabled', name: 'Elevated mode', fixed: true, detail: 'Disabled elevated mode', fixSafety: FIX_SAFETY['elevated_disabled'] }) + } + } + + // Fix DM isolation + if (shouldFix('dm_isolation')) { + if (!ocConfig.session) ocConfig.session = {} + if (ocConfig.session.dmScope !== 'per-channel-peer') { + ocConfig.session.dmScope = 'per-channel-peer' + configChanged = true + results.push({ id: 'dm_isolation', name: 'DM session isolation', fixed: true, detail: 'Set dmScope to "per-channel-peer"', fixSafety: FIX_SAFETY['dm_isolation'] }) + } + } + + // Fix exec security + if (shouldFix('exec_restricted')) { + if (!ocConfig.tools) ocConfig.tools = {} + if (!ocConfig.tools.exec) ocConfig.tools.exec = {} + if (ocConfig.tools.exec.security !== 'sandbox' && ocConfig.tools.exec.security !== 'deny') { + ocConfig.tools.exec.security = 'sandbox' + configChanged = true + results.push({ id: 'exec_restricted', name: 'Exec tool restriction', fixed: true, detail: 'Set exec security to "sandbox"', fixSafety: FIX_SAFETY['exec_restricted'] }) + } + } + + // Fix 
Control UI device auth + if (shouldFix('control_ui_device_auth')) { + if (ocConfig.gateway?.controlUi?.dangerouslyDisableDeviceAuth === true) { + ocConfig.gateway.controlUi.dangerouslyDisableDeviceAuth = false + configChanged = true + results.push({ id: 'control_ui_device_auth', name: 'Control UI device auth', fixed: true, detail: 'Disabled dangerouslyDisableDeviceAuth', fixSafety: FIX_SAFETY['control_ui_device_auth'] }) + } + } + + // Fix Control UI insecure auth + if (shouldFix('control_ui_insecure_auth')) { + if (ocConfig.gateway?.controlUi?.allowInsecureAuth === true) { + ocConfig.gateway.controlUi.allowInsecureAuth = false + configChanged = true + results.push({ id: 'control_ui_insecure_auth', name: 'Control UI secure auth', fixed: true, detail: 'Disabled allowInsecureAuth', fixSafety: FIX_SAFETY['control_ui_insecure_auth'] }) + } + } + + // Fix filesystem workspace isolation + if (shouldFix('fs_workspace_only')) { + if (!ocConfig.tools) ocConfig.tools = {} + if (!ocConfig.tools.fs) ocConfig.tools.fs = {} + if (ocConfig.tools.fs.workspaceOnly !== true) { + ocConfig.tools.fs.workspaceOnly = true + configChanged = true + results.push({ id: 'fs_workspace_only', name: 'Filesystem workspace isolation', fixed: true, detail: 'Set tools.fs.workspaceOnly to true', fixSafety: FIX_SAFETY['fs_workspace_only'] }) + } + } + + // Fix log redaction + if (shouldFix('log_redaction')) { + if (!ocConfig.logging) ocConfig.logging = {} + if (!ocConfig.logging.redactSensitive) { + ocConfig.logging.redactSensitive = 'tools' + configChanged = true + results.push({ id: 'log_redaction', name: 'Log redaction', fixed: true, detail: 'Set logging.redactSensitive to "tools"', fixSafety: FIX_SAFETY['log_redaction'] }) + } + } + + if (configChanged) { + try { + writeFileSync(configPath, JSON.stringify(ocConfig, null, 2) + '\n', 'utf-8') + } catch (e: any) { + results.push({ id: 'config_write', name: 'Write OpenClaw config', fixed: false, detail: e.message }) + } + } + } + } + + // 7. 
Fix world-writable files (uses execFileSync with find — no user input) + if (shouldFix('world_writable')) try { + const cwd = process.cwd() + const wwOutput = execFileSync('find', [cwd, '-maxdepth', '2', '-perm', '-o+w', '-not', '-type', 'l'], { + encoding: 'utf-8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'], + }).trim() + if (wwOutput) { + const files = wwOutput.split('\n').filter(Boolean).slice(0, 20) + let fixedCount = 0 + for (const f of files) { + try { chmodSync(f, 0o755); fixedCount++ } catch { /* skip */ } + } + if (fixedCount > 0) { + results.push({ id: 'world_writable', name: 'World-writable files', fixed: true, detail: `Fixed permissions on ${fixedCount} file(s)`, fixSafety: FIX_SAFETY['world_writable'] }) + } + } + } catch { /* no world-writable files or find not available */ } + + // Audit log + try { + const db = getDatabase() + db.prepare( + 'INSERT INTO audit_log (action, actor, detail) VALUES (?, ?, ?)' + ).run('security.auto_fix', auth.user.username, JSON.stringify({ fixes: results.filter(r => r.fixed).map(r => r.id) })) + } catch { /* non-critical */ } + + const fixed = results.filter(r => r.fixed).length + const failed = results.filter(r => !r.fixed).length + const remainingChecks = getFailingChecks() + const remainingAutoFixable = remainingChecks.filter((check) => check.id in FIX_SAFETY).length + const remainingManual = remainingChecks.length - remainingAutoFixable + + logger.info({ fixed, failed, actor: auth.user.username }, 'Security auto-fix completed') + + return NextResponse.json({ + attempted: results.length, + fixed, + failed, + remaining: remainingChecks.length, + remainingAutoFixable, + remainingManual, + results, + note: remainingChecks.length > 0 + ? 'Some issues require manual action or additional review. Environment-backed fixes may still require a server restart to fully apply.' + : 'All currently detected auto-fixable issues have been resolved. 
Restart the server if you changed environment-backed settings.', + }) +} diff --git a/src/app/api/security-scan/route.ts b/src/app/api/security-scan/route.ts new file mode 100644 index 0000000..2bc36e3 --- /dev/null +++ b/src/app/api/security-scan/route.ts @@ -0,0 +1,16 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { logger } from '@/lib/logger' +import { runSecurityScan } from '@/lib/security-scan' + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + return NextResponse.json(runSecurityScan()) + } catch (error) { + logger.error({ err: error }, 'Security scan error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/sessions/continue/route.ts b/src/app/api/sessions/continue/route.ts new file mode 100644 index 0000000..29dea87 --- /dev/null +++ b/src/app/api/sessions/continue/route.ts @@ -0,0 +1,79 @@ +import { promises as fs } from 'node:fs' +import path from 'node:path' +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { logger } from '@/lib/logger' +import { runCommand } from '@/lib/command' + +type ContinueKind = 'claude-code' | 'codex-cli' + +function sanitizePrompt(value: unknown): string { + return typeof value === 'string' ? value.trim() : '' +} + +/** + * POST /api/sessions/continue + * Body: { kind: 'claude-code'|'codex-cli', id: string, prompt: string } + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const body = await request.json().catch(() => ({})) + const kind = body?.kind as ContinueKind + const sessionId = typeof body?.id === 'string' ? 
body.id.trim() : '' + const prompt = sanitizePrompt(body?.prompt) + + if (!sessionId || !/^[a-zA-Z0-9._:-]+$/.test(sessionId)) { + return NextResponse.json({ error: 'Invalid session id' }, { status: 400 }) + } + if (kind !== 'claude-code' && kind !== 'codex-cli') { + return NextResponse.json({ error: 'Invalid kind' }, { status: 400 }) + } + if (!prompt || prompt.length > 6000) { + return NextResponse.json({ error: 'prompt is required (max 6000 chars)' }, { status: 400 }) + } + + let reply = '' + + if (kind === 'claude-code') { + const result = await runCommand('claude', ['--print', '--resume', sessionId, prompt], { + timeoutMs: 180000, + }) + reply = (result.stdout || '').trim() || (result.stderr || '').trim() + } else { + const outputPath = path.join('/tmp', `mc-codex-last-${Date.now()}-${Math.random().toString(36).slice(2)}.txt`) + try { + await runCommand('codex', ['exec', 'resume', sessionId, prompt, '--skip-git-repo-check', '-o', outputPath], { + timeoutMs: 180000, + }) + } finally { + // Read after run attempt either way for best-effort output + } + + try { + reply = (await fs.readFile(outputPath, 'utf-8')).trim() + } catch { + reply = '' + } + + try { + await fs.unlink(outputPath) + } catch { + // ignore + } + } + + if (!reply) { + reply = 'Session continued, but no text response was returned.' 
+ } + + return NextResponse.json({ ok: true, reply }) + } catch (error: any) { + logger.error({ err: error }, 'POST /api/sessions/continue error') + return NextResponse.json({ error: error?.message || 'Failed to continue session' }, { status: 500 }) + } +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/sessions/route.ts b/src/app/api/sessions/route.ts index 5e02004..d777620 100644 --- a/src/app/api/sessions/route.ts +++ b/src/app/api/sessions/route.ts @@ -2,59 +2,43 @@ import { NextRequest, NextResponse } from 'next/server' import { getAllGatewaySessions } from '@/lib/sessions' import { syncClaudeSessions } from '@/lib/claude-sessions' import { scanCodexSessions } from '@/lib/codex-sessions' -import { getDatabase } from '@/lib/db' +import { scanHermesSessions } from '@/lib/hermes-sessions' +import { getDatabase, db_helpers } from '@/lib/db' import { requireRole } from '@/lib/auth' +import { runClawdbot } from '@/lib/command' +import { mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +const LOCAL_SESSION_ACTIVE_WINDOW_MS = 90 * 60 * 1000 + export async function GET(request: NextRequest) { const auth = requireRole(request, 'viewer') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) try { + const { searchParams } = new URL(request.url) + const includeLocal = searchParams.get('include_local') === '1' const gatewaySessions = getAllGatewaySessions() + const mappedGatewaySessions = mapGatewaySessions(gatewaySessions) - // If gateway sessions exist, deduplicate and return those - if (gatewaySessions.length > 0) { - // Deduplicate by sessionId — OpenClaw tracks cron runs under the same - // session ID as the parent session, causing duplicate React keys (#80). - // Keep the most recently updated entry when duplicates exist. 
- const sessionMap = new Map() - for (const s of gatewaySessions) { - const id = s.sessionId || `${s.agent}:${s.key}` - const existing = sessionMap.get(id) - if (!existing || s.updatedAt > existing.updatedAt) { - sessionMap.set(id, s) - } - } - - const sessions = Array.from(sessionMap.values()).map((s) => { - const total = s.totalTokens || 0 - const context = s.contextTokens || 35000 - const pct = context > 0 ? Math.round((total / context) * 100) : 0 - return { - id: s.sessionId || `${s.agent}:${s.key}`, - key: s.key, - agent: s.agent, - kind: s.chatType || 'unknown', - age: formatAge(s.updatedAt), - model: s.model, - tokens: `${formatTokens(total)}/${formatTokens(context)} (${pct}%)`, - channel: s.channel, - flags: [], - active: s.active, - startTime: s.updatedAt, - lastActivity: s.updatedAt, - source: 'gateway' as const, - } - }) - return NextResponse.json({ sessions }) + // Preserve existing behavior by default: when gateway sessions are present, + // return only gateway-backed sessions unless include_local=1 is requested. 
+ if (mappedGatewaySessions.length > 0 && !includeLocal) { + return NextResponse.json({ sessions: mappedGatewaySessions }) } - // Fallback: sync and read local Claude + Codex sessions from disk/SQLite + // Local Claude + Codex sessions from disk/SQLite await syncClaudeSessions() const claudeSessions = getLocalClaudeSessions() const codexSessions = getLocalCodexSessions() - const merged = mergeLocalSessions(claudeSessions, codexSessions) + const hermesSessions = getLocalHermesSessions() + const localMerged = mergeLocalSessions(claudeSessions, codexSessions, hermesSessions) + + if (mappedGatewaySessions.length === 0) { + return NextResponse.json({ sessions: localMerged }) + } + + const merged = dedupeAndSortSessions([...mappedGatewaySessions, ...localMerged]) return NextResponse.json({ sessions: merged }) } catch (error) { logger.error({ err: error }, 'Sessions API error') @@ -62,6 +46,161 @@ export async function GET(request: NextRequest) { } } +const VALID_THINKING_LEVELS = ['off', 'minimal', 'low', 'medium', 'high', 'xhigh'] as const +const VALID_VERBOSE_LEVELS = ['off', 'on', 'full'] as const +const VALID_REASONING_LEVELS = ['off', 'on', 'stream'] as const +const SESSION_KEY_RE = /^[a-zA-Z0-9:_.-]+$/ + +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + try { + const { searchParams } = new URL(request.url) + const action = searchParams.get('action') + const body = await request.json() + const { sessionKey } = body + + if (!sessionKey || !SESSION_KEY_RE.test(sessionKey)) { + return NextResponse.json({ error: 'Invalid session key' }, { status: 400 }) + } + + let rpcFn: string + let logDetail: string + + switch (action) { + case 'set-thinking': { + const { level } = body + if (!VALID_THINKING_LEVELS.includes(level)) { + return 
NextResponse.json({ error: `Invalid thinking level. Must be: ${VALID_THINKING_LEVELS.join(', ')}` }, { status: 400 }) + } + rpcFn = `session_setThinking("${sessionKey}", "${level}")` + logDetail = `Set thinking=${level} on ${sessionKey}` + break + } + case 'set-verbose': { + const { level } = body + if (!VALID_VERBOSE_LEVELS.includes(level)) { + return NextResponse.json({ error: `Invalid verbose level. Must be: ${VALID_VERBOSE_LEVELS.join(', ')}` }, { status: 400 }) + } + rpcFn = `session_setVerbose("${sessionKey}", "${level}")` + logDetail = `Set verbose=${level} on ${sessionKey}` + break + } + case 'set-reasoning': { + const { level } = body + if (!VALID_REASONING_LEVELS.includes(level)) { + return NextResponse.json({ error: `Invalid reasoning level. Must be: ${VALID_REASONING_LEVELS.join(', ')}` }, { status: 400 }) + } + rpcFn = `session_setReasoning("${sessionKey}", "${level}")` + logDetail = `Set reasoning=${level} on ${sessionKey}` + break + } + case 'set-label': { + const { label } = body + if (typeof label !== 'string' || label.length > 100) { + return NextResponse.json({ error: 'Label must be a string up to 100 characters' }, { status: 400 }) + } + rpcFn = `session_setLabel("${sessionKey}", ${JSON.stringify(label)})` + logDetail = `Set label="${label}" on ${sessionKey}` + break + } + default: + return NextResponse.json({ error: 'Invalid action. 
Must be: set-thinking, set-verbose, set-reasoning, set-label' }, { status: 400 }) + } + + const result = await runClawdbot(['-c', rpcFn], { timeoutMs: 10000 }) + + db_helpers.logActivity( + 'session_control', + 'session', + 0, + auth.user.username, + logDetail, + { session_key: sessionKey, action } + ) + + return NextResponse.json({ success: true, action, sessionKey, stdout: result.stdout.trim() }) + } catch (error: any) { + logger.error({ err: error }, 'Session POST error') + return NextResponse.json({ error: error.message || 'Session action failed' }, { status: 500 }) + } +} + +export async function DELETE(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + try { + const body = await request.json() + const { sessionKey } = body + + if (!sessionKey || !SESSION_KEY_RE.test(sessionKey)) { + return NextResponse.json({ error: 'Invalid session key' }, { status: 400 }) + } + + const result = await runClawdbot( + ['-c', `session_delete("${sessionKey}")`], + { timeoutMs: 10000 } + ) + + db_helpers.logActivity( + 'session_control', + 'session', + 0, + auth.user.username, + `Deleted session ${sessionKey}`, + { session_key: sessionKey, action: 'delete' } + ) + + return NextResponse.json({ success: true, sessionKey, stdout: result.stdout.trim() }) + } catch (error: any) { + logger.error({ err: error }, 'Session DELETE error') + return NextResponse.json({ error: error.message || 'Session deletion failed' }, { status: 500 }) + } +} + +function mapGatewaySessions(gatewaySessions: ReturnType<typeof getAllGatewaySessions>) { + // Deduplicate by sessionId — OpenClaw tracks cron runs under the same + // session ID as the parent session, causing duplicate React keys (#80). + // Keep the most recently updated entry when duplicates exist.
+ const sessionMap = new Map() + for (const s of gatewaySessions) { + const id = s.sessionId || `${s.agent}:${s.key}` + const existing = sessionMap.get(id) + if (!existing || s.updatedAt > existing.updatedAt) { + sessionMap.set(id, s) + } + } + + return Array.from(sessionMap.values()).map((s) => { + const total = s.totalTokens || 0 + const context = s.contextTokens || 35000 + const pct = context > 0 ? Math.round((total / context) * 100) : 0 + return { + id: s.sessionId || `${s.agent}:${s.key}`, + key: s.key, + agent: s.agent, + kind: s.chatType || 'unknown', + age: formatAge(s.updatedAt), + model: s.model, + tokens: `${formatTokens(total)}/${formatTokens(context)} (${pct}%)`, + channel: s.channel, + flags: [], + active: s.active, + startTime: s.updatedAt, + lastActivity: s.updatedAt, + source: 'gateway' as const, + } + }) +} + /** Read Claude Code sessions from the local SQLite database */ function getLocalClaudeSessions() { try { @@ -73,19 +212,24 @@ function getLocalClaudeSessions() { return rows.map((s) => { const total = (s.input_tokens || 0) + (s.output_tokens || 0) const lastMsg = s.last_message_at ? new Date(s.last_message_at).getTime() : 0 + // Trust scanner state first, but fall back to derived recency so UI doesn't + // show stale "xh ago" when the active flag lags behind disk updates. + const derivedActive = lastMsg > 0 && (Date.now() - lastMsg) < LOCAL_SESSION_ACTIVE_WINDOW_MS + const isActive = s.is_active === 1 || derivedActive + const effectiveLastActivity = isActive ? Date.now() : lastMsg return { id: s.session_id, key: s.project_slug || s.session_id, agent: s.project_slug || 'local', kind: 'claude-code', - age: formatAge(lastMsg), + age: isActive ? 'now' : formatAge(lastMsg), model: s.model || 'unknown', tokens: `${formatTokens(s.input_tokens || 0)}/${formatTokens(s.output_tokens || 0)}`, channel: 'local', flags: s.git_branch ? [s.git_branch] : [], - active: s.is_active === 1, + active: isActive, startTime: s.first_message_at ? 
new Date(s.first_message_at).getTime() : 0, - lastActivity: lastMsg, + lastActivity: effectiveLastActivity, source: 'local' as const, userMessages: s.user_messages || 0, assistantMessages: s.assistant_messages || 0, @@ -109,19 +253,20 @@ function getLocalCodexSessions() { const total = s.totalTokens || (s.inputTokens + s.outputTokens) const lastMsg = s.lastMessageAt ? new Date(s.lastMessageAt).getTime() : 0 const firstMsg = s.firstMessageAt ? new Date(s.firstMessageAt).getTime() : 0 + const effectiveLastActivity = s.isActive ? Date.now() : lastMsg return { id: s.sessionId, key: s.projectSlug || s.sessionId, agent: s.projectSlug || 'codex-local', kind: 'codex-cli', - age: formatAge(lastMsg), + age: s.isActive ? 'now' : formatAge(lastMsg), model: s.model || 'codex', tokens: `${formatTokens(s.inputTokens || 0)}/${formatTokens(s.outputTokens || 0)}`, channel: 'local', flags: [], active: s.isActive, startTime: firstMsg, - lastActivity: lastMsg, + lastActivity: effectiveLastActivity, source: 'local' as const, userMessages: s.userMessages || 0, assistantMessages: s.assistantMessages || 0, @@ -138,20 +283,65 @@ function getLocalCodexSessions() { } } +function getLocalHermesSessions() { + try { + const rows = scanHermesSessions(100) + + return rows.map((s) => { + const total = s.inputTokens + s.outputTokens + const lastMsg = s.lastMessageAt ? new Date(s.lastMessageAt).getTime() : 0 + const firstMsg = s.firstMessageAt ? new Date(s.firstMessageAt).getTime() : 0 + const effectiveLastActivity = s.isActive ? Date.now() : lastMsg + return { + id: s.sessionId, + key: s.title || s.sessionId, + agent: 'hermes', + kind: 'hermes', + age: s.isActive ? 'now' : formatAge(lastMsg), + model: s.model || 'hermes', + tokens: `${formatTokens(s.inputTokens)}/${formatTokens(s.outputTokens)}`, + channel: s.source || 'cli', + flags: s.source && s.source !== 'cli' ? 
[s.source] : [], + active: s.isActive, + startTime: firstMsg, + lastActivity: effectiveLastActivity, + source: 'local' as const, + userMessages: s.messageCount, + assistantMessages: 0, + toolUses: s.toolCallCount, + estimatedCost: 0, + lastUserPrompt: s.title || null, + totalTokens: total, + workingDir: null, + } + }) + } catch (err) { + logger.warn({ err }, 'Failed to read local Hermes sessions') + return [] + } +} + function mergeLocalSessions( claudeSessions: Array<Record<string, unknown>>, codexSessions: Array<Record<string, unknown>>, + hermesSessions: Array<Record<string, unknown>> = [], ) { - const merged = [...claudeSessions, ...codexSessions] + const merged = [...claudeSessions, ...codexSessions, ...hermesSessions] + return dedupeAndSortSessions(merged) +} + +function dedupeAndSortSessions(merged: Array<Record<string, unknown>>) { const deduped = new Map<string, Record<string, unknown>>() for (const session of merged) { const id = String(session?.id || '') + const source = String(session?.source || '') + const key = `${source}:${id}` if (!id) continue - const existing = deduped.get(id) + const existing = deduped.get(key) const currentActivity = Number(session?.lastActivity || 0) const existingActivity = Number(existing?.lastActivity || 0) - if (!existing || currentActivity > existingActivity) deduped.set(id, session) + if (!existing || currentActivity > existingActivity) deduped.set(key, session) } return Array.from(deduped.values()) @@ -168,6 +358,7 @@ function formatTokens(n: number): string { function formatAge(timestamp: number): string { if (!timestamp) return '-' const diff = Date.now() - timestamp + if (diff <= 0) return 'now' const mins = Math.floor(diff / 60000) const hours = Math.floor(mins / 60) const days = Math.floor(hours / 24) diff --git a/src/app/api/sessions/transcript/aggregate/route.ts b/src/app/api/sessions/transcript/aggregate/route.ts new file mode 100644 index 0000000..675d354 --- /dev/null +++ b/src/app/api/sessions/transcript/aggregate/route.ts @@ -0,0 +1,92 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from
'@/lib/auth' +import { config } from '@/lib/config' +import { getAllGatewaySessions } from '@/lib/sessions' +import { parseJsonlTranscript, readSessionJsonl, type TranscriptMessage, type MessageContentPart } from '@/lib/transcript-parser' + +export interface AggregateEvent { + id: string + ts: number + sessionKey: string + agentName: string + role: string + type: string + content: string + metadata?: Record<string, unknown> +} + +/** + * GET /api/sessions/transcript/aggregate?limit=100&since= + * + * Fan out to all active session JSONL files on disk, parse, merge into + * a single chronological event stream for the agent-feed panel. + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const { searchParams } = new URL(request.url) + const limit = Math.min(Math.max(parseInt(searchParams.get('limit') || '100', 10), 1), 500) + const since = parseInt(searchParams.get('since') || '0', 10) || 0 + + const stateDir = config.openclawStateDir + if (!stateDir) { + return NextResponse.json({ events: [], sessionCount: 0 }) + } + + const sessions = getAllGatewaySessions() + const allEvents: AggregateEvent[] = [] + + for (const session of sessions) { + if (!session.sessionId) continue + + const raw = readSessionJsonl(stateDir, session.agent, session.sessionId) + if (!raw) continue + + const messages = parseJsonlTranscript(raw, 500) + let lineIndex = 0 + + for (const msg of messages) { + const ts = msg.timestamp ?
new Date(msg.timestamp).getTime() : session.updatedAt + if (since && ts <= since) { lineIndex++; continue } + + for (const part of msg.parts) { + allEvents.push(partToEvent(part, msg.role, ts, session.key, session.agent, lineIndex)) + lineIndex++ + } + } + } + + // Sort chronologically (newest last), take the last `limit` entries + allEvents.sort((a, b) => a.ts - b.ts) + const trimmed = allEvents.slice(-limit) + + return NextResponse.json({ + events: trimmed, + sessionCount: sessions.length, + }) +} + +function partToEvent( + part: MessageContentPart, + role: string, + ts: number, + sessionKey: string, + agentName: string, + lineIndex: number, +): AggregateEvent { + const id = `tx-${sessionKey}-${lineIndex}` + + switch (part.type) { + case 'text': + return { id, ts, sessionKey, agentName, role, type: 'text', content: part.text.slice(0, 500) } + case 'thinking': + return { id, ts, sessionKey, agentName, role, type: 'thinking', content: part.thinking.slice(0, 300) } + case 'tool_use': + return { id, ts, sessionKey, agentName, role, type: 'tool_use', content: part.name, metadata: { toolId: part.id, input: part.input } } + case 'tool_result': + return { id, ts, sessionKey, agentName, role, type: 'tool_result', content: part.content.slice(0, 500), metadata: { toolUseId: part.toolUseId, isError: part.isError } } + } +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/sessions/transcript/gateway/route.ts b/src/app/api/sessions/transcript/gateway/route.ts new file mode 100644 index 0000000..d22bf5a --- /dev/null +++ b/src/app/api/sessions/transcript/gateway/route.ts @@ -0,0 +1,101 @@ +import { NextRequest, NextResponse } from 'next/server' +import { existsSync, readFileSync } from 'node:fs' +import path from 'node:path' +import { requireRole } from '@/lib/auth' +import { config } from '@/lib/config' +import { logger } from '@/lib/logger' +import { parseGatewayHistoryTranscript, parseJsonlTranscript } from '@/lib/transcript-parser' +import { 
callOpenClawGateway } from '@/lib/openclaw-gateway' + +/** + * GET /api/sessions/transcript/gateway?key=&limit=50 + * + * Reads the JSONL transcript file for a gateway session directly from disk. + * OpenClaw stores session transcripts at: + * {OPENCLAW_STATE_DIR}/agents/{agent}/sessions/{sessionId}.jsonl + * + * The session key (e.g. "agent:jarv:cron:task-name") is used to look up + * the sessionId from the agent's sessions.json, then the JSONL file is read. + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const { searchParams } = new URL(request.url) + const sessionKey = searchParams.get('key') || '' + const limit = Math.min(parseInt(searchParams.get('limit') || '50', 10), 200) + + if (!sessionKey) { + return NextResponse.json({ error: 'key is required' }, { status: 400 }) + } + + const stateDir = config.openclawStateDir + if (!stateDir) { + return NextResponse.json({ messages: [], source: 'gateway', error: 'OPENCLAW_STATE_DIR not configured' }) + } + + try { + try { + const history = await callOpenClawGateway<{ messages?: unknown[] }>( + 'chat.history', + { sessionKey, limit }, + 15000, + ) + const liveMessages = parseGatewayHistoryTranscript(Array.isArray(history?.messages) ? history.messages : [], limit) + if (liveMessages.length > 0) { + return NextResponse.json({ messages: liveMessages, source: 'gateway-rpc' }) + } + } catch (rpcErr) { + logger.warn({ err: rpcErr, sessionKey }, 'Gateway chat.history failed, falling back to disk transcript') + } + + // Extract agent name from session key (e.g. 
"agent:jarv:main" -> "jarv") + const agentName = extractAgentName(sessionKey) + if (!agentName) { + return NextResponse.json({ messages: [], source: 'gateway', error: 'Could not determine agent from session key' }) + } + + // Look up the sessionId from the agent's sessions.json + const sessionsFile = path.join(stateDir, 'agents', agentName, 'sessions', 'sessions.json') + if (!existsSync(sessionsFile)) { + return NextResponse.json({ messages: [], source: 'gateway', error: 'Agent sessions file not found' }) + } + + let sessionsData: Record + try { + sessionsData = JSON.parse(readFileSync(sessionsFile, 'utf-8')) + } catch { + return NextResponse.json({ messages: [], source: 'gateway', error: 'Could not parse sessions.json' }) + } + + const sessionEntry = sessionsData[sessionKey] + if (!sessionEntry?.sessionId) { + return NextResponse.json({ messages: [], source: 'gateway', error: 'Session not found in sessions.json' }) + } + + const sessionId = sessionEntry.sessionId + const jsonlPath = path.join(stateDir, 'agents', agentName, 'sessions', `${sessionId}.jsonl`) + if (!existsSync(jsonlPath)) { + return NextResponse.json({ messages: [], source: 'gateway', error: 'Session JSONL file not found' }) + } + + // Read and parse the JSONL file + const raw = readFileSync(jsonlPath, 'utf-8') + const messages = parseJsonlTranscript(raw, limit) + + return NextResponse.json({ messages, source: 'gateway' }) + } catch (err: any) { + logger.warn({ err, sessionKey }, 'Gateway session transcript read failed') + return NextResponse.json({ messages: [], source: 'gateway', error: 'Failed to read session transcript' }) + } +} + +function extractAgentName(sessionKey: string): string | null { + const parts = sessionKey.split(':') + if (parts.length >= 2 && parts[0] === 'agent') { + return parts[1] + } + return null +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/sessions/transcript/route.ts b/src/app/api/sessions/transcript/route.ts new file mode 100644 index 
0000000..f42a67c --- /dev/null +++ b/src/app/api/sessions/transcript/route.ts @@ -0,0 +1,375 @@ +import fs from 'node:fs' +import path from 'node:path' +import { NextRequest, NextResponse } from 'next/server' +import Database from 'better-sqlite3' +import { config } from '@/lib/config' +import { requireRole } from '@/lib/auth' +import { logger } from '@/lib/logger' + +type MessageContentPart = + | { type: 'text'; text: string } + | { type: 'thinking'; thinking: string } + | { type: 'tool_use'; id: string; name: string; input: string } + | { type: 'tool_result'; toolUseId: string; content: string; isError?: boolean } + +type TranscriptMessage = { + role: 'user' | 'assistant' | 'system' + parts: MessageContentPart[] + timestamp?: string +} + +function messageTimestampMs(message: TranscriptMessage): number { + if (!message.timestamp) return 0 + const ts = new Date(message.timestamp).getTime() + return Number.isFinite(ts) ? ts : 0 +} + +function listRecentFiles(root: string, ext: string, limit: number): string[] { + if (!root || !fs.existsSync(root)) return [] + + const files: Array<{ path: string; mtimeMs: number }> = [] + const stack = [root] + + while (stack.length > 0) { + const dir = stack.pop() + if (!dir) continue + + let entries: string[] = [] + try { + entries = fs.readdirSync(dir) + } catch { + continue + } + + for (const entry of entries) { + const full = path.join(dir, entry) + let stat: fs.Stats + try { + stat = fs.statSync(full) + } catch { + continue + } + + if (stat.isDirectory()) { + stack.push(full) + continue + } + + if (!stat.isFile() || !full.endsWith(ext)) continue + files.push({ path: full, mtimeMs: stat.mtimeMs }) + } + } + + files.sort((a, b) => b.mtimeMs - a.mtimeMs) + return files.slice(0, Math.max(1, limit)).map((f) => f.path) +} + +function pushMessage( + list: TranscriptMessage[], + role: TranscriptMessage['role'], + parts: MessageContentPart[], + timestamp?: string, +) { + if (parts.length === 0) return + list.push({ role, parts, 
timestamp }) +} + +function textPart(content: string | null, limit = 8000): MessageContentPart | null { + const text = String(content || '').trim() + if (!text) return null + return { type: 'text', text: text.slice(0, limit) } +} + +function readClaudeTranscript(sessionId: string, limit: number): TranscriptMessage[] { + const root = path.join(config.claudeHome, 'projects') + const files = listRecentFiles(root, '.jsonl', 300) + const out: TranscriptMessage[] = [] + + for (const file of files) { + let raw = '' + try { + raw = fs.readFileSync(file, 'utf-8') + } catch { + continue + } + + const lines = raw.split('\n').filter(Boolean) + for (const line of lines) { + let parsed: any + try { + parsed = JSON.parse(line) + } catch { + continue + } + + if (parsed?.sessionId !== sessionId || parsed?.isSidechain) continue + + const ts = typeof parsed?.timestamp === 'string' ? parsed.timestamp : undefined + if (parsed?.type === 'user') { + const rawContent = parsed?.message?.content + // Check if this is a tool_result array (not real user input) + if (Array.isArray(rawContent) && rawContent.some((b: any) => b?.type === 'tool_result')) { + const parts: MessageContentPart[] = [] + for (const block of rawContent) { + if (block?.type === 'tool_result') { + const resultContent = typeof block.content === 'string' + ? block.content + : Array.isArray(block.content) + ? block.content.map((c: any) => c?.text || '').join('\n') + : '' + if (resultContent.trim()) { + parts.push({ + type: 'tool_result', + toolUseId: block.tool_use_id || '', + content: resultContent.trim().slice(0, 8000), + isError: block.is_error === true, + }) + } + } + } + pushMessage(out, 'system', parts, ts) + } else { + const content = typeof rawContent === 'string' + ? rawContent + : Array.isArray(rawContent) + ? 
rawContent.map((b: any) => b?.text || '').join('\n').trim() + : '' + const part = textPart(content) + if (part) pushMessage(out, 'user', [part], ts) + } + } else if (parsed?.type === 'assistant') { + const parts: MessageContentPart[] = [] + if (Array.isArray(parsed?.message?.content)) { + for (const block of parsed.message.content) { + if (block?.type === 'thinking' && typeof block?.thinking === 'string') { + const thinking = block.thinking.trim() + if (thinking) { + parts.push({ type: 'thinking', thinking: thinking.slice(0, 4000) }) + } + } else if (block?.type === 'text' && typeof block?.text === 'string') { + const part = textPart(block.text) + if (part) parts.push(part) + } else if (block?.type === 'tool_use') { + parts.push({ + type: 'tool_use', + id: block.id || '', + name: block.name || 'unknown', + input: JSON.stringify(block.input || {}).slice(0, 500), + }) + } + } + } + pushMessage(out, 'assistant', parts, ts) + } + } + } + + const sorted = out + .slice() + .sort((a, b) => messageTimestampMs(a) - messageTimestampMs(b)) + return sorted.slice(-limit) +} + +function readCodexTranscript(sessionId: string, limit: number): TranscriptMessage[] { + const root = path.join(config.homeDir, '.codex', 'sessions') + const files = listRecentFiles(root, '.jsonl', 300) + const out: TranscriptMessage[] = [] + + for (const file of files) { + let raw = '' + try { + raw = fs.readFileSync(file, 'utf-8') + } catch { + continue + } + + let matchedSession = file.includes(sessionId) + const lines = raw.split('\n').filter(Boolean) + for (const line of lines) { + let parsed: any + try { + parsed = JSON.parse(line) + } catch { + continue + } + + if (!matchedSession && parsed?.type === 'session_meta' && parsed?.payload?.id === sessionId) { + matchedSession = true + } + if (!matchedSession) continue + + const ts = typeof parsed?.timestamp === 'string' ? 
parsed.timestamp : undefined + if (parsed?.type === 'response_item') { + const payload = parsed?.payload + if (payload?.type === 'message') { + const role = payload?.role === 'assistant' ? 'assistant' as const : 'user' as const + const parts: MessageContentPart[] = [] + if (typeof payload?.content === 'string') { + const part = textPart(payload.content) + if (part) parts.push(part) + } else if (Array.isArray(payload?.content)) { + for (const block of payload.content) { + const blockType = String(block?.type || '') + // Codex CLI emits message content as input_text/output_text. + if ( + (blockType === 'text' || blockType === 'input_text' || blockType === 'output_text') + && typeof block?.text === 'string' + ) { + const part = textPart(block.text) + if (part) parts.push(part) + } + } + } + pushMessage(out, role, parts, ts) + } + } + } + } + + const sorted = out + .slice() + .sort((a, b) => messageTimestampMs(a) - messageTimestampMs(b)) + return sorted.slice(-limit) +} + +type HermesMessageRow = { + role: string + content: string | null + tool_call_id: string | null + tool_calls: string | null + tool_name: string | null + timestamp: number +} + +function epochSecondsToISO(epoch: number | null | undefined): string | undefined { + if (!epoch || !Number.isFinite(epoch) || epoch <= 0) return undefined + return new Date(epoch * 1000).toISOString() +} + +function readHermesTranscriptFromDbPath(dbPath: string, sessionId: string, limit: number): TranscriptMessage[] { + if (!dbPath || !fs.existsSync(dbPath)) return [] + + let db: Database.Database | null = null + try { + db = new Database(dbPath, { readonly: true, fileMustExist: true }) + + const rows = db.prepare(` + SELECT role, content, tool_call_id, tool_calls, tool_name, timestamp + FROM messages + WHERE session_id = ? + ORDER BY timestamp ASC + LIMIT ? 
+ `).all(sessionId, Math.max(1, limit * 4)) as HermesMessageRow[] + + const messages: TranscriptMessage[] = [] + + for (const row of rows) { + const timestamp = epochSecondsToISO(row.timestamp) + const parts: MessageContentPart[] = [] + + if (row.role === 'assistant' && row.tool_calls) { + try { + const toolCalls = JSON.parse(row.tool_calls) as Array> + for (const call of toolCalls) { + const fn = call.function + const fnRecord = fn && typeof fn === 'object' ? fn as Record : null + const name = typeof fnRecord?.name === 'string' + ? fnRecord.name + : typeof call.tool_name === 'string' + ? String(call.tool_name) + : typeof row.tool_name === 'string' + ? row.tool_name + : 'tool' + const id = typeof call.call_id === 'string' + ? call.call_id + : typeof call.id === 'string' + ? call.id + : '' + const input = typeof fnRecord?.arguments === 'string' + ? fnRecord.arguments + : JSON.stringify(fnRecord?.arguments || {}) + parts.push({ + type: 'tool_use', + id, + name, + input: String(input).slice(0, 4000), + }) + } + } catch { + // Ignore malformed tool call payloads and fall back to text content if present. 
+ } + } + + const text = textPart(row.content) + if (text) parts.push(text) + + if (row.role === 'tool') { + pushMessage(messages, 'system', [{ + type: 'tool_result', + toolUseId: row.tool_call_id || '', + content: String(row.content || '').trim().slice(0, 8000), + isError: row.content?.includes('"success": false') || row.content?.includes('"error"'), + }], timestamp) + continue + } + + if (row.role === 'assistant') { + pushMessage(messages, 'assistant', parts, timestamp) + continue + } + + if (row.role === 'user') { + pushMessage(messages, 'user', parts, timestamp) + } + } + + return messages.slice(-limit) + } catch (error) { + logger.warn({ err: error, dbPath, sessionId }, 'Failed to read Hermes transcript') + return [] + } finally { + try { db?.close() } catch { /* noop */ } + } +} + +function readHermesTranscript(sessionId: string, limit: number): TranscriptMessage[] { + const dbPath = path.join(config.homeDir, '.hermes', 'state.db') + return readHermesTranscriptFromDbPath(dbPath, sessionId, limit) +} + +/** + * GET /api/sessions/transcript + * Query params: + * kind=claude-code|codex-cli|hermes + * id= + * limit=40 + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const { searchParams } = new URL(request.url) + const kind = searchParams.get('kind') || '' + const sessionId = searchParams.get('id') || '' + const limit = Math.min(parseInt(searchParams.get('limit') || '40', 10), 200) + + if (!sessionId || (kind !== 'claude-code' && kind !== 'codex-cli' && kind !== 'hermes')) { + return NextResponse.json({ error: 'kind and id are required' }, { status: 400 }) + } + + const messages = kind === 'claude-code' + ? readClaudeTranscript(sessionId, limit) + : kind === 'codex-cli' + ? 
readCodexTranscript(sessionId, limit) + : readHermesTranscript(sessionId, limit) + + return NextResponse.json({ messages }) + } catch (error) { + logger.error({ err: error }, 'GET /api/sessions/transcript error') + return NextResponse.json({ error: 'Failed to fetch transcript' }, { status: 500 }) + } +} + +export const dynamic = 'force-dynamic' +export const __testables = { readHermesTranscriptFromDbPath } diff --git a/src/app/api/settings/route.ts b/src/app/api/settings/route.ts index d88130f..c8cd73c 100644 --- a/src/app/api/settings/route.ts +++ b/src/app/api/settings/route.ts @@ -34,6 +34,20 @@ const settingDefinitions: Record ({})) + const { source, slug, targetRoot } = body as { + source?: RegistrySource + slug?: string + targetRoot?: string + } + + if (!source || !VALID_SOURCES.includes(source)) { + return NextResponse.json({ error: `Invalid source. Use: ${VALID_SOURCES.join(', ')}` }, { status: 400 }) + } + if (!slug || typeof slug !== 'string' || slug.length > 200) { + return NextResponse.json({ error: 'Valid slug is required' }, { status: 400 }) + } + if (!targetRoot || !VALID_TARGETS.includes(targetRoot)) { + return NextResponse.json({ error: `Invalid targetRoot. Use: ${VALID_TARGETS.join(', ')}` }, { status: 400 }) + } + + const result = await installFromRegistry({ source, slug, targetRoot }) + + if (!result.ok) { + return NextResponse.json(result, { status: 422 }) + } + + return NextResponse.json(result) +} + +/** + * PUT /api/skills/registry — Security-check content without installing. + * Useful for preview/audit before install. + */ +export async function PUT(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const body = await request.json().catch(() => ({})) + const content = typeof body?.content === 'string' ? 
body.content : '' + + if (!content.trim()) { + return NextResponse.json({ error: 'Content is required' }, { status: 400 }) + } + + const report = checkSkillSecurity(content) + return NextResponse.json({ security: report }) +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/skills/route.ts b/src/app/api/skills/route.ts new file mode 100644 index 0000000..9576793 --- /dev/null +++ b/src/app/api/skills/route.ts @@ -0,0 +1,349 @@ +import { NextRequest, NextResponse } from 'next/server' +import { createHash } from 'node:crypto' +import { access, mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises' +import { constants } from 'node:fs' +import { join } from 'node:path' +import { homedir } from 'node:os' +import { requireRole } from '@/lib/auth' +import { resolveWithin } from '@/lib/paths' +import { checkSkillSecurity } from '@/lib/skill-registry' + +interface SkillSummary { + id: string + name: string + source: string + path: string + description?: string + registry_slug?: string | null + security_status?: string | null +} + +type SkillRoot = { source: string; path: string } + +function resolveSkillRoot( + envName: string, + fallback: string, +): string { + const override = process.env[envName] + return override && override.trim().length > 0 ? override.trim() : fallback +} + +async function pathReadable(path: string): Promise { + try { + await access(path, constants.R_OK) + return true + } catch { + return false + } +} + +async function extractDescription(skillPath: string): Promise { + const skillDocPath = join(skillPath, 'SKILL.md') + if (!(await pathReadable(skillDocPath))) return undefined + try { + const content = await readFile(skillDocPath, 'utf8') + const lines = content.split('\n').map((line) => line.trim()).filter(Boolean) + const firstParagraph = lines.find((line) => !line.startsWith('#')) + if (!firstParagraph) return undefined + return firstParagraph.length > 220 ? 
`${firstParagraph.slice(0, 217)}...` : firstParagraph + } catch { + return undefined + } +} + +async function collectSkillsFromDir(baseDir: string, source: string): Promise { + if (!(await pathReadable(baseDir))) return [] + try { + const entries = await readdir(baseDir, { withFileTypes: true }) + const out: SkillSummary[] = [] + for (const entry of entries) { + if (!entry.isDirectory()) continue + const skillPath = join(baseDir, entry.name) + const skillDocPath = join(skillPath, 'SKILL.md') + if (!(await pathReadable(skillDocPath))) continue + out.push({ + id: `${source}:${entry.name}`, + name: entry.name, + source, + path: skillPath, + description: await extractDescription(skillPath), + }) + } + return out.sort((a, b) => a.name.localeCompare(b.name)) + } catch { + return [] + } +} + +function getSkillRoots(): SkillRoot[] { + const home = homedir() + const cwd = process.cwd() + const roots: SkillRoot[] = [ + { source: 'user-agents', path: resolveSkillRoot('MC_SKILLS_USER_AGENTS_DIR', join(home, '.agents', 'skills')) }, + { source: 'user-codex', path: resolveSkillRoot('MC_SKILLS_USER_CODEX_DIR', join(home, '.codex', 'skills')) }, + { source: 'project-agents', path: resolveSkillRoot('MC_SKILLS_PROJECT_AGENTS_DIR', join(cwd, '.agents', 'skills')) }, + { source: 'project-codex', path: resolveSkillRoot('MC_SKILLS_PROJECT_CODEX_DIR', join(cwd, '.codex', 'skills')) }, + ] + // Add OpenClaw gateway skill roots when configured + const openclawState = process.env.OPENCLAW_STATE_DIR || process.env.OPENCLAW_HOME || join(home, '.openclaw') + const openclawSkills = resolveSkillRoot('MC_SKILLS_OPENCLAW_DIR', join(openclawState, 'skills')) + roots.push({ source: 'openclaw', path: openclawSkills }) + return roots +} + +function normalizeSkillName(raw: string): string | null { + const value = raw.trim() + if (!value) return null + if (!/^[a-zA-Z0-9._-]+$/.test(value)) return null + return value +} + +function getRootBySource(roots: SkillRoot[], sourceRaw: string | null): SkillRoot 
| null { + const source = String(sourceRaw || '').trim() + if (!source) return null + return roots.find((r) => r.source === source) || null +} + +async function upsertSkill(root: SkillRoot, name: string, content: string) { + const skillPath = resolveWithin(root.path, name) + const skillDocPath = resolveWithin(skillPath, 'SKILL.md') + await mkdir(skillPath, { recursive: true }) + await writeFile(skillDocPath, content, 'utf8') + + // Update DB hash so next sync cycle detects our write + try { + const { getDatabase } = await import('@/lib/db') + const db = getDatabase() + const hash = createHash('sha256').update(content, 'utf8').digest('hex') + const now = new Date().toISOString() + const descLines = content.split('\n').map(l => l.trim()).filter(Boolean) + const desc = descLines.find(l => !l.startsWith('#')) + db.prepare(` + INSERT INTO skills (name, source, path, description, content_hash, installed_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(source, name) DO UPDATE SET + path = excluded.path, + description = excluded.description, + content_hash = excluded.content_hash, + updated_at = excluded.updated_at + `).run( + name, + root.source, + skillPath, + desc ? (desc.length > 220 ? `${desc.slice(0, 217)}...` : desc) : null, + hash, + now, + now + ) + } catch { /* DB not ready yet — sync will catch it */ } + + return { skillPath, skillDocPath } +} + +async function deleteSkill(root: SkillRoot, name: string) { + const skillPath = resolveWithin(root.path, name) + await rm(skillPath, { recursive: true, force: true }) + + // Remove from DB + try { + const { getDatabase } = await import('@/lib/db') + const db = getDatabase() + db.prepare('DELETE FROM skills WHERE source = ? AND name = ?').run(root.source, name) + } catch { /* best-effort */ } + + return { skillPath } +} + +/** + * Try to serve skill list from DB (fast path). + * Falls back to filesystem scan if DB has no data yet. 
+ */ +function getSkillsFromDB(): SkillSummary[] | null { + try { + const { getDatabase } = require('@/lib/db') + const db = getDatabase() + const rows = db.prepare('SELECT name, source, path, description, registry_slug, security_status FROM skills ORDER BY name').all() as Array<{ + name: string; source: string; path: string; description: string | null; registry_slug: string | null; security_status: string | null + }> + if (rows.length === 0) return null // DB empty — fall back to fs scan + return rows.map(r => ({ + id: `${r.source}:${r.name}`, + name: r.name, + source: r.source, + path: r.path, + description: r.description || undefined, + registry_slug: r.registry_slug, + security_status: r.security_status, + })) + } catch { + return null + } +} + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const roots = getSkillRoots() + const { searchParams } = new URL(request.url) + const mode = searchParams.get('mode') + + if (mode === 'content') { + const source = String(searchParams.get('source') || '') + const name = normalizeSkillName(String(searchParams.get('name') || '')) + if (!source || !name) { + return NextResponse.json({ error: 'source and valid name are required' }, { status: 400 }) + } + const root = roots.find((r) => r.source === source) + if (!root) return NextResponse.json({ error: 'Invalid source' }, { status: 400 }) + const skillPath = join(root.path, name) + const skillDocPath = join(skillPath, 'SKILL.md') + if (!(await pathReadable(skillDocPath))) { + return NextResponse.json({ error: 'SKILL.md not found' }, { status: 404 }) + } + const content = await readFile(skillDocPath, 'utf8') + + // Run security check inline + const security = checkSkillSecurity(content) + + return NextResponse.json({ + source, + name, + skillPath, + skillDocPath, + content, + security, + }) + } + + if (mode === 'check') { + // 
Security-check a specific skill's content + const source = String(searchParams.get('source') || '') + const name = normalizeSkillName(String(searchParams.get('name') || '')) + if (!source || !name) { + return NextResponse.json({ error: 'source and valid name are required' }, { status: 400 }) + } + const root = roots.find((r) => r.source === source) + if (!root) return NextResponse.json({ error: 'Invalid source' }, { status: 400 }) + const skillPath = join(root.path, name) + const skillDocPath = join(skillPath, 'SKILL.md') + if (!(await pathReadable(skillDocPath))) { + return NextResponse.json({ error: 'SKILL.md not found' }, { status: 404 }) + } + const content = await readFile(skillDocPath, 'utf8') + const security = checkSkillSecurity(content) + + // Update DB with security status + try { + const { getDatabase } = await import('@/lib/db') + const db = getDatabase() + db.prepare('UPDATE skills SET security_status = ?, updated_at = ? WHERE source = ? AND name = ?') + .run(security.status, new Date().toISOString(), source, name) + } catch { /* best-effort */ } + + return NextResponse.json({ source, name, security }) + } + + // Try DB-backed fast path first + const dbSkills = getSkillsFromDB() + if (dbSkills) { + // Group by source for the groups response + const groupMap = new Map() + for (const root of roots) { + groupMap.set(root.source, { source: root.source, path: root.path, skills: [] }) + } + for (const skill of dbSkills) { + const group = groupMap.get(skill.source) + if (group) group.skills.push(skill) + } + + const deduped = new Map() + for (const skill of dbSkills) { + if (!deduped.has(skill.name)) deduped.set(skill.name, skill) + } + + return NextResponse.json({ + skills: Array.from(deduped.values()).sort((a, b) => a.name.localeCompare(b.name)), + groups: Array.from(groupMap.values()), + total: deduped.size, + }) + } + + // Fallback: filesystem scan (first load before sync runs) + const bySource = await Promise.all( + roots.map(async (root) => ({ + source: 
root.source, + path: root.path, + skills: await collectSkillsFromDir(root.path, root.source), + })) + ) + + const all = bySource.flatMap((group) => group.skills) + const deduped = new Map() + for (const skill of all) { + if (!deduped.has(skill.name)) deduped.set(skill.name, skill) + } + + return NextResponse.json({ + skills: Array.from(deduped.values()).sort((a, b) => a.name.localeCompare(b.name)), + groups: bySource, + total: deduped.size, + }) +} + +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const roots = getSkillRoots() + const body = await request.json().catch(() => ({})) + const root = getRootBySource(roots, body?.source) + const name = normalizeSkillName(String(body?.name || '')) + const contentRaw = typeof body?.content === 'string' ? body.content : '' + const content = contentRaw.trim() || `# ${name || 'skill'}\n\nDescribe this skill.\n` + + if (!root || !name) { + return NextResponse.json({ error: 'Valid source and name are required' }, { status: 400 }) + } + + await mkdir(root.path, { recursive: true }) + const { skillPath, skillDocPath } = await upsertSkill(root, name, content) + return NextResponse.json({ ok: true, source: root.source, name, skillPath, skillDocPath }) +} + +export async function PUT(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const roots = getSkillRoots() + const body = await request.json().catch(() => ({})) + const root = getRootBySource(roots, body?.source) + const name = normalizeSkillName(String(body?.name || '')) + const content = typeof body?.content === 'string' ? 
body.content : null + + if (!root || !name || content == null) { + return NextResponse.json({ error: 'Valid source, name, and content are required' }, { status: 400 }) + } + + await mkdir(root.path, { recursive: true }) + const { skillPath, skillDocPath } = await upsertSkill(root, name, content) + return NextResponse.json({ ok: true, source: root.source, name, skillPath, skillDocPath }) +} + +export async function DELETE(request: NextRequest) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const { searchParams } = new URL(request.url) + const roots = getSkillRoots() + const root = getRootBySource(roots, searchParams.get('source')) + const name = normalizeSkillName(String(searchParams.get('name') || '')) + if (!root || !name) { + return NextResponse.json({ error: 'Valid source and name are required' }, { status: 400 }) + } + + const { skillPath } = await deleteSkill(root, name) + return NextResponse.json({ ok: true, source: root.source, name, skillPath }) +} + +export const dynamic = 'force-dynamic' diff --git a/src/app/api/spawn/route.ts b/src/app/api/spawn/route.ts index cfe2951..68106f9 100644 --- a/src/app/api/spawn/route.ts +++ b/src/app/api/spawn/route.ts @@ -7,6 +7,8 @@ import { join } from 'path' import { heavyLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' import { validateBody, spawnAgentSchema } from '@/lib/validation' +import { scanForInjection } from '@/lib/injection-guard' +import { logAuditEvent } from '@/lib/db' function getPreferredToolsProfile(): string { return String(process.env.OPENCLAW_TOOLS_PROFILE || 'coding').trim() || 'coding' @@ -29,6 +31,25 @@ export async function POST(request: NextRequest) { if ('error' in result) return result.error const { task, model, label, timeoutSeconds } = result.data + // Scan the task prompt and label for injection before sending to an agent + const fieldsToScan = [ + { name: 'task', 
value: task }, + ...(label ? [{ name: 'label', value: label }] : []), + ] + for (const field of fieldsToScan) { + const injectionReport = scanForInjection(field.value, { context: 'prompt' }) + if (!injectionReport.safe) { + const criticals = injectionReport.matches.filter(m => m.severity === 'critical') + if (criticals.length > 0) { + logger.warn({ field: field.name, rules: criticals.map(m => m.rule) }, `Blocked spawn: injection detected in ${field.name}`) + return NextResponse.json( + { error: `${field.name} blocked: potentially unsafe content detected`, injection: criticals.map(m => ({ rule: m.rule, description: m.description })) }, + { status: 422 } + ) + } + } + } + const timeout = timeoutSeconds // Generate spawn ID @@ -57,13 +78,13 @@ export async function POST(request: NextRequest) { stderr = result.stderr } catch (firstError: any) { const rawErr = String(firstError?.stderr || firstError?.message || '').toLowerCase() - const likelySchemaMismatch = - rawErr.includes('unknown field') || - rawErr.includes('unknown key') || - rawErr.includes('invalid argument') || - rawErr.includes('tools') || - rawErr.includes('profile') - if (!likelySchemaMismatch) throw firstError + // Only retry without tools.profile when the error specifically indicates the + // gateway doesn't recognize the tools/profile fields. Other errors (auth, + // network, model not found, etc.) should propagate immediately. 
+ const isToolsSchemaError = + (rawErr.includes('unknown field') || rawErr.includes('unknown key') || rawErr.includes('invalid argument')) && + (rawErr.includes('tools') || rawErr.includes('profile')) + if (!isToolsSchemaError) throw firstError const fallbackPayload = { ...spawnPayload } delete (fallbackPayload as any).tools @@ -85,6 +106,22 @@ export async function POST(request: NextRequest) { logger.error({ err: parseError }, 'Failed to parse session info') } + const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + logAuditEvent({ + action: 'agent_spawn', + actor: auth.user.username, + actor_id: auth.user.id, + detail: { + spawnId, + model, + label, + task_summary: task.length > 120 ? task.slice(0, 120) + '...' : task, + toolsProfile: getPreferredToolsProfile(), + compatibilityFallbackUsed, + }, + ip_address: ipAddress, + }) + return NextResponse.json({ success: true, spawnId, @@ -131,6 +168,9 @@ export async function GET(request: NextRequest) { const auth = requireRole(request, 'viewer') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + const rateCheck = heavyLimiter(request) + if (rateCheck) return rateCheck + try { const { searchParams } = new URL(request.url) const limit = Math.min(parseInt(searchParams.get('limit') || '50'), 200) diff --git a/src/app/api/status/route.ts b/src/app/api/status/route.ts index c8cb536..869545c 100644 --- a/src/app/api/status/route.ts +++ b/src/app/api/status/route.ts @@ -11,6 +11,9 @@ import { requireRole } from '@/lib/auth' import { MODEL_CATALOG } from '@/lib/models' import { logger } from '@/lib/logger' import { detectProviderSubscriptions, getPrimarySubscription } from '@/lib/provider-subscriptions' +import { APP_VERSION } from '@/lib/version' +import { isHermesInstalled, scanHermesSessions } from '@/lib/hermes-sessions' +import { registerMcAsDashboard } from '@/lib/gateway-runtime' export async function GET(request: 
NextRequest) { const auth = requireRole(request, 'viewer') @@ -46,7 +49,7 @@ export async function GET(request: NextRequest) { } if (action === 'capabilities') { - const capabilities = await getCapabilities() + const capabilities = await getCapabilities(request) return NextResponse.json(capabilities) } @@ -70,6 +73,58 @@ async function getDashboardData(workspaceId: number) { return { ...system, db: dbStats } } +async function getMemorySnapshot() { + const totalBytes = os.totalmem() + let availableBytes = os.freemem() + + if (process.platform === 'darwin') { + try { + const { stdout } = await runCommand('vm_stat', [], { timeoutMs: 3000 }) + const pageSizeMatch = stdout.match(/page size of (\d+) bytes/i) + const pageSize = parseInt(pageSizeMatch?.[1] || '4096', 10) + const pageLabels = ['Pages free', 'Pages inactive', 'Pages speculative', 'Pages purgeable'] + + const availablePages = pageLabels.reduce((sum, label) => { + const escapedLabel = label.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + const match = stdout.match(new RegExp(`${escapedLabel}:\\s+([\\d.]+)`, 'i')) + const pages = parseInt((match?.[1] || '0').replace(/\./g, ''), 10) + return sum + (Number.isFinite(pages) ? pages : 0) + }, 0) + + const vmAvailableBytes = availablePages * pageSize + if (vmAvailableBytes > 0) { + availableBytes = Math.min(vmAvailableBytes, totalBytes) + } + } catch { + // Fall back to os.freemem() + } + } else { + try { + const { stdout } = await runCommand('free', ['-b'], { timeoutMs: 3000 }) + const memLine = stdout.split('\n').find((line) => line.startsWith('Mem:')) + if (memLine) { + const parts = memLine.trim().split(/\s+/) + const available = parseInt(parts[6] || parts[3] || '0', 10) + if (Number.isFinite(available) && available > 0) { + availableBytes = Math.min(available, totalBytes) + } + } + } catch { + // Fall back to os.freemem() + } + } + + const usedBytes = Math.max(0, totalBytes - availableBytes) + const usagePercent = totalBytes > 0 ? 
Math.round((usedBytes / totalBytes) * 100) : 0 + + return { + totalBytes, + availableBytes, + usedBytes, + usagePercent, + } +} + function getDbStats(workspaceId: number) { try { const db = getDatabase() @@ -219,26 +274,11 @@ async function getSystemStatus(workspaceId: number) { try { // Memory info (cross-platform) - if (process.platform === 'darwin') { - const totalBytes = os.totalmem() - const freeBytes = os.freemem() - const totalMB = Math.round(totalBytes / (1024 * 1024)) - const usedMB = Math.round((totalBytes - freeBytes) / (1024 * 1024)) - const availableMB = Math.round(freeBytes / (1024 * 1024)) - status.memory = { total: totalMB, used: usedMB, available: availableMB } - } else { - const { stdout: memOutput } = await runCommand('free', ['-m'], { - timeoutMs: 3000 - }) - const memLine = memOutput.split('\n').find(line => line.startsWith('Mem:')) - if (memLine) { - const parts = memLine.split(/\s+/) - status.memory = { - total: parseInt(parts[1]) || 0, - used: parseInt(parts[2]) || 0, - available: parseInt(parts[6]) || 0 - } - } + const snapshot = await getMemorySnapshot() + status.memory = { + total: Math.round(snapshot.totalBytes / (1024 * 1024)), + used: Math.round(snapshot.usedBytes / (1024 * 1024)), + available: Math.round(snapshot.availableBytes / (1024 * 1024)), } } catch (error) { logger.error({ err: error }, 'Error getting memory info') @@ -413,11 +453,69 @@ async function getAvailableModels() { async function performHealthCheck() { const health: any = { - overall: 'healthy', + status: 'healthy', + version: APP_VERSION, + uptime: process.uptime(), checks: [], timestamp: Date.now() } + // Check DB connectivity + try { + const db = getDatabase() + const start = Date.now() + db.prepare('SELECT 1').get() + const elapsed = Date.now() - start + + let dbStatus: string + if (elapsed > 1000) { + dbStatus = 'warning' + } else { + dbStatus = 'healthy' + } + + health.checks.push({ + name: 'Database', + status: dbStatus, + message: dbStatus === 'healthy' ? 
`DB reachable (${elapsed}ms)` : `DB slow (${elapsed}ms)` + }) + } catch (error) { + health.checks.push({ + name: 'Database', + status: 'unhealthy', + message: 'DB connectivity failed' + }) + } + + // Check process memory + try { + const mem = process.memoryUsage() + const rssMB = Math.round(mem.rss / (1024 * 1024)) + let memStatus = 'healthy' + if (mem.rss > 800 * 1024 * 1024) { + memStatus = 'critical' + } else if (mem.rss > 400 * 1024 * 1024) { + memStatus = 'warning' + } + + health.checks.push({ + name: 'Process Memory', + status: memStatus, + message: `RSS: ${rssMB}MB, Heap: ${Math.round(mem.heapUsed / (1024 * 1024))}/${Math.round(mem.heapTotal / (1024 * 1024))}MB`, + detail: { + rss: mem.rss, + heapUsed: mem.heapUsed, + heapTotal: mem.heapTotal, + } + }) + } catch (error) { + health.checks.push({ + name: 'Process Memory', + status: 'error', + message: 'Failed to check process memory' + }) + } + // Check gateway connection try { const gatewayStatus = await getGatewayStatus() @@ -445,7 +543,7 @@ async function performHealthCheck() { // On macOS capacity is col 4 ("85%"), on Linux use% is col 4 as well const pctField = parts.find(p => p.endsWith('%')) || '0%' const usagePercent = parseInt(pctField.replace('%', '') || '0') - + health.checks.push({ name: 'Disk Space', status: usagePercent < 90 ? 'healthy' : usagePercent < 95 ? 
'warning' : 'critical', @@ -461,19 +559,7 @@ async function performHealthCheck() { // Check memory usage (cross-platform) try { - let usagePercent: number - if (process.platform === 'darwin') { - const totalBytes = os.totalmem() - const freeBytes = os.freemem() - usagePercent = Math.round(((totalBytes - freeBytes) / totalBytes) * 100) - } else { - const { stdout } = await runCommand('free', ['-m'], { timeoutMs: 3000 }) - const memLine = stdout.split('\n').find((line) => line.startsWith('Mem:')) - const parts = (memLine || '').split(/\s+/) - const total = parseInt(parts[1] || '0') - const available = parseInt(parts[6] || '0') - usagePercent = Math.round(((total - available) / total) * 100) - } + const usagePercent = (await getMemorySnapshot()).usagePercent health.checks.push({ name: 'Memory Usage', @@ -492,18 +578,43 @@ async function performHealthCheck() { const hasError = health.checks.some((check: any) => check.status === 'error') const hasCritical = health.checks.some((check: any) => check.status === 'critical') const hasWarning = health.checks.some((check: any) => check.status === 'warning') + const hasDegraded = health.checks.some((check: any) => + check.name === 'Database' && check.status === 'warning' + ) if (hasError || hasCritical) { - health.overall = 'unhealthy' + health.status = 'unhealthy' + } else if (hasDegraded) { + health.status = 'degraded' } else if (hasWarning) { - health.overall = 'warning' + health.status = 'warning' } return health } -async function getCapabilities() { - const gateway = await isPortOpen(config.gatewayHost, config.gatewayPort) +async function getCapabilities(request?: NextRequest) { + // Probe configured gateways (if any) or fall back to the default port. + // A DB row alone isn't enough — the gateway must actually be reachable. 
+ let gatewayReachable = false + try { + const db = getDatabase() + const table = db.prepare( + "SELECT name FROM sqlite_master WHERE type='table' AND name='gateways'" + ).get() as { name?: string } | undefined + if (table?.name) { + const rows = db.prepare('SELECT host, port FROM gateways').all() as { host: string; port: number }[] + if (rows.length > 0) { + const probes = rows.map(r => isPortOpen(r.host, Number(r.port))) + const results = await Promise.all(probes) + gatewayReachable = results.some(Boolean) + } + } + } catch { + // ignore — fall through to default probe + } + + const gateway = gatewayReachable || await isPortOpen(config.gatewayHost, config.gatewayPort) const openclawHome = Boolean( (config.openclawStateDir && existsSync(config.openclawStateDir)) || @@ -531,7 +642,62 @@ async function getCapabilities() { provider: primary.provider, } : null - return { gateway, openclawHome, claudeHome, claudeSessions, subscription, subscriptions } + // Apply subscription overrides from settings + try { + const settingsDb = getDatabase() + const planOverride = settingsDb.prepare("SELECT value FROM settings WHERE key = 'subscription.plan_override'").get() as { value: string } | undefined + if (planOverride?.value && subscription) { + subscription.type = planOverride.value + } + const codexPlan = settingsDb.prepare("SELECT value FROM settings WHERE key = 'subscription.codex_plan'").get() as { value: string } | undefined + if (codexPlan?.value) { + subscriptions['openai'] = { provider: 'openai', type: codexPlan.value, source: 'env' as const } + } + } catch { + // settings table may not exist yet + } + + const processUser = process.env.MC_DEFAULT_ORG_NAME || os.userInfo().username + + // Interface mode preference + let interfaceMode = 'essential' + try { + const settingsDb = getDatabase() + const modeRow = settingsDb.prepare("SELECT value FROM settings WHERE key = 'general.interface_mode'").get() as { value: string } | undefined + if (modeRow?.value === 'full' || 
modeRow?.value === 'essential') { + interfaceMode = modeRow.value + } + } catch { + // settings table may not exist yet + } + + const hermesInstalled = isHermesInstalled() + let hermesSessions = 0 + if (hermesInstalled) { + try { + hermesSessions = scanHermesSessions(50).filter(s => s.isActive).length + } catch { /* ignore */ } + } + + // Auto-register MC as default dashboard when gateway + openclaw home detected + let dashboardRegistration: { registered: boolean; alreadySet: boolean } | null = null + if (gateway && openclawHome) { + try { + let mcUrl = process.env.MC_BASE_URL || '' + if (!mcUrl && request) { + const host = request.headers.get('host') + const proto = request.headers.get('x-forwarded-proto') || 'http' + if (host) mcUrl = `${proto}://${host}` + } + if (mcUrl) { + dashboardRegistration = registerMcAsDashboard(mcUrl) + } + } catch (err) { + logger.error({ err }, 'Dashboard registration failed') + } + } + + return { gateway, openclawHome, claudeHome, claudeSessions, hermesInstalled, hermesSessions, subscription, subscriptions, processUser, interfaceMode, dashboardRegistration } } function isPortOpen(host: string, port: number): Promise { diff --git a/src/app/api/super/os-users/route.ts b/src/app/api/super/os-users/route.ts new file mode 100644 index 0000000..7760192 --- /dev/null +++ b/src/app/api/super/os-users/route.ts @@ -0,0 +1,430 @@ +import { NextRequest, NextResponse } from 'next/server' +import { execFileSync } from 'child_process' +import fs from 'fs' +import os from 'os' +import path from 'path' +import { requireRole, getUserFromRequest } from '@/lib/auth' +import { getDatabase, logAuditEvent } from '@/lib/db' +import { logger } from '@/lib/logger' + +export interface OsUser { + username: string + uid: number + home_dir: string + shell: string + /** Whether this OS user is already linked to a tenant in the DB */ + linked_tenant_id: number | null + /** Whether claude CLI is installed/accessible for this user */ + has_claude: boolean + /** 
Whether codex CLI is installed/accessible for this user */ + has_codex: boolean + /** Whether openclaw is installed for this user */ + has_openclaw: boolean + /** Whether this OS user is the one running the MC process (i.e. "Default" org) */ + is_process_owner: boolean +} + +// Well-known service account usernames to exclude from OS user discovery. +// These are created by package managers (Homebrew, apt, etc.) and are not real users. +const SERVICE_ACCOUNTS = new Set([ + 'postgres', 'mysql', 'redis', 'mongodb', 'memcached', 'rabbitmq', + 'elasticsearch', 'kibana', 'logstash', 'grafana', 'prometheus', + 'nginx', 'apache', 'www-data', 'httpd', 'caddy', + 'git', 'svn', 'jenkins', 'gitlab-runner', 'circleci', + 'docker', 'containerd', 'podman', + 'node', 'npm', 'yarn', + 'sshd', 'ftp', 'mail', 'postfix', 'dovecot', + 'solr', 'kafka', 'zookeeper', 'consul', 'vault', 'nomad', + 'influxdb', 'clickhouse', 'cassandra', 'couchdb', + 'puppet', 'chef', 'ansible', 'terraform', + 'ntp', 'chrony', 'systemd-network', 'systemd-resolve', +]) + +/** Check if a CLI tool (claude, codex) is accessible for a given user home dir */ +function checkToolExists(homeDir: string, tool: string): boolean { + // Check common install locations relative to user home + const candidates = [ + path.join(homeDir, '.local', 'bin', tool), + path.join(homeDir, '.npm-global', 'bin', tool), + path.join(homeDir, `.${tool}`), // e.g. ~/.claude, ~/.openclaw config dir = installed + ] + for (const p of candidates) { + try { if (fs.existsSync(p)) return true } catch {} + } + // Also check system-wide + try { + execFileSync('/usr/bin/which', [tool], { encoding: 'utf-8', timeout: 2000, stdio: 'pipe' }) + return true + } catch {} + return false +} + +/** Install a tool (openclaw, claude, codex) for a given OS user. Non-fatal — returns success/error. 
*/ +function installToolForUser( + homeDir: string, + username: string, + tool: 'openclaw' | 'claude' | 'codex' +): { success: boolean; error?: string } { + try { + if (tool === 'openclaw') { + // openclaw is managed by MC — create dir structure + install latest from npm + const openclawDir = path.join(homeDir, '.openclaw') + const workspaceDir = path.join(homeDir, 'workspace') + for (const dir of [openclawDir, workspaceDir]) { + try { + execFileSync('/usr/bin/sudo', ['-n', 'install', '-d', '-o', username, dir], { timeout: 5000, stdio: 'pipe' }) + } catch { + // Fallback: mkdir directly (works if running as that user or root) + fs.mkdirSync(dir, { recursive: true }) + } + } + // Install latest openclaw from GitHub (always latest) with npm fallback + try { + execFileSync('/usr/bin/sudo', ['-n', '-u', username, 'npm', 'install', '-g', 'openclaw/openclaw'], { + timeout: 120000, + stdio: 'pipe', + env: { ...process.env, HOME: homeDir }, + }) + } catch (npmErr: any) { + // Dir structure created but npm install failed — still partially useful + const msg = npmErr?.stderr?.toString?.()?.slice(0, 200) || npmErr?.message || 'npm install failed' + logger.warn({ tool, username, err: msg }, 'openclaw npm install failed, dir structure created') + return { success: true, error: `dirs created but npm install failed: ${msg}` } + } + return { success: true } + } + + if (tool === 'claude') { + // Install claude code CLI globally for the user + try { + execFileSync('/usr/bin/sudo', ['-n', '-u', username, 'npm', 'install', '-g', '@anthropic-ai/claude-code@latest'], { + timeout: 120000, + stdio: 'pipe', + env: { ...process.env, HOME: homeDir }, + }) + } catch (npmErr: any) { + // Fallback: create config dir so checkToolExists detects it + const claudeDir = path.join(homeDir, '.claude') + try { + execFileSync('/usr/bin/sudo', ['-n', 'install', '-d', '-o', username, claudeDir], { timeout: 5000, stdio: 'pipe' }) + } catch { + fs.mkdirSync(claudeDir, { recursive: true }) + } + const msg = 
npmErr?.stderr?.toString?.()?.slice(0, 200) || npmErr?.message || 'npm install failed' + return { success: false, error: msg } + } + return { success: true } + } + + if (tool === 'codex') { + // Install codex CLI globally for the user + try { + execFileSync('/usr/bin/sudo', ['-n', '-u', username, 'npm', 'install', '-g', '@openai/codex@latest'], { + timeout: 120000, + stdio: 'pipe', + env: { ...process.env, HOME: homeDir }, + }) + } catch (npmErr: any) { + // Fallback: create config dir so checkToolExists detects it + const codexDir = path.join(homeDir, '.codex') + try { + execFileSync('/usr/bin/sudo', ['-n', 'install', '-d', '-o', username, codexDir], { timeout: 5000, stdio: 'pipe' }) + } catch { + fs.mkdirSync(codexDir, { recursive: true }) + } + const msg = npmErr?.stderr?.toString?.()?.slice(0, 200) || npmErr?.message || 'npm install failed' + return { success: false, error: msg } + } + return { success: true } + } + + return { success: false, error: `Unknown tool: ${tool}` } + } catch (e: any) { + return { success: false, error: e?.message || 'Unknown error' } + } +} + +/** + * Discover real (non-system, non-service) OS-level user accounts. + * macOS: dscl (Directory Services) + * Linux: getent passwd + * + * Uses execFileSync (no shell) to prevent command injection. 
+ */ +function discoverOsUsers(): OsUser[] { + const platform = os.platform() + const users: OsUser[] = [] + + try { + if (platform === 'darwin') { + // macOS: list users + UIDs via dscl (no shell needed) + const raw = execFileSync('/usr/bin/dscl', ['.', 'list', '/Users', 'UniqueID'], { encoding: 'utf-8', timeout: 5000 }) + for (const line of raw.split('\n')) { + const match = line.match(/^(\S+)\s+(\d+)$/) + if (!match) continue + const [, username, uidStr] = match + const uid = parseInt(uidStr, 10) + // Skip system accounts (uid < 500 on macOS), special users, and known service accounts + if (uid < 500 || username.startsWith('_') || username === 'nobody' || username === 'root' || username === 'daemon') continue + if (SERVICE_ACCOUNTS.has(username)) continue + + let homeDir = `/Users/${username}` + let shell = '/bin/zsh' + try { + const info = execFileSync('/usr/bin/dscl', ['.', 'read', `/Users/${username}`, 'NFSHomeDirectory', 'UserShell'], { encoding: 'utf-8', timeout: 3000 }) + const homeMatch = info.match(/NFSHomeDirectory:\s*(.+)/) + const shellMatch = info.match(/UserShell:\s*(.+)/) + if (homeMatch) homeDir = homeMatch[1].trim() + if (shellMatch) shell = shellMatch[1].trim() + } catch {} + + const hasClaude = checkToolExists(homeDir, 'claude') + const hasCodex = checkToolExists(homeDir, 'codex') + const hasOpenclaw = checkToolExists(homeDir, 'openclaw') + users.push({ username, uid, home_dir: homeDir, shell, linked_tenant_id: null, has_claude: hasClaude, has_codex: hasCodex, has_openclaw: hasOpenclaw, is_process_owner: false }) + } + } else if (platform === 'linux') { + // Linux: getent passwd returns colon-separated fields (no shell needed) + const raw = execFileSync('/usr/bin/getent', ['passwd'], { encoding: 'utf-8', timeout: 5000 }) + for (const line of raw.split('\n')) { + const parts = line.split(':') + if (parts.length < 7) continue + const [username, , uidStr, , , homeDir, shell] = parts + const uid = parseInt(uidStr, 10) + // Skip system accounts (uid 
< 1000 on Linux), nfsnobody, and known service accounts + if (uid < 1000 || username === 'nobody' || username === 'nfsnobody') continue + if (SERVICE_ACCOUNTS.has(username)) continue + // Skip users with non-interactive shells (service accounts that slipped through) + if (shell.endsWith('/nologin') || shell.endsWith('/false')) continue + + const hasClaude = checkToolExists(homeDir, 'claude') + const hasCodex = checkToolExists(homeDir, 'codex') + const hasOpenclaw = checkToolExists(homeDir, 'openclaw') + users.push({ username, uid, home_dir: homeDir, shell, linked_tenant_id: null, has_claude: hasClaude, has_codex: hasCodex, has_openclaw: hasOpenclaw, is_process_owner: false }) + } + } + } catch { + // If discovery fails (permissions, missing binary), return empty + } + + return users.sort((a, b) => a.uid - b.uid) +} + +/** + * GET /api/super/os-users - Discover OS-level user accounts (admin only) + * + * Returns discovered OS users cross-referenced with existing tenants. + * Users already linked to a tenant have linked_tenant_id set. + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const users = discoverOsUsers() + + // Mark the OS user that owns the MC process (represented by "Default" org) + const processHomeDir = os.homedir() + for (const user of users) { + if (user.home_dir === processHomeDir) { + user.is_process_owner = true + } + } + + // Cross-reference with existing tenants to mark linked ones + try { + const { listTenants } = await import('@/lib/super-admin') + const tenants = listTenants() + const tenantByLinuxUser = new Map(tenants.map(t => [t.linux_user, t.id])) + for (const user of users) { + user.linked_tenant_id = tenantByLinuxUser.get(user.username) ?? 
null + } + } catch {} + + return NextResponse.json({ users, platform: os.platform() }) +} + +/** + * POST /api/super/os-users - Create a new OS-level user and register as tenant (admin only) + * + * Local mode: creates OS user + home dir, registers in tenants table as active + * Gateway mode: creates OS user + delegates to full bootstrap pipeline (openclaw + workspace + agents) + * + * Body: { username, display_name, password?, gateway_mode?: boolean, gateway_port?, owner_gateway? } + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const currentUser = getUserFromRequest(request) + const actor = currentUser?.username || 'system' + + let body: any + try { body = await request.json() } catch { + return NextResponse.json({ error: 'Request body required' }, { status: 400 }) + } + + const username = String(body.username || '').trim().toLowerCase() + const displayName = String(body.display_name || '').trim() + const password = body.password ? String(body.password) : undefined + const gatewayMode = !!body.gateway_mode + const installOpenclaw = !!body.install_openclaw + const installClaude = !!body.install_claude + const installCodex = !!body.install_codex + + // Validate username (safe for OS user creation — alphanumeric + dash/underscore) + if (!/^[a-z][a-z0-9_-]{1,30}[a-z0-9]$/.test(username)) { + return NextResponse.json({ error: 'Invalid username. Use lowercase letters, numbers, dashes, and underscores (3-32 chars).' 
}, { status: 400 }) + } + if (!displayName) { + return NextResponse.json({ error: 'display_name is required' }, { status: 400 }) + } + if (SERVICE_ACCOUNTS.has(username)) { + return NextResponse.json({ error: 'Cannot use a reserved service account name' }, { status: 400 }) + } + + // Check if user already exists on OS + const existingUsers = discoverOsUsers() + const alreadyExists = existingUsers.some(u => u.username === username) + + // Check if already registered as tenant + const db = getDatabase() + const existingTenant = db.prepare('SELECT id FROM tenants WHERE linux_user = ? OR slug = ?').get(username, username) as any + if (existingTenant) { + return NextResponse.json({ error: 'This user is already registered as an organization' }, { status: 409 }) + } + + const platform = os.platform() + + // Gateway mode: delegate to full provisioning pipeline + if (gatewayMode) { + try { + const { createTenantAndBootstrapJob } = await import('@/lib/super-admin') + const result = createTenantAndBootstrapJob({ + slug: username, + display_name: displayName, + linux_user: username, + gateway_port: body.gateway_port ? 
Number(body.gateway_port) : undefined, + owner_gateway: body.owner_gateway || undefined, + dry_run: body.dry_run !== false, + config: { install_openclaw: installOpenclaw, install_claude: installClaude, install_codex: installCodex }, + }, actor) + return NextResponse.json(result, { status: 201 }) + } catch (e: any) { + return NextResponse.json({ error: e?.message || 'Failed to create tenant bootstrap job' }, { status: 400 }) + } + } + + // Local mode: create OS user directly + register in tenants table + try { + if (!alreadyExists) { + if (platform === 'darwin') { + // macOS: use sysadminctl to create user (requires admin/sudo) + const args = ['-addUser', username, '-fullName', displayName, '-home', `/Users/${username}`] + if (password) { + args.push('-password', password) + } else { + args.push('-password', '') // empty password, can be set later + } + try { + execFileSync('/usr/sbin/sysadminctl', args, { timeout: 15000, stdio: 'pipe' }) + } catch (e: any) { + // sysadminctl may need sudo — try with sudo + try { + execFileSync('/usr/bin/sudo', ['-n', '/usr/sbin/sysadminctl', ...args], { timeout: 15000, stdio: 'pipe' }) + } catch (sudoErr: any) { + const msg = sudoErr?.stderr?.toString?.() || sudoErr?.message || 'Failed to create OS user' + logger.error({ err: sudoErr }, 'Failed to create macOS user') + return NextResponse.json({ + error: `Failed to create OS user. This requires admin privileges. 
${msg}`, + hint: 'Run Mission Control with sudo or grant the current user admin rights.', + }, { status: 500 }) + } + } + } else if (platform === 'linux') { + // Linux: useradd + const args = ['-m', '-s', '/bin/bash', '-c', displayName, username] + try { + execFileSync('/usr/bin/sudo', ['-n', '/usr/sbin/useradd', ...args], { timeout: 15000, stdio: 'pipe' }) + } catch (e: any) { + const msg = e?.stderr?.toString?.() || e?.message || 'Failed to create OS user' + logger.error({ err: e }, 'Failed to create Linux user') + return NextResponse.json({ + error: `Failed to create OS user: ${msg}`, + hint: 'Ensure the MC process user has passwordless sudo for useradd.', + }, { status: 500 }) + } + + // Set password if provided + if (password) { + try { + execFileSync('/usr/bin/sudo', ['-n', '/usr/sbin/chpasswd'], { + timeout: 5000, + input: `${username}:${password}`, + stdio: ['pipe', 'pipe', 'pipe'], + }) + } catch { + // Non-critical — user created but password not set + } + } + } else { + return NextResponse.json({ error: `OS user creation not supported on ${platform}` }, { status: 400 }) + } + } + + // Determine home directory for the new user + const homeDir = platform === 'darwin' ? 
`/Users/${username}` : `/home/${username}` + const openclawHome = path.posix.join(homeDir, '.openclaw') + const workspaceRoot = path.posix.join(homeDir, 'workspace') + + // Register as tenant in DB + const tenantRes = db.prepare(` + INSERT INTO tenants (slug, display_name, linux_user, plan_tier, status, openclaw_home, workspace_root, gateway_port, dashboard_port, config, created_by, owner_gateway) + VALUES (?, ?, ?, 'local', 'active', ?, ?, NULL, NULL, '{}', ?, 'local') + `).run(username, displayName, username, openclawHome, workspaceRoot, actor) + + const tenantId = Number(tenantRes.lastInsertRowid) + + logAuditEvent({ + action: 'tenant_local_created', + actor, + target_type: 'tenant', + target_id: tenantId, + detail: { username, display_name: displayName, os_user_existed: alreadyExists, platform }, + }) + + const tenant = db.prepare('SELECT * FROM tenants WHERE id = ?').get(tenantId) + + // Install requested tools (non-fatal) + const installResults: Record = {} + const toolsToInstall: Array<'openclaw' | 'claude' | 'codex'> = [] + if (installOpenclaw) toolsToInstall.push('openclaw') + // When openclaw is selected, claude+codex are bundled — skip separate installs + if (installClaude && !installOpenclaw) toolsToInstall.push('claude') + if (installCodex && !installOpenclaw) toolsToInstall.push('codex') + + for (const tool of toolsToInstall) { + installResults[tool] = installToolForUser(homeDir, username, tool) + } + + const installSummary = Object.entries(installResults) + .map(([tool, r]) => r.success ? `${tool} installed` : `${tool} failed: ${r.error}`) + .join('. ') + + const baseMsg = alreadyExists + ? `OS user "${username}" already existed. Registered as organization.` + : `OS user "${username}" created and registered as organization.` + + return NextResponse.json({ + tenant, + os_user_created: !alreadyExists, + install_results: Object.keys(installResults).length > 0 ? installResults : undefined, + message: installSummary ? 
`${baseMsg} ${installSummary}.` : baseMsg, + }, { status: 201 }) + } catch (e: any) { + if (String(e?.message || '').includes('UNIQUE')) { + return NextResponse.json({ error: 'Organization slug or user already exists' }, { status: 409 }) + } + logger.error({ err: e }, 'POST /api/super/os-users error') + return NextResponse.json({ error: e?.message || 'Failed to create organization' }, { status: 500 }) + } +} diff --git a/src/app/api/tasks/[id]/branch/route.ts b/src/app/api/tasks/[id]/branch/route.ts new file mode 100644 index 0000000..b9166ff --- /dev/null +++ b/src/app/api/tasks/[id]/branch/route.ts @@ -0,0 +1,241 @@ +import { NextRequest, NextResponse } from 'next/server' +import { getDatabase, db_helpers } from '@/lib/db' +import { eventBus } from '@/lib/event-bus' +import { requireRole } from '@/lib/auth' +import { mutationLimiter } from '@/lib/rate-limit' +import { logger } from '@/lib/logger' +import { createRef, getRef, fetchPullRequests, createPullRequest } from '@/lib/github' + +function slugify(title: string, maxLen: number): string { + return title + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, '') + .slice(0, maxLen) + .replace(/-$/, '') +} + +/** + * GET /api/tasks/[id]/branch - Get branch and PR status for a task + */ +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const resolvedParams = await params + const taskId = parseInt(resolvedParams.id) + const workspaceId = auth.user.workspace_id ?? 
1 + + if (isNaN(taskId)) { + return NextResponse.json({ error: 'Invalid task ID' }, { status: 400 }) + } + + const task = db.prepare(` + SELECT t.*, p.github_repo, p.github_default_branch, p.ticket_prefix + FROM tasks t + LEFT JOIN projects p ON p.id = t.project_id AND p.workspace_id = t.workspace_id + WHERE t.id = ? AND t.workspace_id = ? + `).get(taskId, workspaceId) as any + + if (!task) { + return NextResponse.json({ error: 'Task not found' }, { status: 404 }) + } + + const result: Record = { + branch: task.github_branch || null, + pr_number: task.github_pr_number || null, + pr_state: task.github_pr_state || null, + repo: task.github_repo || null, + } + + // If task has a branch but no PR info, check GitHub (fire-and-forget) + if (task.github_branch && !task.github_pr_number && task.github_repo) { + const repo = task.github_repo as string + const branch = task.github_branch as string + fetchPullRequests(repo, { head: branch, state: 'all' }) + .then((prs) => { + if (prs.length > 0) { + const pr = prs[0] + db.prepare(` + UPDATE tasks SET github_pr_number = ?, github_pr_state = ?, updated_at = ? + WHERE id = ? AND workspace_id = ? + `).run(pr.number, pr.state, Math.floor(Date.now() / 1000), taskId, workspaceId) + } + }) + .catch((err) => { + logger.warn({ err }, 'Failed to check PRs for task branch') + }) + } + + return NextResponse.json(result) + } catch (error) { + logger.error({ err: error }, 'GET /api/tasks/[id]/branch error') + return NextResponse.json({ error: 'Failed to fetch branch info' }, { status: 500 }) + } +} + +/** + * POST /api/tasks/[id]/branch - Create a branch or PR for a task + * + * Body: {} to create a branch + * Body: { action: 'create-pr', base?, title?, body? 
} to create a PR + */ +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'operator') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + try { + const db = getDatabase() + const resolvedParams = await params + const taskId = parseInt(resolvedParams.id) + const workspaceId = auth.user.workspace_id ?? 1 + + if (isNaN(taskId)) { + return NextResponse.json({ error: 'Invalid task ID' }, { status: 400 }) + } + + const task = db.prepare(` + SELECT t.*, p.github_repo, p.github_default_branch, p.ticket_prefix + FROM tasks t + LEFT JOIN projects p ON p.id = t.project_id AND p.workspace_id = t.workspace_id + WHERE t.id = ? AND t.workspace_id = ? + `).get(taskId, workspaceId) as any + + if (!task) { + return NextResponse.json({ error: 'Task not found' }, { status: 404 }) + } + + if (!task.github_repo) { + return NextResponse.json( + { error: 'Task project does not have a GitHub repo configured' }, + { status: 400 } + ) + } + + const repo = task.github_repo as string + const defaultBranch = (task.github_default_branch as string) || 'main' + + let body: Record = {} + try { + body = await request.json() + } catch { + // empty body is fine for branch creation + } + + // --- Create PR --- + if (body.action === 'create-pr') { + if (!task.github_branch) { + return NextResponse.json( + { error: 'Task does not have a branch yet. Create a branch first.' }, + { status: 400 } + ) + } + + const prTitle = (body.title as string) || `${task.ticket_prefix ? 
task.ticket_prefix + ': ' : ''}${task.title}` + const prBody = (body.body as string) || `Resolves task #${taskId}` + const prBase = (body.base as string) || defaultBranch + + const pr = await createPullRequest(repo, { + title: prTitle, + head: task.github_branch, + base: prBase, + body: prBody, + }) + + const now = Math.floor(Date.now() / 1000) + db.prepare(` + UPDATE tasks SET github_pr_number = ?, github_pr_state = 'open', updated_at = ? + WHERE id = ? AND workspace_id = ? + `).run(pr.number, now, taskId, workspaceId) + + db_helpers.logActivity( + 'task_updated', + 'task', + taskId, + auth.user.username, + `Created PR #${pr.number} for task`, + { pr_number: pr.number, pr_url: pr.html_url }, + workspaceId + ) + + eventBus.broadcast('task.updated', { + id: taskId, + github_pr_number: pr.number, + github_pr_state: 'open', + }) + + return NextResponse.json({ + pr_number: pr.number, + pr_url: pr.html_url, + branch: task.github_branch, + }) + } + + // --- Create Branch --- + + // Idempotent: if branch already exists, return it + if (task.github_branch) { + return NextResponse.json({ + branch: task.github_branch, + url: `https://github.com/${repo}/tree/${task.github_branch}`, + }) + } + + // Build branch name: feat/{prefix}-{issue_or_id}-{slug} + const prefix = task.ticket_prefix + ? (task.ticket_prefix as string).toLowerCase() + : 'task' + const identifier = task.github_issue_number || taskId + const basePrefix = `feat/${prefix}-${identifier}-` + const maxSlugLen = 60 - basePrefix.length + const slug = slugify(task.title || 'untitled', Math.max(maxSlugLen, 1)) + const branchName = `${basePrefix}${slug}`.slice(0, 60) + + // Get base branch SHA + const { sha } = await getRef(repo, `heads/${defaultBranch}`) + + // Create the branch + await createRef(repo, `refs/heads/${branchName}`, sha) + + const now = Math.floor(Date.now() / 1000) + db.prepare(` + UPDATE tasks SET github_branch = ?, updated_at = ? + WHERE id = ? AND workspace_id = ? 
+ `).run(branchName, now, taskId, workspaceId) + + db_helpers.logActivity( + 'task_updated', + 'task', + taskId, + auth.user.username, + `Created branch ${branchName} for task`, + { branch: branchName, repo }, + workspaceId + ) + + eventBus.broadcast('task.updated', { + id: taskId, + github_branch: branchName, + }) + + return NextResponse.json({ + branch: branchName, + url: `https://github.com/${repo}/tree/${branchName}`, + }) + } catch (error) { + logger.error({ err: error }, 'POST /api/tasks/[id]/branch error') + const message = error instanceof Error ? error.message : 'Failed to create branch' + return NextResponse.json({ error: message }, { status: 500 }) + } +} diff --git a/src/app/api/tasks/[id]/comments/route.ts b/src/app/api/tasks/[id]/comments/route.ts index 83b1c00..2ed5e20 100644 --- a/src/app/api/tasks/[id]/comments/route.ts +++ b/src/app/api/tasks/[id]/comments/route.ts @@ -109,9 +109,31 @@ export async function POST( const result = await validateBody(request, createCommentSchema); if ('error' in result) return result.error; - const { content, parent_id } = result.data; + const { content: rawContent, parent_id } = result.data; const author = auth.user.display_name || auth.user.username || 'system'; - + + // Normalize agent payload JSON — extract text from OpenClaw result format + let content = rawContent; + try { + const stripped = rawContent.replace(/\x1b\[[0-9;]*m/g, '').replace(/\[3[0-9]m/g, '').replace(/\[39m/g, ''); + const parsed = JSON.parse(stripped); + if (parsed && typeof parsed === 'object' && Array.isArray(parsed.payloads)) { + const text = parsed.payloads + .map((p: any) => (typeof p === 'string' ? p : p?.text || '').trim()) + .filter(Boolean) + .join('\n'); + if (text) { + const meta = parsed.meta?.agentMeta; + const metaLine = meta + ? `\n\n_${[meta.model, meta.usage?.total ? `${meta.usage.total} tokens` : '', parsed.meta?.durationMs ? 
`${(parsed.meta.durationMs / 1000).toFixed(1)}s` : ''].filter(Boolean).join(' · ')}_` + : ''; + content = text + metaLine; + } + } + } catch { + // Not JSON — keep original content + } + // Verify task exists const task = db .prepare('SELECT * FROM tasks WHERE id = ? AND workspace_id = ?') diff --git a/src/app/api/tasks/[id]/route.ts b/src/app/api/tasks/[id]/route.ts index 40bf44f..c6afb6a 100644 --- a/src/app/api/tasks/[id]/route.ts +++ b/src/app/api/tasks/[id]/route.ts @@ -7,6 +7,7 @@ import { logger } from '@/lib/logger'; import { validateBody, updateTaskSchema } from '@/lib/validation'; import { resolveMentionRecipients } from '@/lib/mentions'; import { normalizeTaskUpdateStatus } from '@/lib/task-status'; +import { pushTaskToGitHub } from '@/lib/github-sync-engine'; function formatTicketRef(prefix?: string | null, num?: number | null): string | undefined { if (!prefix || typeof num !== 'number' || !Number.isFinite(num) || num <= 0) return undefined @@ -385,6 +386,22 @@ export async function PUT( `).get(taskId, workspaceId) as Task; const parsedTask = mapTaskRow(updatedTask); + // Fire-and-forget outbound GitHub sync for relevant changes + const syncRelevantChanges = changes.some(c => + c.startsWith('status:') || c.startsWith('priority:') || c.includes('title') || c.includes('assigned') + ) + if (syncRelevantChanges && (updatedTask as any).github_repo) { + const project = db.prepare(` + SELECT id, github_repo, github_sync_enabled FROM projects + WHERE id = ? AND workspace_id = ? 
+ `).get((updatedTask as any).project_id, workspaceId) as any + if (project?.github_sync_enabled) { + pushTaskToGitHub(updatedTask as any, project).catch(err => + logger.error({ err, taskId }, 'Outbound GitHub sync failed') + ) + } + } + // Broadcast to SSE clients eventBus.broadcast('task.updated', parsedTask); diff --git a/src/app/api/tasks/queue/route.ts b/src/app/api/tasks/queue/route.ts index f0aa68c..707b9cd 100644 --- a/src/app/api/tasks/queue/route.ts +++ b/src/app/api/tasks/queue/route.ts @@ -1,6 +1,7 @@ import { NextRequest, NextResponse } from 'next/server' import { getDatabase } from '@/lib/db' import { requireRole } from '@/lib/auth' +import { agentTaskLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' type QueueReason = 'continue_current' | 'assigned' | 'at_capacity' | 'no_tasks_available' @@ -45,6 +46,9 @@ export async function GET(request: NextRequest) { const auth = requireRole(request, 'operator') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + const rateLimited = agentTaskLimiter(request) + if (rateLimited) return rateLimited + try { const db = getDatabase() const workspaceId = auth.user.workspace_id diff --git a/src/app/api/tasks/route.ts b/src/app/api/tasks/route.ts index 0cc366c..2a18acb 100644 --- a/src/app/api/tasks/route.ts +++ b/src/app/api/tasks/route.ts @@ -7,6 +7,7 @@ import { logger } from '@/lib/logger'; import { validateBody, createTaskSchema, bulkUpdateTaskStatusSchema } from '@/lib/validation'; import { resolveMentionRecipients } from '@/lib/mentions'; import { normalizeTaskCreateStatus } from '@/lib/task-status'; +import { pushTaskToGitHub } from '@/lib/github-sync-engine'; function formatTicketRef(prefix?: string | null, num?: number | null): string | undefined { if (!prefix || typeof num !== 'number' || !Number.isFinite(num) || num <= 0) return undefined @@ -304,6 +305,19 @@ export async function POST(request: NextRequest) { `).get(taskId, workspaceId) as Task; 
const parsedTask = mapTaskRow(createdTask); + // Fire-and-forget outbound GitHub sync for new tasks + if (parsedTask.project_id) { + const project = db.prepare(` + SELECT id, github_repo, github_sync_enabled FROM projects + WHERE id = ? AND workspace_id = ? + `).get(parsedTask.project_id, workspaceId) as any + if (project?.github_sync_enabled && project?.github_repo) { + pushTaskToGitHub(parsedTask as any, project).catch(err => + logger.error({ err, taskId }, 'Outbound GitHub sync failed for new task') + ) + } + } + // Broadcast to SSE clients eventBus.broadcast('task.created', parsedTask); diff --git a/src/app/api/tokens/by-agent/route.ts b/src/app/api/tokens/by-agent/route.ts new file mode 100644 index 0000000..8d2e2ef --- /dev/null +++ b/src/app/api/tokens/by-agent/route.ts @@ -0,0 +1,148 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getDatabase } from '@/lib/db' +import { calculateTokenCost } from '@/lib/token-pricing' +import { getProviderSubscriptionFlags } from '@/lib/provider-subscriptions' +import { logger } from '@/lib/logger' + +interface AgentBreakdownRow { + agent_name: string + total_input_tokens: number + total_output_tokens: number + session_count: number + request_count: number + last_active: number + models_json: string +} + +interface ModelBreakdown { + model: string + input_tokens: number + output_tokens: number + request_count: number + cost: number +} + +interface AgentBreakdown { + agent: string + total_input_tokens: number + total_output_tokens: number + total_tokens: number + total_cost: number + session_count: number + request_count: number + last_active: string + models: ModelBreakdown[] +} + +/** + * GET /api/tokens/by-agent - Per-agent cost breakdown from token_usage table + * Query params: + * days=N - Time window in days (default 30) + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return 
NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const { searchParams } = new URL(request.url) + const days = Math.max(1, Math.min(365, Number(searchParams.get('days') || 30))) + const workspaceId = auth.user.workspace_id ?? 1 + + const db = getDatabase() + const cutoff = Math.floor(Date.now() / 1000) - days * 86400 + const providerSubscriptions = getProviderSubscriptionFlags() + + // Query per-agent totals with per-model breakdown embedded as JSON + const rows = db.prepare(` + SELECT + CASE + WHEN INSTR(session_id, ':') > 0 THEN SUBSTR(session_id, 1, INSTR(session_id, ':') - 1) + ELSE session_id + END AS agent_name, + SUM(input_tokens) AS total_input_tokens, + SUM(output_tokens) AS total_output_tokens, + COUNT(DISTINCT session_id) AS session_count, + COUNT(*) AS request_count, + MAX(created_at) AS last_active, + GROUP_CONCAT(DISTINCT model) AS models_json + FROM token_usage + WHERE workspace_id = ? + AND created_at >= ? + GROUP BY agent_name + ORDER BY (SUM(input_tokens) + SUM(output_tokens)) DESC + `).all(workspaceId, cutoff) as AgentBreakdownRow[] + + // For accurate per-model cost we need a second pass grouping by agent+model + const modelRows = db.prepare(` + SELECT + CASE + WHEN INSTR(session_id, ':') > 0 THEN SUBSTR(session_id, 1, INSTR(session_id, ':') - 1) + ELSE session_id + END AS agent_name, + model, + SUM(input_tokens) AS input_tokens, + SUM(output_tokens) AS output_tokens, + COUNT(*) AS request_count + FROM token_usage + WHERE workspace_id = ? + AND created_at >= ? 
+ GROUP BY agent_name, model + ORDER BY agent_name, (SUM(input_tokens) + SUM(output_tokens)) DESC + `).all(workspaceId, cutoff) as Array<{ + agent_name: string + model: string + input_tokens: number + output_tokens: number + request_count: number + }> + + // Build model map keyed by agent name + const modelsByAgent = new Map() + for (const row of modelRows) { + const cost = calculateTokenCost(row.model, row.input_tokens, row.output_tokens, { providerSubscriptions }) + const list = modelsByAgent.get(row.agent_name) || [] + list.push({ + model: row.model, + input_tokens: row.input_tokens, + output_tokens: row.output_tokens, + request_count: row.request_count, + cost, + }) + modelsByAgent.set(row.agent_name, list) + } + + // Assemble final response + const agents: AgentBreakdown[] = rows.map((row) => { + const models = modelsByAgent.get(row.agent_name) || [] + const totalCost = models.reduce((sum, m) => sum + m.cost, 0) + return { + agent: row.agent_name, + total_input_tokens: row.total_input_tokens, + total_output_tokens: row.total_output_tokens, + total_tokens: row.total_input_tokens + row.total_output_tokens, + total_cost: totalCost, + session_count: row.session_count, + request_count: row.request_count, + last_active: new Date(row.last_active * 1000).toISOString(), + models, + } + }) + + const totalCost = agents.reduce((sum, a) => sum + a.total_cost, 0) + const totalTokens = agents.reduce((sum, a) => sum + a.total_tokens, 0) + + return NextResponse.json({ + agents, + summary: { + total_cost: totalCost, + total_tokens: totalTokens, + agent_count: agents.length, + days, + }, + }) + } catch (error) { + logger.error({ err: error }, 'GET /api/tokens/by-agent error') + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/src/app/api/tokens/rotate/route.ts b/src/app/api/tokens/rotate/route.ts new file mode 100644 index 0000000..cb09ced --- /dev/null +++ b/src/app/api/tokens/rotate/route.ts @@ -0,0 +1,122 @@ +import { 
 NextRequest, NextResponse } from 'next/server' +import { randomBytes } from 'crypto' +import { requireRole } from '@/lib/auth' +import { getDatabase, logAuditEvent } from '@/lib/db' +import { mutationLimiter } from '@/lib/rate-limit' + +interface ApiKeyRow { + value: string + updated_by: string | null + updated_at: number +} + +/** + * Mask an API key for display: show first 4 and last 5 chars. + * e.g. "mc_a1b2c3d4e5f6g7h8i9j0" -> "mc_a-****-****-8i9j0" + */ +function maskApiKey(key: string): string { + if (key.length <= 9) return '****' + return key.slice(0, 4) + '-****-****-' + key.slice(-5) +} + +/** + * GET /api/tokens/rotate - Get metadata about the current API key + */ +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const db = getDatabase() + + // Check for DB-stored override first + const row = db.prepare( + "SELECT value, updated_by, updated_at FROM settings WHERE key = 'security.api_key'" + ).get() as ApiKeyRow | undefined + + if (row) { + return NextResponse.json({ + masked_key: maskApiKey(row.value), + source: 'database', + last_rotated_at: row.updated_at, + last_rotated_by: row.updated_by, + }) + } + + // Fall back to env var + const envKey = (process.env.API_KEY || '').trim() + if (envKey) { + return NextResponse.json({ + masked_key: maskApiKey(envKey), + source: 'environment', + last_rotated_at: null, + last_rotated_by: null, + }) + } + + return NextResponse.json({ + masked_key: null, + source: 'none', + last_rotated_at: null, + last_rotated_by: null, + }) +} + +/** + * POST /api/tokens/rotate - Generate and store a new API key + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + + // 
Generate a new key: mc_ prefix + 48 random hex chars (24 random bytes) + const newKey = 'mc_' + randomBytes(24).toString('hex') + + const db = getDatabase() + + // Get old key info for audit trail + const existing = db.prepare( + "SELECT value FROM settings WHERE key = 'security.api_key'" + ).get() as { value: string } | undefined + + const oldSource = existing ? 'database' : (process.env.API_KEY || '').trim() ? 'environment' : 'none' + const oldMasked = existing + ? maskApiKey(existing.value) + : (process.env.API_KEY || '').trim() + ? maskApiKey((process.env.API_KEY || '').trim()) + : null + + // Store new key in settings table (overrides env var) + db.prepare(` + INSERT INTO settings (key, value, description, category, updated_by, updated_at) + VALUES ('security.api_key', ?, 'Active API key (overrides API_KEY env var)', 'security', ?, unixepoch()) + ON CONFLICT(key) DO UPDATE SET + value = excluded.value, + updated_by = excluded.updated_by, + updated_at = unixepoch() + `).run(newKey, auth.user.username) + + // Audit log + const ipAddress = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown' + logAuditEvent({ + action: 'api_key_rotated', + actor: auth.user.username, + actor_id: auth.user.id, + detail: { + old_source: oldSource, + old_key_masked: oldMasked, + new_key_masked: maskApiKey(newKey), + }, + ip_address: ipAddress, + }) + + return NextResponse.json({ + key: newKey, + masked_key: maskApiKey(newKey), + rotated_at: Math.floor(Date.now() / 1000), + rotated_by: auth.user.username, + message: 'API key rotated successfully. Copy the key now — it will not be shown again.', + }) +} diff --git a/src/app/api/tokens/route.ts b/src/app/api/tokens/route.ts index 87e58cc..1a33c28 100644 --- a/src/app/api/tokens/route.ts +++ b/src/app/api/tokens/route.ts @@ -170,19 +170,16 @@ async function loadTokenDataFromFile(workspaceId: number, providerSubscriptions: } /** - * Load token data from persistent file, falling back to deriving from session stores. 
+ * Load token data from all sources: DB, file, and gateway session stores. + * All sources are merged and deduplicated so session-derived data is always included. */ async function loadTokenData(workspaceId: number): Promise { const providerSubscriptions = getProviderSubscriptionFlags() const dbRecords = loadTokenDataFromDb(workspaceId, providerSubscriptions) const fileRecords = await loadTokenDataFromFile(workspaceId, providerSubscriptions) - const combined = dedupeTokenRecords([...dbRecords, ...fileRecords]).sort((a, b) => b.timestamp - a.timestamp) - if (combined.length > 0) { - return combined - } - - // Final fallback: derive from in-memory sessions - return deriveFromSessions(workspaceId, providerSubscriptions) + const sessionRecords = deriveFromSessions(workspaceId, providerSubscriptions) + return dedupeTokenRecords([...dbRecords, ...fileRecords, ...sessionRecords]) + .sort((a, b) => b.timestamp - a.timestamp) } /** diff --git a/src/app/api/webhooks/route.ts b/src/app/api/webhooks/route.ts index 08233b2..235ce60 100644 --- a/src/app/api/webhooks/route.ts +++ b/src/app/api/webhooks/route.ts @@ -6,6 +6,32 @@ import { mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' import { validateBody, createWebhookSchema } from '@/lib/validation' +const WEBHOOK_BLOCKED_HOSTNAMES = new Set([ + 'localhost', '127.0.0.1', '::1', '0.0.0.0', + 'metadata.google.internal', 'metadata.internal', 'instance-data', +]) + +function isBlockedWebhookUrl(urlStr: string): boolean { + try { + const url = new URL(urlStr) + const hostname = url.hostname + if (WEBHOOK_BLOCKED_HOSTNAMES.has(hostname)) return true + if (hostname.endsWith('.local')) return true + // Block private IPv4 ranges + if (/^\d{1,3}(\.\d{1,3}){3}$/.test(hostname)) { + const parts = hostname.split('.').map(Number) + if (parts[0] === 10) return true + if (parts[0] === 172 && parts[1] >= 16 && parts[1] <= 31) return true + if (parts[0] === 192 && parts[1] === 168) return true + if (parts[0] === 
169 && parts[1] === 254) return true + if (parts[0] === 127) return true + } + return false + } catch { + return true + } +} + /** * GET /api/webhooks - List all webhooks with delivery stats */ @@ -62,6 +88,10 @@ export async function POST(request: NextRequest) { const body = validated.data const { name, url, events, generate_secret } = body + if (isBlockedWebhookUrl(url)) { + return NextResponse.json({ error: 'Webhook URL cannot point to internal or private services' }, { status: 400 }) + } + const secret = generate_secret !== false ? randomBytes(32).toString('hex') : null const eventsJson = JSON.stringify(events || ['*']) @@ -114,6 +144,9 @@ export async function PUT(request: NextRequest) { try { new URL(url) } catch { return NextResponse.json({ error: 'Invalid URL' }, { status: 400 }) } + if (isBlockedWebhookUrl(url)) { + return NextResponse.json({ error: 'Webhook URL cannot point to internal or private services' }, { status: 400 }) + } } const updates: string[] = ['updated_at = unixepoch()'] diff --git a/src/app/api/workflows/route.ts b/src/app/api/workflows/route.ts index 2970dfb..c867b94 100644 --- a/src/app/api/workflows/route.ts +++ b/src/app/api/workflows/route.ts @@ -4,6 +4,7 @@ import { requireRole } from '@/lib/auth' import { validateBody, createWorkflowSchema } from '@/lib/validation' import { mutationLimiter } from '@/lib/rate-limit' import { logger } from '@/lib/logger' +import { scanForInjection } from '@/lib/injection-guard' export interface WorkflowTemplate { id: number @@ -62,6 +63,19 @@ export async function POST(request: NextRequest) { if ('error' in result) return result.error const { name, description, model, task_prompt, timeout_seconds, agent_role, tags } = result.data + // Scan task_prompt for injection — this gets sent directly to AI agents + const injectionReport = scanForInjection(task_prompt, { context: 'prompt' }) + if (!injectionReport.safe) { + const criticals = injectionReport.matches.filter(m => m.severity === 'critical') + if 
(criticals.length > 0) { + logger.warn({ name, rules: criticals.map(m => m.rule) }, 'Blocked workflow: injection detected in task_prompt') + return NextResponse.json( + { error: 'Task prompt blocked: potentially unsafe content detected', injection: criticals.map(m => ({ rule: m.rule, description: m.description })) }, + { status: 422 } + ) + } + } + const db = getDatabase() const user = auth.user const workspaceId = auth.user.workspace_id ?? 1 @@ -111,6 +125,9 @@ export async function PUT(request: NextRequest) { const auth = requireRole(request, 'operator') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 1 @@ -169,6 +186,9 @@ export async function DELETE(request: NextRequest) { const auth = requireRole(request, 'operator') if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + const rateCheck = mutationLimiter(request) + if (rateCheck) return rateCheck + try { const db = getDatabase() const workspaceId = auth.user.workspace_id ?? 
1 diff --git a/src/app/api/workspaces/[id]/route.ts b/src/app/api/workspaces/[id]/route.ts new file mode 100644 index 0000000..a84b06a --- /dev/null +++ b/src/app/api/workspaces/[id]/route.ts @@ -0,0 +1,173 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getDatabase, logAuditEvent } from '@/lib/db' +import { logger } from '@/lib/logger' + +/** + * GET /api/workspaces/[id] - Get a single workspace + */ +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const { id } = await params + const tenantId = auth.user.tenant_id ?? 1 + + const workspace = db.prepare( + 'SELECT * FROM workspaces WHERE id = ? AND tenant_id = ?' + ).get(Number(id), tenantId) + + if (!workspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + // Include agent count + const stats = db.prepare( + 'SELECT COUNT(*) as agent_count FROM agents WHERE workspace_id = ?' + ).get(Number(id)) as { agent_count: number } + + return NextResponse.json({ + workspace: { ...(workspace as any), agent_count: stats.agent_count }, + }) + } catch (error) { + logger.error({ err: error }, 'GET /api/workspaces/[id] error') + return NextResponse.json({ error: 'Failed to fetch workspace' }, { status: 500 }) + } +} + +/** + * PUT /api/workspaces/[id] - Update workspace name + */ +export async function PUT( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const { id } = await params + const tenantId = auth.user.tenant_id ?? 
1 + const body = await request.json() + const { name } = body + + if (!name || typeof name !== 'string' || name.trim().length === 0) { + return NextResponse.json({ error: 'Name is required' }, { status: 400 }) + } + + const existing = db.prepare( + 'SELECT * FROM workspaces WHERE id = ? AND tenant_id = ?' + ).get(Number(id), tenantId) as any + + if (!existing) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + // Don't allow renaming the default workspace slug + const now = Math.floor(Date.now() / 1000) + db.prepare( + 'UPDATE workspaces SET name = ?, updated_at = ? WHERE id = ? AND tenant_id = ?' + ).run(name.trim(), now, Number(id), tenantId) + + logAuditEvent({ + action: 'workspace_updated', + actor: auth.user.username, + actor_id: auth.user.id, + target_type: 'workspace', + target_id: Number(id), + detail: { old_name: existing.name, new_name: name.trim() }, + }) + + const updated = db.prepare('SELECT * FROM workspaces WHERE id = ?').get(Number(id)) + return NextResponse.json({ workspace: updated }) + } catch (error) { + logger.error({ err: error }, 'PUT /api/workspaces/[id] error') + return NextResponse.json({ error: 'Failed to update workspace' }, { status: 500 }) + } +} + +/** + * DELETE /api/workspaces/[id] - Delete a workspace (moves agents to default workspace) + */ +export async function DELETE( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const { id } = await params + const tenantId = auth.user.tenant_id ?? 1 + const workspaceId = Number(id) + + const existing = db.prepare( + 'SELECT * FROM workspaces WHERE id = ? AND tenant_id = ?' 
+ ).get(workspaceId, tenantId) as any + + if (!existing) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + if (existing.slug === 'default') { + return NextResponse.json({ error: 'Cannot delete the default workspace' }, { status: 400 }) + } + + // Find default workspace to reassign agents + const defaultWs = db.prepare( + "SELECT id FROM workspaces WHERE slug = 'default' AND tenant_id = ? LIMIT 1" + ).get(tenantId) as { id: number } | undefined + + const fallbackId = defaultWs?.id ?? 1 + + db.transaction(() => { + // Reassign agents to default workspace + const moved = db.prepare( + 'UPDATE agents SET workspace_id = ?, updated_at = ? WHERE workspace_id = ?' + ).run(fallbackId, Math.floor(Date.now() / 1000), workspaceId) + + // Reassign users to default workspace + db.prepare( + 'UPDATE users SET workspace_id = ?, updated_at = ? WHERE workspace_id = ?' + ).run(fallbackId, Math.floor(Date.now() / 1000), workspaceId) + + // Reassign projects to default workspace + db.prepare( + 'UPDATE projects SET workspace_id = ?, updated_at = ? WHERE workspace_id = ?' 
+ ).run(fallbackId, Math.floor(Date.now() / 1000), workspaceId) + + // Delete workspace + db.prepare('DELETE FROM workspaces WHERE id = ?').run(workspaceId) + + logAuditEvent({ + action: 'workspace_deleted', + actor: auth.user.username, + actor_id: auth.user.id, + target_type: 'workspace', + target_id: workspaceId, + detail: { + name: existing.name, + slug: existing.slug, + agents_moved: (moved as any).changes, + moved_to_workspace: fallbackId, + }, + }) + })() + + return NextResponse.json({ + success: true, + deleted: existing.name, + agents_moved_to: fallbackId, + }) + } catch (error) { + logger.error({ err: error }, 'DELETE /api/workspaces/[id] error') + return NextResponse.json({ error: 'Failed to delete workspace' }, { status: 500 }) + } +} diff --git a/src/app/api/workspaces/route.ts b/src/app/api/workspaces/route.ts new file mode 100644 index 0000000..23c2b2c --- /dev/null +++ b/src/app/api/workspaces/route.ts @@ -0,0 +1,78 @@ +import { NextRequest, NextResponse } from 'next/server' +import { requireRole } from '@/lib/auth' +import { getDatabase, logAuditEvent } from '@/lib/db' +import { listWorkspacesForTenant } from '@/lib/workspaces' +import { logger } from '@/lib/logger' + +export async function GET(request: NextRequest) { + const auth = requireRole(request, 'viewer') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const tenantId = auth.user.tenant_id ?? 
1 + const workspaces = listWorkspacesForTenant(db, tenantId) + return NextResponse.json({ + workspaces, + active_workspace_id: auth.user.workspace_id, + tenant_id: tenantId, + }) + } catch { + return NextResponse.json({ error: 'Failed to fetch workspaces' }, { status: 500 }) + } +} + +/** + * POST /api/workspaces - Create a new workspace + */ +export async function POST(request: NextRequest) { + const auth = requireRole(request, 'admin') + if ('error' in auth) return NextResponse.json({ error: auth.error }, { status: auth.status }) + + try { + const db = getDatabase() + const tenantId = auth.user.tenant_id ?? 1 + const body = await request.json() + const { name, slug } = body + + if (!name || typeof name !== 'string' || name.trim().length === 0) { + return NextResponse.json({ error: 'Name is required' }, { status: 400 }) + } + + const resolvedSlug = (slug || name) + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, '') + + if (!resolvedSlug) { + return NextResponse.json({ error: 'Invalid slug' }, { status: 400 }) + } + + // Check uniqueness + const existing = db.prepare('SELECT id FROM workspaces WHERE slug = ?').get(resolvedSlug) + if (existing) { + return NextResponse.json({ error: 'Workspace slug already exists' }, { status: 409 }) + } + + const now = Math.floor(Date.now() / 1000) + const result = db.prepare( + 'INSERT INTO workspaces (slug, name, tenant_id, created_at, updated_at) VALUES (?, ?, ?, ?, ?)' + ).run(resolvedSlug, name.trim(), tenantId, now, now) + + const workspace = db.prepare('SELECT * FROM workspaces WHERE id = ?').get(result.lastInsertRowid) + + logAuditEvent({ + action: 'workspace_created', + actor: auth.user.username, + actor_id: auth.user.id, + target_type: 'workspace', + target_id: Number(result.lastInsertRowid), + detail: { name: name.trim(), slug: resolvedSlug }, + }) + + return NextResponse.json({ workspace }, { status: 201 }) + } catch (error) { + logger.error({ err: error }, 'POST /api/workspaces error') + return 
NextResponse.json({ error: 'Failed to create workspace' }, { status: 500 }) + } +} diff --git a/src/app/apple-icon.png b/src/app/apple-icon.png new file mode 100644 index 0000000..2858e83 Binary files /dev/null and b/src/app/apple-icon.png differ diff --git a/src/app/globals.css b/src/app/globals.css index 2a32e9f..5f23a36 100644 --- a/src/app/globals.css +++ b/src/app/globals.css @@ -24,7 +24,7 @@ --border: 240 5.9% 90%; --input: 240 5.9% 90%; --ring: 240 5.9% 10%; - --radius: 0.625rem; + --radius: 0.375rem; /* Semantic status colors */ --success: 142 71% 45%; @@ -34,6 +34,13 @@ --info: 217 91% 60%; --info-foreground: 0 0% 98%; + /* Void accent colors (light fallbacks) */ + --void-cyan: 187 82% 40%; + --void-mint: 160 60% 40%; + --void-amber: 38 92% 42%; + --void-violet: 263 70% 55%; + --void-crimson: 0 72% 45%; + /* Surface hierarchy */ --surface-0: 0 0% 100%; --surface-1: 240 5% 96%; @@ -42,39 +49,47 @@ } .dark { - --background: 240 10% 3.9%; - --foreground: 0 0% 95%; - --card: 240 10% 5.5%; - --card-foreground: 0 0% 95%; - --popover: 240 10% 5.5%; - --popover-foreground: 0 0% 95%; - --primary: 210 100% 52%; - --primary-foreground: 0 0% 100%; - --secondary: 240 5% 12%; - --secondary-foreground: 0 0% 95%; - --muted: 240 5% 15%; - --muted-foreground: 240 5% 55%; - --accent: 240 5% 15%; - --accent-foreground: 0 0% 95%; - --destructive: 0 63% 31%; + /* Void palette */ + --background: 215 27% 4%; + --foreground: 210 20% 92%; + --card: 220 30% 8%; + --card-foreground: 210 20% 92%; + --popover: 220 30% 8%; + --popover-foreground: 210 20% 92%; + --primary: 187 82% 53%; + --primary-foreground: 220 30% 6%; + --secondary: 220 25% 11%; + --secondary-foreground: 210 20% 92%; + --muted: 220 20% 14%; + --muted-foreground: 220 15% 50%; + --accent: 220 20% 14%; + --accent-foreground: 210 20% 92%; + --destructive: 0 72% 51%; --destructive-foreground: 0 0% 98%; - --border: 240 4% 16%; - --input: 240 4% 16%; - --ring: 210 100% 52%; + --border: 220 20% 14%; + --input: 220 20% 14%; + 
--ring: 187 82% 53%; - /* Dark mode status */ - --success: 142 71% 45%; + /* Void accent colors */ + --void-cyan: 187 82% 53%; + --void-mint: 160 60% 52%; + --void-amber: 38 92% 50%; + --void-violet: 263 90% 66%; + --void-crimson: 0 72% 51%; + + /* Void status */ + --success: 160 60% 52%; --success-foreground: 0 0% 98%; --warning: 38 92% 50%; --warning-foreground: 0 0% 9%; - --info: 217 91% 60%; + --info: 187 82% 53%; --info-foreground: 0 0% 98%; - /* Dark surface hierarchy */ - --surface-0: 240 10% 3.9%; - --surface-1: 240 8% 6%; - --surface-2: 240 6% 9%; - --surface-3: 240 5% 13%; + /* Void surface hierarchy */ + --surface-0: 215 27% 4%; + --surface-1: 222 35% 7%; + --surface-2: 220 30% 10%; + --surface-3: 220 25% 14%; } } @@ -107,9 +122,23 @@ ::-webkit-scrollbar-thumb:hover { @apply bg-muted-foreground; } + + /* Modern button defaults — enhances ALL raw +
{/* Nav groups */} @@ -120,9 +130,10 @@ export function NavRail() { {/* Group header (expanded mode, only for groups with labels) */} {sidebarExpanded && group.label && ( - + )} {/* Group items */} @@ -185,18 +196,19 @@ export function NavRail() {