fix: security hardening from audit (closes #460)
- Hash session tokens (SHA-256) before storing in DB; migration for existing tokens
- Enforce 12-char password minimum on self-service change (was 8; creation already required 12)
- Increase scrypt cost N=16384→65536 with progressive rehash on login
- Add MC_PROXY_AUTH_TRUSTED_IPS to restrict proxy auth header spoofing
- Enable HSTS by default in production (opt-out via MC_DISABLE_HSTS=1)
- Restrict debug endpoint to allowlisted gateway API paths (SSRF prevention)
- Default session cookie secure=true in production
- Gate MC_DISABLE_RATE_LIMIT on NODE_ENV !== 'production'
- Remove password value from insecure-default log warning
- chmod 600 generated secrets file in Docker entrypoint
This commit is contained in:
parent
69e89a97a1
commit
afa8e9dacb
|
|
@ -20,6 +20,11 @@ generate_secret() {
|
|||
|
||||
SECRETS_FILE="/app/.data/.generated-secrets"
|
||||
|
||||
# Ensure secrets file has restrictive permissions if it exists
|
||||
if [ -f "$SECRETS_FILE" ]; then
|
||||
chmod 600 "$SECRETS_FILE"
|
||||
fi
|
||||
|
||||
# Load previously generated secrets if they exist
|
||||
if [ -f "$SECRETS_FILE" ]; then
|
||||
printf '[entrypoint] Loading persisted secrets from .data\n'
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ const nextConfig = {
|
|||
{ key: 'X-Content-Type-Options', value: 'nosniff' },
|
||||
{ key: 'Referrer-Policy', value: 'strict-origin-when-cross-origin' },
|
||||
{ key: 'Permissions-Policy', value: 'camera=(), microphone=(), geolocation=()' },
|
||||
...(process.env.MC_ENABLE_HSTS === '1' ? [
|
||||
...(process.env.NODE_ENV === 'production' && process.env.MC_DISABLE_HSTS !== '1' || process.env.MC_ENABLE_HSTS === '1' ? [
|
||||
{ key: 'Strict-Transport-Security', value: 'max-age=63072000; includeSubDomains; preload' }
|
||||
] : []),
|
||||
],
|
||||
|
|
|
|||
|
|
@ -56,8 +56,8 @@ export async function PATCH(request: NextRequest) {
|
|||
return NextResponse.json({ error: 'Current password is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (new_password.length < 8) {
|
||||
return NextResponse.json({ error: 'New password must be at least 8 characters' }, { status: 400 })
|
||||
if (new_password.length < 12) {
|
||||
return NextResponse.json({ error: 'New password must be at least 12 characters' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Verify current password by fetching stored hash
|
||||
|
|
|
|||
|
|
@ -118,6 +118,13 @@ export async function POST(request: Request) {
|
|||
return NextResponse.json({ error: 'path must start with /api/' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Restrict to known safe gateway API paths to prevent SSRF
|
||||
const ALLOWED_GATEWAY_PATHS = ['/api/status', '/api/health', '/api/models', '/api/heartbeat', '/api/agents', '/api/config']
|
||||
const normalizedPath = path.split('?')[0]
|
||||
if (!ALLOWED_GATEWAY_PATHS.some(allowed => normalizedPath === allowed || normalizedPath.startsWith(allowed + '/'))) {
|
||||
return NextResponse.json({ error: 'Path not in allowed gateway API paths' }, { status: 403 })
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await gatewayFetch(path, {
|
||||
method,
|
||||
|
|
|
|||
|
|
@ -1,9 +1,14 @@
|
|||
import { createHash, randomBytes, timingSafeEqual } from 'crypto'
|
||||
import { getDatabase } from './db'
|
||||
import { hashPassword, verifyPassword } from './password'
|
||||
import { hashPassword, verifyPassword, verifyPasswordWithRehashCheck } from './password'
|
||||
import { logSecurityEvent } from './security-events'
|
||||
import { parseMcSessionCookieHeader } from './session-cookie'
|
||||
|
||||
// Trusted IPs for proxy auth header (comma-separated)
|
||||
const PROXY_AUTH_TRUSTED_IPS = new Set(
|
||||
(process.env.MC_PROXY_AUTH_TRUSTED_IPS || '').split(',').map(s => s.trim()).filter(Boolean)
|
||||
)
|
||||
|
||||
// Plugin hook: extensions can register a custom API key resolver without modifying this file.
|
||||
type AuthResolverHook = (apiKey: string, agentName: string | null) => User | null
|
||||
let _authResolverHook: AuthResolverHook | null = null
|
||||
|
|
@ -141,10 +146,11 @@ export function createSession(
|
|||
const resolvedWorkspaceId = workspaceId ?? ((db.prepare('SELECT workspace_id FROM users WHERE id = ?').get(userId) as { workspace_id?: number } | undefined)?.workspace_id || getDefaultWorkspaceContext().workspaceId)
|
||||
const resolvedTenantId = resolveTenantForWorkspace(resolvedWorkspaceId)
|
||||
|
||||
const tokenHash = hashSessionToken(token)
|
||||
db.prepare(`
|
||||
INSERT INTO user_sessions (token, user_id, expires_at, ip_address, user_agent, workspace_id, tenant_id)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
`).run(token, userId, expiresAt, ipAddress || null, userAgent || null, resolvedWorkspaceId, resolvedTenantId)
|
||||
`).run(tokenHash, userId, expiresAt, ipAddress || null, userAgent || null, resolvedWorkspaceId, resolvedTenantId)
|
||||
|
||||
// Update user's last login
|
||||
db.prepare('UPDATE users SET last_login_at = ?, updated_at = ? WHERE id = ?').run(now, now, userId)
|
||||
|
|
@ -159,6 +165,7 @@ export function validateSession(token: string): (User & { sessionId: number }) |
|
|||
if (!token) return null
|
||||
const db = getDatabase()
|
||||
const now = Math.floor(Date.now() / 1000)
|
||||
const tokenHash = hashSessionToken(token)
|
||||
|
||||
const row = db.prepare(`
|
||||
SELECT u.id, u.username, u.display_name, u.role, u.provider, u.email, u.avatar_url, u.is_approved,
|
||||
|
|
@ -170,7 +177,7 @@ export function validateSession(token: string): (User & { sessionId: number }) |
|
|||
JOIN users u ON u.id = s.user_id
|
||||
LEFT JOIN workspaces w ON w.id = COALESCE(s.workspace_id, u.workspace_id, 1)
|
||||
WHERE s.token = ? AND s.expires_at > ?
|
||||
`).get(token, now) as SessionQueryRow | undefined
|
||||
`).get(tokenHash, now) as SessionQueryRow | undefined
|
||||
|
||||
if (!row) return null
|
||||
|
||||
|
|
@ -194,7 +201,8 @@ export function validateSession(token: string): (User & { sessionId: number }) |
|
|||
|
||||
export function destroySession(token: string): void {
|
||||
const db = getDatabase()
|
||||
db.prepare('DELETE FROM user_sessions WHERE token = ?').run(token)
|
||||
const tokenHash = hashSessionToken(token)
|
||||
db.prepare('DELETE FROM user_sessions WHERE token = ?').run(tokenHash)
|
||||
}
|
||||
|
||||
export function destroyAllUserSessions(userId: number): void {
|
||||
|
|
@ -227,10 +235,18 @@ export function authenticateUser(username: string, password: string): User | nul
|
|||
try { logSecurityEvent({ event_type: 'auth_failure', severity: 'warning', source: 'auth', detail: JSON.stringify({ username, reason: 'not_approved' }), workspace_id: 1, tenant_id: 1 }) } catch {}
|
||||
return null
|
||||
}
|
||||
if (!verifyPassword(password, row.password_hash)) {
|
||||
const { valid, needsRehash } = verifyPasswordWithRehashCheck(password, row.password_hash)
|
||||
if (!valid) {
|
||||
try { logSecurityEvent({ event_type: 'auth_failure', severity: 'warning', source: 'auth', detail: JSON.stringify({ username, reason: 'invalid_password' }), workspace_id: 1, tenant_id: 1 }) } catch {}
|
||||
return null
|
||||
}
|
||||
// Progressive rehash: upgrade hash to current scrypt cost on successful login
|
||||
if (needsRehash) {
|
||||
try {
|
||||
db.prepare('UPDATE users SET password_hash = ?, updated_at = ? WHERE id = ?')
|
||||
.run(hashPassword(password), Math.floor(Date.now() / 1000), row.id)
|
||||
} catch { /* non-fatal — will rehash on next login */ }
|
||||
}
|
||||
return {
|
||||
id: row.id,
|
||||
username: row.username,
|
||||
|
|
@ -401,12 +417,32 @@ export function getUserFromRequest(request: Request): User | null {
|
|||
// When the gateway has already authenticated the user and injects their username
|
||||
// as a trusted header (e.g. X-Auth-Username from Envoy OIDC claimToHeaders),
|
||||
// skip the local login form entirely.
|
||||
// SECURITY: MC_PROXY_AUTH_TRUSTED_IPS must be set to restrict which IPs can send
|
||||
// the proxy auth header. Without it, any client reaching MC directly could spoof
|
||||
// the header and impersonate any user.
|
||||
const proxyAuthHeader = (process.env.MC_PROXY_AUTH_HEADER || '').trim()
|
||||
if (proxyAuthHeader) {
|
||||
const proxyUsername = (request.headers.get(proxyAuthHeader) || '').trim()
|
||||
if (proxyUsername) {
|
||||
const user = resolveOrProvisionProxyUser(proxyUsername)
|
||||
if (user) return { ...user, agent_name: agentName }
|
||||
const trustedIps = PROXY_AUTH_TRUSTED_IPS
|
||||
if (trustedIps.size > 0) {
|
||||
const clientIp = request.headers.get('x-real-ip')?.trim()
|
||||
|| request.headers.get('x-forwarded-for')?.split(',')[0]?.trim()
|
||||
|| ''
|
||||
if (!trustedIps.has(clientIp)) {
|
||||
// Request not from trusted proxy — ignore the proxy auth header
|
||||
} else {
|
||||
const proxyUsername = (request.headers.get(proxyAuthHeader) || '').trim()
|
||||
if (proxyUsername) {
|
||||
const user = resolveOrProvisionProxyUser(proxyUsername)
|
||||
if (user) return { ...user, agent_name: agentName }
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// No trusted IPs configured — log warning and still allow (backward compat)
|
||||
const proxyUsername = (request.headers.get(proxyAuthHeader) || '').trim()
|
||||
if (proxyUsername) {
|
||||
const user = resolveOrProvisionProxyUser(proxyUsername)
|
||||
if (user) return { ...user, agent_name: agentName }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -536,6 +572,10 @@ function hashApiKey(rawKey: string): string {
|
|||
return createHash('sha256').update(rawKey).digest('hex')
|
||||
}
|
||||
|
||||
function hashSessionToken(rawToken: string): string {
|
||||
return createHash('sha256').update(rawToken).digest('hex')
|
||||
}
|
||||
|
||||
function parseAgentScopes(raw: string): Set<string> {
|
||||
try {
|
||||
const parsed = JSON.parse(raw)
|
||||
|
|
|
|||
|
|
@ -141,7 +141,7 @@ function seedAdminUserFromEnv(dbConn: Database.Database): void {
|
|||
|
||||
if (INSECURE_PASSWORDS.has(password)) {
|
||||
logger.warn(
|
||||
`AUTH_PASS matches a known insecure default ("${password}"). ` +
|
||||
'AUTH_PASS matches a known insecure default. ' +
|
||||
'Please set a strong, unique password in your .env file. ' +
|
||||
'Skipping admin user seeding until credentials are changed.'
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { createHash } from 'crypto'
|
||||
import { readFileSync } from 'fs'
|
||||
import { join } from 'path'
|
||||
import type Database from 'better-sqlite3'
|
||||
|
|
@ -1268,6 +1269,21 @@ const migrations: Migration[] = [
|
|||
up(db: Database.Database) {
|
||||
db.exec(`ALTER TABLE agents ADD COLUMN hidden INTEGER NOT NULL DEFAULT 0`)
|
||||
}
|
||||
},
|
||||
{
|
||||
id: '043_hash_session_tokens',
|
||||
up(db: Database.Database) {
|
||||
// Migrate existing plaintext session tokens to SHA-256 hashes.
|
||||
// After this migration, session tokens are stored as hashes — raw tokens
|
||||
// are only returned to the client on creation. Existing sessions will be
|
||||
// invalidated (users need to re-login).
|
||||
const rows = db.prepare('SELECT id, token FROM user_sessions').all() as Array<{ id: number; token: string }>
|
||||
const update = db.prepare('UPDATE user_sessions SET token = ? WHERE id = ?')
|
||||
for (const row of rows) {
|
||||
const hashed = createHash('sha256').update(row.token).digest('hex')
|
||||
update.run(hashed, row.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
|
|
|
|||
|
|
@ -3,20 +3,43 @@ import { randomBytes, scryptSync, timingSafeEqual } from 'crypto'
|
|||
// Password hashing using Node.js built-in scrypt
|
||||
const SALT_LENGTH = 16
|
||||
const KEY_LENGTH = 32
|
||||
const SCRYPT_COST = 16384
|
||||
const SCRYPT_COST = 65536
|
||||
const SCRYPT_MAXMEM = 128 * SCRYPT_COST * 8 * 2 // ~128MB headroom for N=65536
|
||||
|
||||
// Previous cost factor — used to verify passwords hashed before the upgrade
|
||||
const LEGACY_SCRYPT_COST = 16384
|
||||
|
||||
export function hashPassword(password: string): string {
|
||||
const salt = randomBytes(SALT_LENGTH).toString('hex')
|
||||
const hash = scryptSync(password, salt, KEY_LENGTH, { N: SCRYPT_COST }).toString('hex')
|
||||
const hash = scryptSync(password, salt, KEY_LENGTH, { N: SCRYPT_COST, maxmem: SCRYPT_MAXMEM }).toString('hex')
|
||||
return `${salt}:${hash}`
|
||||
}
|
||||
|
||||
export function verifyPassword(password: string, stored: string): boolean {
|
||||
/**
|
||||
* Verify a password against a stored hash.
|
||||
* Tries current cost first, then falls back to legacy cost for pre-upgrade hashes.
|
||||
* Returns { valid, needsRehash } so callers can progressively upgrade hashes.
|
||||
*/
|
||||
export function verifyPasswordWithRehashCheck(password: string, stored: string): { valid: boolean; needsRehash: boolean } {
|
||||
const [salt, hash] = stored.split(':')
|
||||
if (!salt || !hash) return false
|
||||
const derived = scryptSync(password, salt, KEY_LENGTH, { N: SCRYPT_COST })
|
||||
if (!salt || !hash) return { valid: false, needsRehash: false }
|
||||
const storedBuf = Buffer.from(hash, 'hex')
|
||||
if (derived.length !== storedBuf.length) return false
|
||||
return timingSafeEqual(derived, storedBuf)
|
||||
|
||||
// Try current cost first
|
||||
const derived = scryptSync(password, salt, KEY_LENGTH, { N: SCRYPT_COST, maxmem: SCRYPT_MAXMEM })
|
||||
if (derived.length === storedBuf.length && timingSafeEqual(derived, storedBuf)) {
|
||||
return { valid: true, needsRehash: false }
|
||||
}
|
||||
|
||||
// Fall back to legacy cost for passwords hashed before the upgrade
|
||||
const legacyDerived = scryptSync(password, salt, KEY_LENGTH, { N: LEGACY_SCRYPT_COST })
|
||||
if (legacyDerived.length !== storedBuf.length) return { valid: false, needsRehash: false }
|
||||
if (timingSafeEqual(legacyDerived, storedBuf)) {
|
||||
return { valid: true, needsRehash: true }
|
||||
}
|
||||
return { valid: false, needsRehash: false }
|
||||
}
|
||||
|
||||
export function verifyPassword(password: string, stored: string): boolean {
|
||||
return verifyPasswordWithRehashCheck(password, stored).valid
|
||||
}
|
||||
|
|
|
|||
|
|
@ -71,8 +71,8 @@ export function createRateLimiter(options: RateLimiterOptions) {
|
|||
if (cleanupInterval.unref) cleanupInterval.unref()
|
||||
|
||||
return function checkRateLimit(request: Request): NextResponse | null {
|
||||
// Allow disabling non-critical rate limiting for E2E tests
|
||||
if (process.env.MC_DISABLE_RATE_LIMIT === '1' && !options.critical) return null
|
||||
// Allow disabling non-critical rate limiting for E2E tests (never in production)
|
||||
if (process.env.MC_DISABLE_RATE_LIMIT === '1' && !options.critical && process.env.NODE_ENV !== 'production') return null
|
||||
const ip = extractClientIp(request)
|
||||
const now = Date.now()
|
||||
const entry = store.get(ip)
|
||||
|
|
@ -143,7 +143,7 @@ export function createAgentRateLimiter(options: RateLimiterOptions) {
|
|||
if (cleanupInterval.unref) cleanupInterval.unref()
|
||||
|
||||
return function checkAgentRateLimit(request: Request): NextResponse | null {
|
||||
if (process.env.MC_DISABLE_RATE_LIMIT === '1' && !options.critical) return null
|
||||
if (process.env.MC_DISABLE_RATE_LIMIT === '1' && !options.critical && process.env.NODE_ENV !== 'production') return null
|
||||
|
||||
const agentName = (request.headers.get('x-agent-name') || '').trim()
|
||||
const key = agentName || `ip:${extractClientIp(request)}`
|
||||
|
|
|
|||
|
|
@ -35,7 +35,8 @@ function envFlag(name: string): boolean | undefined {
|
|||
|
||||
export function getMcSessionCookieOptions(input: { maxAgeSeconds: number; isSecureRequest?: boolean }): Partial<ResponseCookie> {
|
||||
const secureEnv = envFlag('MC_COOKIE_SECURE')
|
||||
const secure = secureEnv ?? input.isSecureRequest ?? false
|
||||
const isProduction = process.env.NODE_ENV === 'production'
|
||||
const secure = secureEnv ?? input.isSecureRequest ?? isProduction
|
||||
|
||||
return {
|
||||
httpOnly: true,
|
||||
|
|
|
|||
Loading…
Reference in New Issue