diff --git a/.env.example b/.env.example index 8ec9d57e96..84e20d9605 100644 --- a/.env.example +++ b/.env.example @@ -2,3 +2,6 @@ DATABASE_URL=postgres://paperclip:paperclip@localhost:5432/paperclip PORT=3100 SERVE_UI=false BETTER_AUTH_SECRET=paperclip-dev-secret + +# Discord webhook for daily merge digest (scripts/discord-daily-digest.sh) +# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/... diff --git a/.gitignore b/.gitignore index cdeb446edc..8ed3dd93ad 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,7 @@ server/src/**/*.d.ts server/src/**/*.d.ts.map tmp/ feedback-export-* +diagnostics/ # Editor / tool temp files *.tmp diff --git a/doc/DEVELOPING.md b/doc/DEVELOPING.md index 7958ffd19c..225ae9ffea 100644 --- a/doc/DEVELOPING.md +++ b/doc/DEVELOPING.md @@ -222,6 +222,8 @@ That repo-local env also sets: - `PAPERCLIP_WORKTREE_COLOR=` The server/UI use those values for worktree-specific branding such as the top banner and dynamically colored favicon. +Authenticated worktree servers also use the `PAPERCLIP_INSTANCE_ID` value to scope Better Auth cookie names. +Browser cookies are shared by host rather than port, so this prevents logging into one `127.0.0.1:` worktree from replacing another worktree server's session cookie. 
Print shell exports explicitly when needed: diff --git a/packages/db/src/backup-lib.test.ts b/packages/db/src/backup-lib.test.ts index 2ea9b07001..6497cbe6af 100644 --- a/packages/db/src/backup-lib.test.ts +++ b/packages/db/src/backup-lib.test.ts @@ -127,6 +127,7 @@ describeEmbeddedPostgres("runDatabaseBackup", () => { backupDir, retention: { dailyDays: 7, weeklyWeeks: 4, monthlyMonths: 1 }, filenamePrefix: "paperclip-test", + backupEngine: "javascript", }); expect(result.backupFile).toMatch(/paperclip-test-.*\.sql\.gz$/); @@ -148,14 +149,17 @@ describeEmbeddedPostgres("runDatabaseBackup", () => { title: string; payload: string; state: string; - metadata: { index: number; even: boolean }; + metadata: { index: number; even: boolean } | string; }[]>(` SELECT "title", "payload", "state"::text AS "state", "metadata" FROM "public"."backup_test_records" WHERE "title" IN ('row-0', 'row-159') ORDER BY "title" `); - expect(sampleRows).toEqual([ + expect(sampleRows.map((row) => ({ + ...row, + metadata: typeof row.metadata === "string" ? 
JSON.parse(row.metadata) : row.metadata, + }))).toEqual([ { title: "row-0", payload, diff --git a/packages/db/src/backup-lib.ts b/packages/db/src/backup-lib.ts index e1e88724f8..2662b6dc99 100644 --- a/packages/db/src/backup-lib.ts +++ b/packages/db/src/backup-lib.ts @@ -1,6 +1,8 @@ import { createReadStream, createWriteStream, existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from "node:fs"; import { basename, resolve } from "node:path"; import { createInterface } from "node:readline"; +import { spawn } from "node:child_process"; +import { open as openFile } from "node:fs/promises"; import { pipeline } from "node:stream/promises"; import { createGunzip, createGzip } from "node:zlib"; import postgres from "postgres"; @@ -20,6 +22,7 @@ export type RunDatabaseBackupOptions = { includeMigrationJournal?: boolean; excludeTables?: string[]; nullifyColumns?: Record; + backupEngine?: "auto" | "pg_dump" | "javascript"; }; export type RunDatabaseBackupResult = { @@ -61,6 +64,9 @@ type ExtensionDefinition = { const DRIZZLE_SCHEMA = "drizzle"; const DRIZZLE_MIGRATIONS_TABLE = "__drizzle_migrations"; const DEFAULT_BACKUP_WRITE_BUFFER_BYTES = 1024 * 1024; +const BACKUP_DATA_CURSOR_ROWS = 100; +const BACKUP_CLI_STDERR_BYTES = 64 * 1024; +const BACKUP_BREAKPOINT_DETECT_BYTES = 64 * 1024; const STATEMENT_BREAKPOINT = "-- paperclip statement breakpoint 69f6f3f1-42fd-46a6-bf17-d1d85f8f3900"; @@ -223,6 +229,134 @@ function tableKey(schemaName: string, tableName: string): string { return `${schemaName}.${tableName}`; } +function hasBackupTransforms(opts: RunDatabaseBackupOptions): boolean { + return opts.includeMigrationJournal === true || + (opts.excludeTables?.length ?? 0) > 0 || + Object.keys(opts.nullifyColumns ?? {}).length > 0; +} + +function formatSqlValue(rawValue: unknown, columnName: string | undefined, nullifiedColumns: Set): string { + const val = columnName && nullifiedColumns.has(columnName) ? 
null : rawValue; + if (val === null || val === undefined) return "NULL"; + if (typeof val === "boolean") return val ? "true" : "false"; + if (typeof val === "number") return String(val); + if (val instanceof Date) return formatSqlLiteral(val.toISOString()); + if (typeof val === "object") return formatSqlLiteral(JSON.stringify(val)); + return formatSqlLiteral(String(val)); +} + +function appendCapturedStderr(previous: string, chunk: Buffer | string): string { + const next = previous + (Buffer.isBuffer(chunk) ? chunk.toString("utf8") : chunk); + if (Buffer.byteLength(next, "utf8") <= BACKUP_CLI_STDERR_BYTES) return next; + return Buffer.from(next, "utf8").subarray(-BACKUP_CLI_STDERR_BYTES).toString("utf8"); +} + +async function waitForChildExit(child: ReturnType, label: string): Promise { + let stderr = ""; + child.stderr?.on("data", (chunk) => { + stderr = appendCapturedStderr(stderr, chunk); + }); + + const result = await new Promise<{ code: number | null; signal: NodeJS.Signals | null }>((resolve, reject) => { + child.once("error", reject); + child.once("exit", (code, signal) => resolve({ code, signal })); + }); + + if (result.signal) { + throw new Error(`${label} exited via ${result.signal}${stderr.trim() ? `: ${stderr.trim()}` : ""}`); + } + if (result.code !== 0) { + throw new Error(`${label} failed with exit code ${result.code ?? "unknown"}${stderr.trim() ? 
`: ${stderr.trim()}` : ""}`); + } +} + +async function runPgDumpBackup(opts: { + connectionString: string; + backupFile: string; + connectTimeout: number; +}): Promise { + const pgDumpBin = process.env.PAPERCLIP_PG_DUMP_PATH || "pg_dump"; + const child = spawn( + pgDumpBin, + [ + `--dbname=${opts.connectionString}`, + "--format=plain", + "--clean", + "--if-exists", + "--no-owner", + "--no-privileges", + "--schema=public", + ], + { + stdio: ["ignore", "pipe", "pipe"], + env: { + ...process.env, + PGCONNECT_TIMEOUT: String(opts.connectTimeout), + }, + }, + ); + + if (!child.stdout) { + throw new Error("pg_dump did not expose stdout"); + } + + await Promise.all([ + pipeline(child.stdout, createGzip(), createWriteStream(opts.backupFile)), + waitForChildExit(child, pgDumpBin), + ]); +} + +async function restoreWithPsql(opts: RunDatabaseRestoreOptions, connectTimeout: number): Promise { + const psqlBin = process.env.PAPERCLIP_PSQL_PATH || "psql"; + const child = spawn( + psqlBin, + [ + `--dbname=${opts.connectionString}`, + "--set=ON_ERROR_STOP=1", + "--quiet", + "--no-psqlrc", + ], + { + stdio: ["pipe", "ignore", "pipe"], + env: { + ...process.env, + PGCONNECT_TIMEOUT: String(connectTimeout), + }, + }, + ); + + if (!child.stdin) { + throw new Error("psql did not expose stdin"); + } + + const input = opts.backupFile.endsWith(".gz") + ? createReadStream(opts.backupFile).pipe(createGunzip()) + : createReadStream(opts.backupFile); + + await Promise.all([ + pipeline(input, child.stdin), + waitForChildExit(child, psqlBin), + ]); +} + +async function hasStatementBreakpoints(backupFile: string): Promise { + const raw = createReadStream(backupFile); + const stream = backupFile.endsWith(".gz") ? raw.pipe(createGunzip()) : raw; + let text = ""; + + try { + for await (const chunk of stream) { + text += Buffer.isBuffer(chunk) ? 
chunk.toString("utf8") : String(chunk); + if (text.includes(STATEMENT_BREAKPOINT)) return true; + if (Buffer.byteLength(text, "utf8") >= BACKUP_BREAKPOINT_DETECT_BYTES) return false; + } + return text.includes(STATEMENT_BREAKPOINT); + } finally { + stream.destroy(); + raw.destroy(); + } +} + async function* readRestoreStatements(backupFile: string): AsyncGenerator { const raw = createReadStream(backupFile); const stream = backupFile.endsWith(".gz") ? raw.pipe(createGunzip()) : raw; @@ -263,41 +397,21 @@ async function* readRestoreStatements(backupFile: string): AsyncGenerator { - streamError = error; - }); - - const writeChunk = async (chunk: string): Promise => { - if (streamError) throw streamError; - const canContinue = stream.write(chunk); - if (!canContinue) { - await new Promise((resolve, reject) => { - const handleDrain = () => { - cleanup(); - resolve(); - }; - const handleError = (error: Error) => { - cleanup(); - reject(error); - }; - const cleanup = () => { - stream.off("drain", handleDrain); - stream.off("error", handleError); - }; - stream.once("drain", handleDrain); - stream.once("error", handleError); - }); + const writeChunk = async (chunk: string | Buffer): Promise => { + const file = await filePromise; + if (typeof chunk === "string") { + await file.write(chunk, null, "utf8"); + } else { + await file.write(chunk); } - if (streamError) throw streamError; }; const flushBufferedLines = () => { @@ -316,37 +430,43 @@ export function createBufferedTextFileWriter(filePath: string, maxBufferedBytes if (closed) { throw new Error(`Cannot write to closed backup file: ${filePath}`); } - if (streamError) throw streamError; bufferedLines.push(line); bufferedBytes += Buffer.byteLength(line, "utf8") + 1; if (bufferedBytes >= flushThreshold) { flushBufferedLines(); } }, + async drain() { + if (closed) { + throw new Error(`Cannot drain closed backup file: ${filePath}`); + } + flushBufferedLines(); + await pendingWrite; + }, + async writeRaw(chunk: string | Buffer) 
{ + if (closed) { + throw new Error(`Cannot write to closed backup file: ${filePath}`); + } + flushBufferedLines(); + firstChunk = false; + pendingWrite = pendingWrite.then(() => writeChunk(chunk)); + await pendingWrite; + }, async close() { if (closed) return; closed = true; flushBufferedLines(); await pendingWrite; - await new Promise((resolve, reject) => { - if (streamError) { - reject(streamError); - return; - } - stream.end((error?: Error | null) => { - if (error) reject(error); - else resolve(); - }); - }); - if (streamError) throw streamError; + const file = await filePromise; + await file.close(); }, async abort() { if (closed) return; closed = true; bufferedLines = []; bufferedBytes = 0; - stream.destroy(); await pendingWrite.catch(() => {}); + await filePromise.then((file) => file.close()).catch(() => {}); if (existsSync(filePath)) { try { unlinkSync(filePath); @@ -362,16 +482,53 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise const filenamePrefix = opts.filenamePrefix ?? "paperclip"; const retention = opts.retention; const connectTimeout = Math.max(1, Math.trunc(opts.connectTimeoutSeconds ?? 5)); + const backupEngine = opts.backupEngine ?? 
"auto"; + const canUsePgDump = !hasBackupTransforms(opts); const includeMigrationJournal = opts.includeMigrationJournal === true; const excludedTableNames = normalizeTableNameSet(opts.excludeTables); const nullifiedColumnsByTable = normalizeNullifyColumnMap(opts.nullifyColumns); - const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout }); + let sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout }); + let sqlClosed = false; + const closeSql = async () => { + if (sqlClosed) return; + sqlClosed = true; + await sql.end(); + }; mkdirSync(opts.backupDir, { recursive: true }); const sqlFile = resolve(opts.backupDir, `${filenamePrefix}-${timestamp()}.sql`); const backupFile = `${sqlFile}.gz`; const writer = createBufferedTextFileWriter(sqlFile); try { + if (backupEngine === "pg_dump" || (backupEngine === "auto" && canUsePgDump)) { + await sql`SELECT 1`; + try { + await closeSql(); + await runPgDumpBackup({ + connectionString: opts.connectionString, + backupFile, + connectTimeout, + }); + await writer.abort(); + const sizeBytes = statSync(backupFile).size; + const prunedCount = pruneOldBackups(opts.backupDir, retention, filenamePrefix); + return { + backupFile, + sizeBytes, + prunedCount, + }; + } catch (error) { + if (existsSync(backupFile)) { + try { unlinkSync(backupFile); } catch { /* ignore */ } + } + if (backupEngine === "pg_dump") { + throw error; + } + sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout }); + sqlClosed = false; + } + } + await sql`SELECT 1`; const emit = (line: string) => writer.emit(line); @@ -703,20 +860,39 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise emit(`-- Data for: ${schema_name}.${tablename} (${count[0]!.n} rows)`); - const rows = await sql.unsafe(`SELECT * FROM ${qualifiedTableName}`).values(); const nullifiedColumns = nullifiedColumnsByTable.get(tablename) ?? 
new Set(); - for (const row of rows) { - const values = row.map((rawValue: unknown, index) => { - const columnName = cols[index]?.column_name; - const val = columnName && nullifiedColumns.has(columnName) ? null : rawValue; - if (val === null || val === undefined) return "NULL"; - if (typeof val === "boolean") return val ? "true" : "false"; - if (typeof val === "number") return String(val); - if (val instanceof Date) return formatSqlLiteral(val.toISOString()); - if (typeof val === "object") return formatSqlLiteral(JSON.stringify(val)); - return formatSqlLiteral(String(val)); - }); - emitStatement(`INSERT INTO ${qualifiedTableName} (${colNames}) VALUES (${values.join(", ")});`); + if (backupEngine !== "javascript" && nullifiedColumns.size === 0) { + emit(`COPY ${qualifiedTableName} (${colNames}) FROM stdin;`); + await writer.writeRaw("\n"); + const copySql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout }); + try { + const copyStream = await copySql + .unsafe(`COPY ${qualifiedTableName} (${colNames}) TO STDOUT`) + .readable(); + for await (const chunk of copyStream) { + await writer.writeRaw(Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(String(chunk))); + } + } finally { + await copySql.end(); + } + await writer.writeRaw("\\.\n"); + emitStatementBoundary(); + emit(""); + continue; + } + + const rowCursor = sql + .unsafe(`SELECT * FROM ${qualifiedTableName}`) + .values() + .cursor(BACKUP_DATA_CURSOR_ROWS) as AsyncIterable; + for await (const rows of rowCursor) { + for (const row of rows) { + const values = row.map((rawValue, index) => + formatSqlValue(rawValue, cols[index]?.column_name, nullifiedColumns), + ); + emitStatement(`INSERT INTO ${qualifiedTableName} (${colNames}) VALUES (${values.join(", ")});`); + } + await writer.drain(); } emit(""); } @@ -768,12 +944,23 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise } throw error; } finally { - await sql.end(); + await closeSql(); } } export async function runDatabaseRestore(opts: RunDatabaseRestoreOptions): Promise { const connectTimeout = Math.max(1, Math.trunc(opts.connectTimeoutSeconds ?? 5)); + try { + await restoreWithPsql(opts, connectTimeout); + return; + } catch (error) { + if (!(await hasStatementBreakpoints(opts.backupFile))) { + throw new Error( + `Failed to restore ${basename(opts.backupFile)} with psql: ${sanitizeRestoreErrorMessage(error)}`, + ); + } + } + const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout }); try { diff --git a/scripts/dev-runner-paths.mjs b/scripts/dev-runner-paths.mjs index efea8f51c4..99114b6bb2 100644 --- a/scripts/dev-runner-paths.mjs +++ b/scripts/dev-runner-paths.mjs @@ -14,6 +14,8 @@ const ignoredTestConfigBasenames = new Set([ "vitest.config.ts", ]); +const nodeDiagnosticReportPattern = /^report\.\d{8}\.\d{6}\.\d+\.\d+\.\d+\.json$/i; + export function shouldTrackDevServerPath(relativePath) { const normalizedPath = String(relativePath).replaceAll("\\", "/").replace(/^\.\/+/, ""); if (normalizedPath.length === 0) return false; @@ -21,6 +23,9 @@ export function shouldTrackDevServerPath(relativePath) { const segments = 
normalizedPath.split("/"); const basename = segments.at(-1) ?? normalizedPath; + if (nodeDiagnosticReportPattern.test(basename)) { + return false; + } if (segments.includes(".paperclip")) { return false; } diff --git a/scripts/dev-runner.mjs b/scripts/dev-runner.mjs index 4f4f7c907f..9063158ae7 100644 --- a/scripts/dev-runner.mjs +++ b/scripts/dev-runner.mjs @@ -1,5 +1,6 @@ #!/usr/bin/env node import { spawn } from "node:child_process"; +import { randomUUID } from "node:crypto"; import { existsSync, mkdirSync, readdirSync, rmSync, statSync, writeFileSync } from "node:fs"; import path from "node:path"; import { createInterface } from "node:readline/promises"; @@ -16,6 +17,8 @@ const gracefulShutdownTimeoutMs = 10_000; const changedPathSampleLimit = 5; const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), ".."); const devServerStatusFilePath = path.join(repoRoot, ".paperclip", "dev-server-status.json"); +const devServerStatusToken = mode === "dev" ? randomUUID() : null; +const devServerStatusTokenHeader = "x-paperclip-dev-server-status-token"; const watchedDirectories = [ "cli", @@ -81,9 +84,11 @@ const env = { if (mode === "dev") { env.PAPERCLIP_DEV_SERVER_STATUS_FILE = devServerStatusFilePath; + env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = devServerStatusToken ?? ""; } if (mode === "watch") { + delete env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN; env.PAPERCLIP_MIGRATION_PROMPT ??= "never"; env.PAPERCLIP_MIGRATION_AUTO_APPLY ??= "true"; } @@ -426,7 +431,9 @@ async function scanForBackendChanges() { async function getDevHealthPayload() { const serverPort = env.PORT ?? process.env.PORT ?? "3100"; - const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`); + const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`, { + headers: devServerStatusToken ? 
{ [devServerStatusTokenHeader]: devServerStatusToken } : undefined, + }); if (!response.ok) { throw new Error(`Health request failed (${response.status})`); } diff --git a/scripts/dev-runner.ts b/scripts/dev-runner.ts index a30096882a..e5d73794bd 100644 --- a/scripts/dev-runner.ts +++ b/scripts/dev-runner.ts @@ -1,5 +1,6 @@ #!/usr/bin/env -S node --import tsx import { spawn } from "node:child_process"; +import { randomUUID } from "node:crypto"; import { existsSync, mkdirSync, readdirSync, rmSync, statSync, writeFileSync } from "node:fs"; import path from "node:path"; import { createInterface } from "node:readline/promises"; @@ -35,6 +36,8 @@ const autoRestartPollIntervalMs = 2500; const gracefulShutdownTimeoutMs = 10_000; const changedPathSampleLimit = 5; const devServerStatusFilePath = path.join(repoRoot, ".paperclip", "dev-server-status.json"); +const devServerStatusToken = mode === "dev" ? randomUUID() : null; +const devServerStatusTokenHeader = "x-paperclip-dev-server-status-token"; const watchedDirectories = [ "cli", @@ -133,10 +136,12 @@ const env: NodeJS.ProcessEnv = { if (mode === "dev") { env.PAPERCLIP_DEV_SERVER_STATUS_FILE = devServerStatusFilePath; + env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = devServerStatusToken ?? ""; env.PAPERCLIP_MIGRATION_AUTO_APPLY ??= "true"; } if (mode === "watch") { + delete env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN; env.PAPERCLIP_MIGRATION_PROMPT ??= "never"; env.PAPERCLIP_MIGRATION_AUTO_APPLY ??= "true"; } @@ -553,7 +558,9 @@ async function scanForBackendChanges() { } async function getDevHealthPayload() { - const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`); + const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`, { + headers: devServerStatusToken ? 
{ [devServerStatusTokenHeader]: devServerStatusToken } : undefined, + }); if (!response.ok) { throw new Error(`Health request failed (${response.status})`); } diff --git a/scripts/discord-daily-digest.sh b/scripts/discord-daily-digest.sh new file mode 100755 index 0000000000..5e240b2e1f --- /dev/null +++ b/scripts/discord-daily-digest.sh @@ -0,0 +1,79 @@ +#!/usr/bin/env bash +set -euo pipefail + +REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" +cd "$REPO_ROOT" + +WEBHOOK_URL="${DISCORD_WEBHOOK_URL:-}" +DATE="${1:-$(date +%Y-%m-%d)}" +REPO_URL="https://github.com/paperclipai/paperclip" + +if [[ -z "$WEBHOOK_URL" ]]; then + echo "Error: DISCORD_WEBHOOK_URL env var is required" >&2 + echo "Usage: DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/... $0 [date]" >&2 + echo " date defaults to today (YYYY-MM-DD format)" >&2 + exit 1 +fi + +NEXT_DATE=$(date -j -v+1d -f "%Y-%m-%d" "$DATE" "+%Y-%m-%d" 2>/dev/null \ + || date -d "$DATE + 1 day" "+%Y-%m-%d" 2>/dev/null) + +COMMITS=$(git log --since="${DATE}T00:00:00" --until="${NEXT_DATE}T00:00:00" master \ + --format="%h|%s|%an" 2>/dev/null || true) + +json_escape() { + python3 -c 'import json, sys; print(json.dumps(sys.stdin.read().rstrip("\n"))[1:-1])' +} + +if [[ -z "$COMMITS" ]]; then + PAYLOAD=$(cat </dev/null || echo "$PAYLOAD" + exit 0 +fi + +RESPONSE=$(curl -s -o /dev/null -w "%{http_code}" \ + -H "Content-Type: application/json" \ + -d "$PAYLOAD" \ + "$WEBHOOK_URL") + +if [[ "$RESPONSE" == "204" || "$RESPONSE" == "200" ]]; then + echo "Discord digest posted for ${DATE} (${COMMIT_COUNT:-0} commits)" +else + echo "Error: Discord webhook returned HTTP ${RESPONSE}" >&2 + exit 1 +fi diff --git a/scripts/provision-worktree.sh b/scripts/provision-worktree.sh index 725f9c6780..a80c01f779 100644 --- a/scripts/provision-worktree.sh +++ b/scripts/provision-worktree.sh @@ -332,11 +332,15 @@ main().catch((error) => { EOF } -if paperclipai_command_available; then - run_isolated_worktree_init +if [[ -e "$worktree_config_path" && 
-e "$worktree_env_path" ]]; then + echo "Reusing existing isolated Paperclip worktree config at $worktree_config_path" >&2 else - echo "paperclipai CLI not available in this workspace; writing isolated fallback config without DB seeding." >&2 - write_fallback_worktree_config + if paperclipai_command_available; then + run_isolated_worktree_init + else + echo "paperclipai CLI not available in this workspace; writing isolated fallback config without DB seeding." >&2 + write_fallback_worktree_config + fi fi list_base_node_modules_paths() { diff --git a/server/src/__tests__/adapter-routes.test.ts b/server/src/__tests__/adapter-routes.test.ts index cab559afab..d67a4f9bf5 100644 --- a/server/src/__tests__/adapter-routes.test.ts +++ b/server/src/__tests__/adapter-routes.test.ts @@ -83,11 +83,12 @@ describe("adapter routes", () => { const res = await request(app).get("/api/adapters"); expect(res.status).toBe(200); - expect(Array.isArray(res.body)).toBe(true); - expect(res.body.length).toBeGreaterThan(0); + const adapters = Array.isArray(res.body) ? 
res.body : JSON.parse(res.text); + expect(Array.isArray(adapters)).toBe(true); + expect(adapters.length).toBeGreaterThan(0); // Every adapter should have a capabilities object - for (const adapter of res.body) { + for (const adapter of adapters) { expect(adapter.capabilities).toBeDefined(); expect(typeof adapter.capabilities.supportsInstructionsBundle).toBe("boolean"); expect(typeof adapter.capabilities.supportsSkills).toBe("boolean"); diff --git a/server/src/__tests__/better-auth.test.ts b/server/src/__tests__/better-auth.test.ts new file mode 100644 index 0000000000..79bfa3e75b --- /dev/null +++ b/server/src/__tests__/better-auth.test.ts @@ -0,0 +1,43 @@ +import { afterEach, describe, expect, it } from "vitest"; +import type { BetterAuthOptions } from "better-auth"; +import { getCookies } from "better-auth/cookies"; +import { + buildBetterAuthAdvancedOptions, + deriveAuthCookiePrefix, +} from "../auth/better-auth.js"; + +const ORIGINAL_INSTANCE_ID = process.env.PAPERCLIP_INSTANCE_ID; + +afterEach(() => { + if (ORIGINAL_INSTANCE_ID === undefined) delete process.env.PAPERCLIP_INSTANCE_ID; + else process.env.PAPERCLIP_INSTANCE_ID = ORIGINAL_INSTANCE_ID; +}); + +describe("Better Auth cookie scoping", () => { + it("derives an instance-scoped cookie prefix", () => { + expect(deriveAuthCookiePrefix("default")).toBe("paperclip-default"); + expect(deriveAuthCookiePrefix("PAP-1601-worktree")).toBe("paperclip-PAP-1601-worktree"); + }); + + it("uses PAPERCLIP_INSTANCE_ID for the Better Auth cookie prefix", () => { + process.env.PAPERCLIP_INSTANCE_ID = "sat-worktree"; + + const advanced = buildBetterAuthAdvancedOptions({ disableSecureCookies: false }); + + expect(advanced).toEqual({ + cookiePrefix: "paperclip-sat-worktree", + }); + expect(getCookies({ advanced } as BetterAuthOptions).sessionToken.name).toBe( + "paperclip-sat-worktree.session_token", + ); + }); + + it("keeps local http auth cookies non-secure while preserving the scoped prefix", () => { + 
process.env.PAPERCLIP_INSTANCE_ID = "pap-worktree"; + + expect(buildBetterAuthAdvancedOptions({ disableSecureCookies: true })).toEqual({ + cookiePrefix: "paperclip-pap-worktree", + useSecureCookies: false, + }); + }); +}); diff --git a/server/src/__tests__/dev-runner-paths.test.ts b/server/src/__tests__/dev-runner-paths.test.ts index 6f9a5b8085..aaf264c030 100644 --- a/server/src/__tests__/dev-runner-paths.test.ts +++ b/server/src/__tests__/dev-runner-paths.test.ts @@ -2,12 +2,15 @@ import { describe, expect, it } from "vitest"; import { shouldTrackDevServerPath } from "../../../scripts/dev-runner-paths.mjs"; describe("shouldTrackDevServerPath", () => { - it("ignores repo-local Paperclip state and common test file paths", () => { + it("ignores generated state, diagnostic reports, and common test file paths", () => { expect( shouldTrackDevServerPath( ".paperclip/worktrees/PAP-712-for-project-configuration-get-rid-of-the-overview-tab-for-now/.agents/skills/paperclip", ), ).toBe(false); + expect(shouldTrackDevServerPath("server/report.20260416.154629.4965.0.001.json")).toBe(false); + expect(shouldTrackDevServerPath("server/report.20260416.154636.4725.0.001.json")).toBe(false); + expect(shouldTrackDevServerPath("server/report.20260416.154636.4965.0.002.json")).toBe(false); expect(shouldTrackDevServerPath("server/src/__tests__/health.test.ts")).toBe(false); expect(shouldTrackDevServerPath("packages/shared/src/lib/foo.test.ts")).toBe(false); expect(shouldTrackDevServerPath("packages/shared/src/lib/foo.spec.tsx")).toBe(false); diff --git a/server/src/__tests__/health-dev-server-token.test.ts b/server/src/__tests__/health-dev-server-token.test.ts new file mode 100644 index 0000000000..7c768a35a0 --- /dev/null +++ b/server/src/__tests__/health-dev-server-token.test.ts @@ -0,0 +1,128 @@ +import { mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import express from "express"; +import request from "supertest"; 
+import { afterEach, describe, expect, it, vi } from "vitest"; +import type { Db } from "@paperclipai/db"; +import { healthRoutes } from "../routes/health.js"; + +const tempDirs: string[] = []; + +function createDevServerStatusFile(payload: unknown) { + const dir = mkdtempSync(path.join(os.tmpdir(), "paperclip-health-dev-server-")); + tempDirs.push(dir); + const filePath = path.join(dir, "dev-server-status.json"); + writeFileSync(filePath, `${JSON.stringify(payload)}\n`, "utf8"); + return filePath; +} + +afterEach(() => { + for (const dir of tempDirs.splice(0)) { + rmSync(dir, { recursive: true, force: true }); + } +}); + +describe("GET /health dev-server supervisor access", () => { + it("exposes dev-server metadata to the supervising dev runner in authenticated mode", async () => { + const previousFile = process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE; + const previousToken = process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN; + process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE = createDevServerStatusFile({ + dirty: true, + lastChangedAt: "2026-03-20T12:00:00.000Z", + changedPathCount: 1, + changedPathsSample: ["server/src/routes/health.ts"], + pendingMigrations: [], + lastRestartAt: "2026-03-20T11:30:00.000Z", + }); + process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = "dev-runner-token"; + + let selectCall = 0; + const db = { + execute: vi.fn().mockResolvedValue([{ "?column?": 1 }]), + select: vi.fn(() => { + selectCall += 1; + if (selectCall === 1) { + return { + from: vi.fn(() => ({ + where: vi.fn().mockResolvedValue([{ count: 1 }]), + })), + }; + } + if (selectCall === 2) { + return { + from: vi.fn(() => ({ + where: vi.fn().mockResolvedValue([ + { + id: "settings-1", + general: {}, + experimental: { autoRestartDevServerWhenIdle: true }, + createdAt: new Date("2026-03-20T11:00:00.000Z"), + updatedAt: new Date("2026-03-20T11:00:00.000Z"), + }, + ]), + })), + }; + } + return { + from: vi.fn(() => ({ + where: vi.fn().mockResolvedValue([{ count: 0 }]), + })), + }; + }), + } as 
unknown as Db; + + try { + const app = express(); + app.use((req, _res, next) => { + (req as any).actor = { type: "none", source: "none" }; + next(); + }); + app.use( + "/health", + healthRoutes(db, { + deploymentMode: "authenticated", + deploymentExposure: "private", + authReady: true, + companyDeletionEnabled: true, + }), + ); + + const res = await request(app) + .get("/health") + .set("X-Paperclip-Dev-Server-Status-Token", "dev-runner-token"); + + expect(res.status).toBe(200); + expect(res.body).toEqual({ + status: "ok", + deploymentMode: "authenticated", + bootstrapStatus: "ready", + bootstrapInviteActive: false, + devServer: { + enabled: true, + restartRequired: true, + reason: "backend_changes", + lastChangedAt: "2026-03-20T12:00:00.000Z", + changedPathCount: 1, + changedPathsSample: ["server/src/routes/health.ts"], + pendingMigrations: [], + autoRestartEnabled: true, + activeRunCount: 0, + waitingForIdle: false, + lastRestartAt: "2026-03-20T11:30:00.000Z", + }, + }); + } finally { + if (previousFile === undefined) { + delete process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE; + } else { + process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE = previousFile; + } + if (previousToken === undefined) { + delete process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN; + } else { + process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = previousToken; + } + } + }); +}); diff --git a/server/src/__tests__/http-log-policy.test.ts b/server/src/__tests__/http-log-policy.test.ts index ec140c3e87..0e540a1116 100644 --- a/server/src/__tests__/http-log-policy.test.ts +++ b/server/src/__tests__/http-log-policy.test.ts @@ -53,6 +53,8 @@ describe("shouldSilenceHttpSuccessLog", () => { }); it("silences successful static asset requests", () => { + expect(shouldSilenceHttpSuccessLog("GET", "/", 200)).toBe(true); + expect(shouldSilenceHttpSuccessLog("GET", "/index.html", 200)).toBe(true); expect(shouldSilenceHttpSuccessLog("GET", "/@fs/Users/dotta/paperclip/ui/src/main.tsx", 200)).toBe(true); 
expect(shouldSilenceHttpSuccessLog("GET", "/src/App.tsx?t=123", 200)).toBe(true); expect(shouldSilenceHttpSuccessLog("GET", "/site.webmanifest", 200)).toBe(true); diff --git a/server/src/__tests__/instance-database-backups-routes.test.ts b/server/src/__tests__/instance-database-backups-routes.test.ts new file mode 100644 index 0000000000..87774a82e4 --- /dev/null +++ b/server/src/__tests__/instance-database-backups-routes.test.ts @@ -0,0 +1,149 @@ +import express from "express"; +import request from "supertest"; +import { describe, expect, it, vi } from "vitest"; +import { errorHandler } from "../middleware/index.js"; +import { + instanceDatabaseBackupRoutes, + type InstanceDatabaseBackupService, +} from "../routes/instance-database-backups.js"; +import { conflict } from "../errors.js"; + +function createApp(actor: Record, service: InstanceDatabaseBackupService) { + const app = express(); + app.use(express.json()); + app.use((req, _res, next) => { + req.actor = actor as typeof req.actor; + next(); + }); + app.use("/api", instanceDatabaseBackupRoutes(service)); + app.use(errorHandler); + return app; +} + +function createBackupService(overrides: Partial = {}): InstanceDatabaseBackupService { + return { + runManualBackup: vi.fn().mockResolvedValue({ + trigger: "manual", + backupFile: "/tmp/paperclip-20260416.sql.gz", + sizeBytes: 1234, + prunedCount: 2, + backupDir: "/tmp", + retention: { + dailyDays: 7, + weeklyWeeks: 4, + monthlyMonths: 1, + }, + startedAt: "2026-04-16T20:00:00.000Z", + finishedAt: "2026-04-16T20:00:01.000Z", + durationMs: 1000, + }), + ...overrides, + }; +} + +describe("instance database backup routes", () => { + it("runs a manual backup for an instance admin and returns the server result", async () => { + const service = createBackupService(); + const app = createApp( + { + type: "board", + userId: "admin-1", + source: "session", + isInstanceAdmin: true, + }, + service, + ); + + const res = await 
request(app).post("/api/instance/database-backups").send({}); + + expect(res.status).toBe(201); + expect(service.runManualBackup).toHaveBeenCalledTimes(1); + expect(res.body).toEqual({ + trigger: "manual", + backupFile: "/tmp/paperclip-20260416.sql.gz", + sizeBytes: 1234, + prunedCount: 2, + backupDir: "/tmp", + retention: { + dailyDays: 7, + weeklyWeeks: 4, + monthlyMonths: 1, + }, + startedAt: "2026-04-16T20:00:00.000Z", + finishedAt: "2026-04-16T20:00:01.000Z", + durationMs: 1000, + }); + }); + + it("allows local implicit board access", async () => { + const service = createBackupService(); + const app = createApp( + { + type: "board", + userId: "local-board", + source: "local_implicit", + isInstanceAdmin: false, + }, + service, + ); + + await request(app).post("/api/instance/database-backups").send({}).expect(201); + + expect(service.runManualBackup).toHaveBeenCalledTimes(1); + }); + + it("rejects non-admin board users", async () => { + const service = createBackupService(); + const app = createApp( + { + type: "board", + userId: "user-1", + source: "session", + isInstanceAdmin: false, + companyIds: ["company-1"], + }, + service, + ); + + await request(app).post("/api/instance/database-backups").send({}).expect(403); + + expect(service.runManualBackup).not.toHaveBeenCalled(); + }); + + it("rejects agent callers", async () => { + const service = createBackupService(); + const app = createApp( + { + type: "agent", + agentId: "agent-1", + companyId: "company-1", + source: "agent_key", + }, + service, + ); + + await request(app).post("/api/instance/database-backups").send({}).expect(403); + + expect(service.runManualBackup).not.toHaveBeenCalled(); + }); + + it("returns conflict when another server backup is already running", async () => { + const service = createBackupService({ + runManualBackup: vi.fn().mockRejectedValue(conflict("Database backup already in progress")), + }); + const app = createApp( + { + type: "board", + userId: "admin-1", + source: "session", + 
isInstanceAdmin: true, + }, + service, + ); + + const res = await request(app).post("/api/instance/database-backups").send({}); + + expect(res.status).toBe(409); + expect(res.body).toEqual({ error: "Database backup already in progress" }); + }); +}); diff --git a/server/src/__tests__/server-startup-feedback-export.test.ts b/server/src/__tests__/server-startup-feedback-export.test.ts index c950aa6ee6..f0672d95ae 100644 --- a/server/src/__tests__/server-startup-feedback-export.test.ts +++ b/server/src/__tests__/server-startup-feedback-export.test.ts @@ -128,6 +128,15 @@ vi.mock("../services/index.js", () => ({ })), tickTimers: vi.fn(async () => ({ enqueued: 0 })), })), + instanceSettingsService: vi.fn(() => ({ + getGeneral: vi.fn(async () => ({ + backupRetention: { + dailyDays: 7, + weeklyWeeks: 4, + monthlyMonths: 1, + }, + })), + })), reconcilePersistedRuntimeServicesOnStartup: vi.fn(async () => ({ reconciled: 0 })), routineService: vi.fn(() => ({ tickScheduledTriggers: vi.fn(async () => ({ triggered: 0 })), diff --git a/server/src/__tests__/vite-html-renderer.test.ts b/server/src/__tests__/vite-html-renderer.test.ts index f958ce79b8..add3ba8004 100644 --- a/server/src/__tests__/vite-html-renderer.test.ts +++ b/server/src/__tests__/vite-html-renderer.test.ts @@ -32,7 +32,7 @@ describe("createCachedViteHtmlRenderer", () => { } }); - it("reuses the injected dev html shell until a watched file changes", async () => { + it("reuses the injected dev html shell until index.html changes", async () => { const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-vite-html-")); tempDirs.push(tempDir); const indexPath = path.join(tempDir, "index.html"); @@ -57,6 +57,12 @@ describe("createCachedViteHtmlRenderer", () => { expect(first.match(/\/@vite\/client/g)?.length).toBe(1); expect(first).toContain("window.$RefreshReg$"); + const sourcePath = path.join(tempDir, "src", "main.tsx"); + fs.mkdirSync(path.dirname(sourcePath), { recursive: true }); + 
fs.writeFileSync(sourcePath, "export {};\n", "utf8"); + watcher.emit("change", sourcePath); + expect(await renderer.render("/")).toBe(first); + fs.writeFileSync( indexPath, 'v2', diff --git a/server/src/__tests__/workspace-runtime.test.ts b/server/src/__tests__/workspace-runtime.test.ts index e44f40da95..50e89c017c 100644 --- a/server/src/__tests__/workspace-runtime.test.ts +++ b/server/src/__tests__/workspace-runtime.test.ts @@ -897,7 +897,7 @@ describe("realizeExecutionWorkspace", () => { await runGit(repoRoot, ["commit", "-m", "Add worktree provision script"]); try { - const workspace = await realizeExecutionWorkspace({ + const workspaceInput = { base: { baseCwd: repoRoot, source: "project_primary", @@ -923,7 +923,8 @@ describe("realizeExecutionWorkspace", () => { name: "Codex Coder", companyId: "company-1", }, - }); + } satisfies Parameters<typeof realizeExecutionWorkspace>[0]; + const workspace = await realizeExecutionWorkspace(workspaceInput); const configPath = path.join(workspace.cwd, ".paperclip", "config.json"); const envPath = path.join(workspace.cwd, ".paperclip", ".env"); @@ -954,6 +955,34 @@ describe("realizeExecutionWorkspace", () => { process.chdir(workspace.cwd); expect(resolvePaperclipConfigPath()).toBe(configPath); + + const preservedPort = 39999; + await fs.writeFile( + configPath, + JSON.stringify( + { + ...configContents, + server: { + ...configContents.server, + port: preservedPort, + }, + }, + null, + 2, + ) + "\n", + "utf8", + ); + await fs.writeFile(envPath, `${envContents}PAPERCLIP_WORKTREE_COLOR="#112233"\n`, "utf8"); + + const reusedWorkspace = await realizeExecutionWorkspace(workspaceInput); + const reusedConfigContents = JSON.parse(await fs.readFile(configPath, "utf8")); + const reusedEnvContents = await fs.readFile(envPath, "utf8"); + + expect(reusedWorkspace.cwd).toBe(workspace.cwd); + expect(reusedWorkspace.created).toBe(false); + expect(reusedConfigContents.server.port).toBe(preservedPort); +
expect(reusedConfigContents.database.embeddedPostgresDataDir).toBe(path.join(expectedInstanceRoot, "db")); + expect(reusedEnvContents).toContain('PAPERCLIP_WORKTREE_COLOR="#112233"'); } finally { process.chdir(previousCwd); } diff --git a/server/src/app.ts b/server/src/app.ts index 971645d5e9..cd0d88dbc7 100644 --- a/server/src/app.ts +++ b/server/src/app.ts @@ -27,6 +27,10 @@ import { sidebarBadgeRoutes } from "./routes/sidebar-badges.js"; import { sidebarPreferenceRoutes } from "./routes/sidebar-preferences.js"; import { inboxDismissalRoutes } from "./routes/inbox-dismissals.js"; import { instanceSettingsRoutes } from "./routes/instance-settings.js"; +import { + instanceDatabaseBackupRoutes, + type InstanceDatabaseBackupService, +} from "./routes/instance-database-backups.js"; import { llmRoutes } from "./routes/llms.js"; import { authRoutes } from "./routes/auth.js"; import { assetRoutes } from "./routes/assets.js"; @@ -112,6 +116,7 @@ export async function createApp( now?: Date; }): Promise; }; + databaseBackupService?: InstanceDatabaseBackupService; deploymentMode: DeploymentMode; deploymentExposure: DeploymentExposure; allowedHostnames: string[]; @@ -194,6 +199,9 @@ export async function createApp( api.use(sidebarPreferenceRoutes(db)); api.use(inboxDismissalRoutes(db)); api.use(instanceSettingsRoutes(db)); + if (opts.databaseBackupService) { + api.use(instanceDatabaseBackupRoutes(opts.databaseBackupService)); + } const hostServicesDisposers = new Map<string, () => void>(); const workerManager = createPluginWorkerManager(); const pluginRegistry = pluginRegistryService(db); diff --git a/server/src/auth/better-auth.ts b/server/src/auth/better-auth.ts index 881c3072ca..402dad5203 100644 --- a/server/src/auth/better-auth.ts +++ b/server/src/auth/better-auth.ts @@ -11,6 +11,7 @@ import { authVerifications, } from "@paperclipai/db"; import type { Config } from "../config.js"; +import { resolvePaperclipInstanceId } from "../home-paths.js"; export type BetterAuthSessionUser = { id:
string; @@ -25,6 +26,24 @@ export type BetterAuthSessionResult = { type BetterAuthInstance = ReturnType; +const AUTH_COOKIE_PREFIX_FALLBACK = "default"; +const AUTH_COOKIE_PREFIX_INVALID_SEGMENTS_RE = /[^a-zA-Z0-9_-]+/g; + +export function deriveAuthCookiePrefix(instanceId = resolvePaperclipInstanceId()): string { + const scopedInstanceId = instanceId + .trim() + .replace(AUTH_COOKIE_PREFIX_INVALID_SEGMENTS_RE, "-") + .replace(/^-+|-+$/g, "") || AUTH_COOKIE_PREFIX_FALLBACK; + return `paperclip-${scopedInstanceId}`; +} + +export function buildBetterAuthAdvancedOptions(input: { disableSecureCookies: boolean }) { + return { + cookiePrefix: deriveAuthCookiePrefix(), + ...(input.disableSecureCookies ? { useSecureCookies: false } : {}), + }; +} + function headersFromNodeHeaders(rawHeaders: IncomingHttpHeaders): Headers { const headers = new Headers(); for (const [key, raw] of Object.entries(rawHeaders)) { @@ -97,7 +116,7 @@ export function createBetterAuthInstance(db: Db, config: Config, trustedOrigins? requireEmailVerification: false, disableSignUp: config.authDisableSignUp, }, - ...(isHttpOnly ? 
{ advanced: { useSecureCookies: false } } : {}), + advanced: buildBetterAuthAdvancedOptions({ disableSecureCookies: isHttpOnly }), }; if (!baseUrl) { diff --git a/server/src/index.ts b/server/src/index.ts index eb6ad4e477..870dd013ce 100644 --- a/server/src/index.ts +++ b/server/src/index.ts @@ -41,6 +41,11 @@ import { printStartupBanner } from "./startup-banner.js"; import { getBoardClaimWarningUrl, initializeBoardClaimChallenge } from "./board-claim.js"; import { maybePersistWorktreeRuntimePorts } from "./worktree-config.js"; import { initTelemetry, getTelemetryClient } from "./telemetry.js"; +import { conflict } from "./errors.js"; +import type { + InstanceDatabaseBackupRunResult, + InstanceDatabaseBackupTrigger, +} from "./routes/instance-database-backups.js"; type BetterAuthSessionUser = { id: string; @@ -521,11 +526,80 @@ export async function startServer(): Promise { const feedback = feedbackService(db as any, { shareClient: createFeedbackTraceShareClientFromConfig(config), }); + const backupSettingsSvc = instanceSettingsService(db); + let databaseBackupInFlight = false; + const runServerDatabaseBackup = async ( + trigger: InstanceDatabaseBackupTrigger, + ): Promise<InstanceDatabaseBackupRunResult | null> => { + if (databaseBackupInFlight) { + const message = "Database backup already in progress"; + if (trigger === "scheduled") { + logger.warn("Skipping scheduled database backup because a previous backup is still running"); + return null; + } + throw conflict(message); + } + + databaseBackupInFlight = true; + const startedAt = new Date(); + const startedAtMs = Date.now(); + const label = trigger === "scheduled" ? "Automatic" : "Manual"; + try { + logger.info({ backupDir: config.databaseBackupDir, trigger }, `${label} database backup starting`); + // Read retention from Instance Settings (DB) so changes take effect without restart.
+ const generalSettings = await backupSettingsSvc.getGeneral(); + const retention = generalSettings.backupRetention; + + const result = await runDatabaseBackup({ + connectionString: activeDatabaseConnectionString, + backupDir: config.databaseBackupDir, + retention, + filenamePrefix: "paperclip", + }); + const finishedAt = new Date(); + const response: InstanceDatabaseBackupRunResult = { + ...result, + trigger, + backupDir: config.databaseBackupDir, + retention, + startedAt: startedAt.toISOString(), + finishedAt: finishedAt.toISOString(), + durationMs: Date.now() - startedAtMs, + }; + logger.info( + { + backupFile: result.backupFile, + sizeBytes: result.sizeBytes, + prunedCount: result.prunedCount, + backupDir: config.databaseBackupDir, + retention, + trigger, + durationMs: response.durationMs, + }, + `${label} database backup complete: ${formatDatabaseBackupResult(result)}`, + ); + return response; + } catch (err) { + logger.error({ err, backupDir: config.databaseBackupDir, trigger }, `${label} database backup failed`); + throw err; + } finally { + databaseBackupInFlight = false; + } + }; const app = await createApp(db as any, { uiMode, serverPort: listenPort, storageService, feedbackExportService: feedback, + databaseBackupService: { + runManualBackup: async () => { + const result = await runServerDatabaseBackup("manual"); + if (!result) { + throw conflict("Database backup already in progress"); + } + return result; + }, + }, deploymentMode: config.deploymentMode, deploymentExposure: config.deploymentExposure, allowedHostnames: config.allowedHostnames, @@ -644,43 +718,6 @@ export async function startServer(): Promise { if (config.databaseBackupEnabled) { const backupIntervalMs = config.databaseBackupIntervalMinutes * 60 * 1000; - const settingsSvc = instanceSettingsService(db); - let backupInFlight = false; - - const runScheduledBackup = async () => { - if (backupInFlight) { - logger.warn("Skipping scheduled database backup because a previous backup is still 
running"); - return; - } - - backupInFlight = true; - try { - // Read retention from Instance Settings (DB) so changes take effect without restart - const generalSettings = await settingsSvc.getGeneral(); - const retention = generalSettings.backupRetention; - - const result = await runDatabaseBackup({ - connectionString: activeDatabaseConnectionString, - backupDir: config.databaseBackupDir, - retention, - filenamePrefix: "paperclip", - }); - logger.info( - { - backupFile: result.backupFile, - sizeBytes: result.sizeBytes, - prunedCount: result.prunedCount, - backupDir: config.databaseBackupDir, - retention, - }, - `Automatic database backup complete: ${formatDatabaseBackupResult(result)}`, - ); - } catch (err) { - logger.error({ err, backupDir: config.databaseBackupDir }, "Automatic database backup failed"); - } finally { - backupInFlight = false; - } - }; logger.info( { @@ -691,7 +728,9 @@ export async function startServer(): Promise { "Automatic database backups enabled", ); setInterval(() => { - void runScheduledBackup(); + void runServerDatabaseBackup("scheduled").catch(() => { + // runServerDatabaseBackup already logs the failure with context. 
+ }); }, backupIntervalMs); } diff --git a/server/src/middleware/http-log-policy.ts b/server/src/middleware/http-log-policy.ts index d652ee38d0..a0b93694b2 100644 --- a/server/src/middleware/http-log-policy.ts +++ b/server/src/middleware/http-log-policy.ts @@ -23,6 +23,8 @@ const SILENCED_SUCCESS_STATIC_PREFIXES = [ ]; const SILENCED_SUCCESS_STATIC_PATHS = new Set([ + "/", + "/index.html", "/favicon.ico", "/site.webmanifest", "/sw.js", diff --git a/server/src/routes/health.ts b/server/src/routes/health.ts index a1992dd41c..388585a5bd 100644 --- a/server/src/routes/health.ts +++ b/server/src/routes/health.ts @@ -1,3 +1,4 @@ +import { timingSafeEqual } from "node:crypto"; import { Router } from "express"; import type { Db } from "@paperclipai/db"; import { and, count, eq, gt, inArray, isNull, sql } from "drizzle-orm"; @@ -16,6 +17,17 @@ function shouldExposeFullHealthDetails( return actorType === "board" || actorType === "agent"; } +function hasDevServerStatusToken(providedToken: string | undefined) { + const expectedToken = process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN?.trim(); + const token = providedToken?.trim(); + if (!expectedToken || !token) return false; + + const expected = Buffer.from(expectedToken); + const provided = Buffer.from(token); + if (expected.length !== provided.length) return false; + return timingSafeEqual(expected, provided); +} + export function healthRoutes( db?: Db, opts: { @@ -38,6 +50,8 @@ export function healthRoutes( actorType, opts.deploymentMode, ); + const exposeDevServerDetails = + exposeFullDetails || hasDevServerStatusToken(req.get("x-paperclip-dev-server-status-token")); if (!db) { res.json( @@ -90,7 +104,7 @@ export function healthRoutes( const persistedDevServerStatus = readPersistedDevServerStatus(); let devServer: ReturnType | undefined; - if (persistedDevServerStatus && typeof (db as { select?: unknown }).select === "function") { + if (exposeDevServerDetails && persistedDevServerStatus && typeof (db as { select?: unknown 
}).select === "function") { const instanceSettings = instanceSettingsService(db); const experimentalSettings = await instanceSettings.getExperimental(); const activeRunCount = await db @@ -111,6 +125,7 @@ export function healthRoutes( deploymentMode: opts.deploymentMode, bootstrapStatus, bootstrapInviteActive, + ...(devServer ? { devServer } : {}), }); return; } diff --git a/server/src/routes/index.ts b/server/src/routes/index.ts index 2b6506f55b..8d954a9570 100644 --- a/server/src/routes/index.ts +++ b/server/src/routes/index.ts @@ -17,3 +17,4 @@ export { inboxDismissalRoutes } from "./inbox-dismissals.js"; export { llmRoutes } from "./llms.js"; export { accessRoutes } from "./access.js"; export { instanceSettingsRoutes } from "./instance-settings.js"; +export { instanceDatabaseBackupRoutes } from "./instance-database-backups.js"; diff --git a/server/src/routes/instance-database-backups.ts b/server/src/routes/instance-database-backups.ts new file mode 100644 index 0000000000..a7fbb5ac83 --- /dev/null +++ b/server/src/routes/instance-database-backups.ts @@ -0,0 +1,30 @@ +import { Router } from "express"; +import type { BackupRetentionPolicy, RunDatabaseBackupResult } from "@paperclipai/db"; +import { assertInstanceAdmin } from "./authz.js"; + +export type InstanceDatabaseBackupTrigger = "manual" | "scheduled"; + +export type InstanceDatabaseBackupRunResult = RunDatabaseBackupResult & { + trigger: InstanceDatabaseBackupTrigger; + backupDir: string; + retention: BackupRetentionPolicy; + startedAt: string; + finishedAt: string; + durationMs: number; +}; + +export type InstanceDatabaseBackupService = { + runManualBackup(): Promise<InstanceDatabaseBackupRunResult>; +}; + +export function instanceDatabaseBackupRoutes(service: InstanceDatabaseBackupService) { + const router = Router(); + + router.post("/instance/database-backups", async (req, res) => { + assertInstanceAdmin(req); + const result = await service.runManualBackup(); + res.status(201).json(result); + }); + + return router; +} diff --git
a/server/src/vite-html-renderer.ts b/server/src/vite-html-renderer.ts index 4834591acf..983a3ea158 100644 --- a/server/src/vite-html-renderer.ts +++ b/server/src/vite-html-renderer.ts @@ -63,7 +63,7 @@ export function createCachedViteHtmlRenderer(opts: { function onWatchEvent(filePath: string): void { const resolvedPath = path.resolve(filePath); - if (resolvedPath === templatePath || resolvedPath.startsWith(`${uiRoot}${path.sep}`)) { + if (resolvedPath === templatePath) { invalidate(); } }