[codex] Add backup endpoint and dev runtime hardening (#4087)

## Thinking Path

> - Paperclip is a local-first control plane for AI-agent companies.
> - Operators need predictable local dev behavior, recoverable instance
data, and scripts that do not churn the running app.
> - Several accumulated changes improve backup streaming, dev-server
health, static UI caching/logging, diagnostic-file ignores, and instance
isolation.
> - These are operational improvements that can land independently from
product UI work.
> - This pull request groups the dev-infra and backup changes from the
split branch into one standalone branch.
> - The benefit is safer local operation, easier manual backups, less
noisy dev output, and less cross-instance auth leakage.

## What Changed

- Added a manual instance database backup endpoint and route tests.
- Streamed backup/restore handling to avoid materializing large payloads
at once.
- Reduced dev static UI log/cache churn and ignored Node diagnostic
report captures.
- Added guarded dev auto-restart health polling coverage.
- Preserved worktree config during provisioning and scoped auth cookies
by instance.
- Added a Discord daily digest helper script and environment
documentation.
- Hardened adapter-route and startup feedback export tests around the
changed infrastructure.

## Verification

- `pnpm install --frozen-lockfile`
- `pnpm exec vitest run packages/db/src/backup-lib.test.ts
server/src/__tests__/instance-database-backups-routes.test.ts
server/src/__tests__/server-startup-feedback-export.test.ts
server/src/__tests__/adapter-routes.test.ts
server/src/__tests__/dev-runner-paths.test.ts
server/src/__tests__/health-dev-server-token.test.ts
server/src/__tests__/http-log-policy.test.ts
server/src/__tests__/vite-html-renderer.test.ts
server/src/__tests__/workspace-runtime.test.ts
server/src/__tests__/better-auth.test.ts`
- Split integration check: merged after the runtime/governance branch
and before UI branches with no merge conflicts.
- Confirmed this branch does not include `pnpm-lock.yaml`.

## Risks

- Medium risk: touches server startup, backup streaming, auth cookie
naming, dev health checks, and worktree provisioning.
- Backup endpoint behavior depends on existing board/admin access
controls and database backup helpers.
- No database migrations are included.

> For core feature work, check [`ROADMAP.md`](ROADMAP.md) first and
discuss it in `#dev` before opening the PR. Feature PRs that overlap
with planned core work may need to be redirected. See `CONTRIBUTING.md`.

## Model Used

- OpenAI Codex, GPT-5.4 tool-enabled coding model: an agentic
code-editing runtime with local shell and GitHub CLI access. The exact
context window and reasoning mode are not exposed by the Paperclip
harness.

## Checklist

- [x] I have included a thinking path that traces from project context
to this change
- [x] I have specified the model used (with version and capability
details)
- [x] I have checked ROADMAP.md and confirmed this PR does not duplicate
planned core work
- [x] I have run tests locally and they pass
- [x] I have added or updated tests where applicable
- [x] If this change affects the UI, I have included before/after
screenshots
- [x] I have updated relevant documentation to reflect my changes
- [x] I have considered and documented any risks above
- [x] I will address all Greptile and reviewer comments before
requesting merge

---------

Co-authored-by: Paperclip <noreply@paperclip.ing>
This commit is contained in:
Dotta
2026-04-20 06:08:55 -05:00
committed by GitHub
parent 236d11d36f
commit e89d3f7e11
27 changed files with 894 additions and 111 deletions

View File

@@ -2,3 +2,6 @@ DATABASE_URL=postgres://paperclip:paperclip@localhost:5432/paperclip
PORT=3100
SERVE_UI=false
BETTER_AUTH_SECRET=paperclip-dev-secret
# Discord webhook for daily merge digest (scripts/discord-daily-digest.sh)
# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/...

1
.gitignore vendored
View File

@@ -35,6 +35,7 @@ server/src/**/*.d.ts
server/src/**/*.d.ts.map
tmp/
feedback-export-*
diagnostics/
# Editor / tool temp files
*.tmp

View File

@@ -222,6 +222,8 @@ That repo-local env also sets:
- `PAPERCLIP_WORKTREE_COLOR=<hex-color>`
The server/UI use those values for worktree-specific branding such as the top banner and dynamically colored favicon.
Authenticated worktree servers also use the `PAPERCLIP_INSTANCE_ID` value to scope Better Auth cookie names.
Browser cookies are shared by host rather than port, so this prevents logging into one `127.0.0.1:<port>` worktree from replacing another worktree server's session cookie.
Print shell exports explicitly when needed:

View File

@@ -127,6 +127,7 @@ describeEmbeddedPostgres("runDatabaseBackup", () => {
backupDir,
retention: { dailyDays: 7, weeklyWeeks: 4, monthlyMonths: 1 },
filenamePrefix: "paperclip-test",
backupEngine: "javascript",
});
expect(result.backupFile).toMatch(/paperclip-test-.*\.sql\.gz$/);
@@ -148,14 +149,17 @@ describeEmbeddedPostgres("runDatabaseBackup", () => {
title: string;
payload: string;
state: string;
metadata: { index: number; even: boolean };
metadata: { index: number; even: boolean } | string;
}[]>(`
SELECT "title", "payload", "state"::text AS "state", "metadata"
FROM "public"."backup_test_records"
WHERE "title" IN ('row-0', 'row-159')
ORDER BY "title"
`);
expect(sampleRows).toEqual([
expect(sampleRows.map((row) => ({
...row,
metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata,
}))).toEqual([
{
title: "row-0",
payload,

View File

@@ -1,6 +1,8 @@
import { createReadStream, createWriteStream, existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from "node:fs";
import { basename, resolve } from "node:path";
import { createInterface } from "node:readline";
import { spawn } from "node:child_process";
import { open as openFile } from "node:fs/promises";
import { pipeline } from "node:stream/promises";
import { createGunzip, createGzip } from "node:zlib";
import postgres from "postgres";
@@ -20,6 +22,7 @@ export type RunDatabaseBackupOptions = {
includeMigrationJournal?: boolean;
excludeTables?: string[];
nullifyColumns?: Record<string, string[]>;
backupEngine?: "auto" | "pg_dump" | "javascript";
};
export type RunDatabaseBackupResult = {
@@ -61,6 +64,9 @@ type ExtensionDefinition = {
const DRIZZLE_SCHEMA = "drizzle";
const DRIZZLE_MIGRATIONS_TABLE = "__drizzle_migrations";
const DEFAULT_BACKUP_WRITE_BUFFER_BYTES = 1024 * 1024;
const BACKUP_DATA_CURSOR_ROWS = 100;
const BACKUP_CLI_STDERR_BYTES = 64 * 1024;
const BACKUP_BREAKPOINT_DETECT_BYTES = 64 * 1024;
const STATEMENT_BREAKPOINT = "-- paperclip statement breakpoint 69f6f3f1-42fd-46a6-bf17-d1d85f8f3900";
@@ -223,6 +229,134 @@ function tableKey(schemaName: string, tableName: string): string {
return `${schemaName}.${tableName}`;
}
/**
 * Reports whether the backup options request any SQL-level transform
 * (migration journal inclusion, table exclusion, or column nullification).
 * Callers use this to decide whether plain pg_dump output would be
 * equivalent to the JavaScript backup engine's output.
 */
function hasBackupTransforms(opts: RunDatabaseBackupOptions): boolean {
  if (opts.includeMigrationJournal === true) {
    return true;
  }
  const excludedCount = opts.excludeTables?.length ?? 0;
  if (excludedCount > 0) {
    return true;
  }
  const nullified = opts.nullifyColumns ?? {};
  return Object.keys(nullified).length > 0;
}
/**
 * Renders a single column value as a SQL literal for an INSERT statement.
 * Columns listed in `nullifiedColumns` are forced to NULL before formatting.
 * String quoting/escaping is delegated to formatSqlLiteral.
 */
function formatSqlValue(rawValue: unknown, columnName: string | undefined, nullifiedColumns: Set<string>): string {
  const shouldNullify = !!columnName && nullifiedColumns.has(columnName);
  const value = shouldNullify ? null : rawValue;
  if (value === null || value === undefined) {
    return "NULL";
  }
  switch (typeof value) {
    case "boolean":
      return value ? "true" : "false";
    case "number":
      return String(value);
    default:
      break;
  }
  if (value instanceof Date) {
    // Dates are serialized as ISO-8601 strings.
    return formatSqlLiteral(value.toISOString());
  }
  if (typeof value === "object") {
    // Objects/arrays (e.g. jsonb columns) are serialized as JSON text.
    return formatSqlLiteral(JSON.stringify(value));
  }
  return formatSqlLiteral(String(value));
}
/**
 * Appends a stderr chunk to the captured text while keeping only the trailing
 * BACKUP_CLI_STDERR_BYTES bytes, so a noisy child process cannot grow the
 * capture without bound. The tail is kept because the most recent output is
 * usually the most useful part of an error.
 */
function appendCapturedStderr(previous: string, chunk: Buffer | string): string {
  const text = Buffer.isBuffer(chunk) ? chunk.toString("utf8") : chunk;
  const combined = previous + text;
  if (Buffer.byteLength(combined, "utf8") <= BACKUP_CLI_STDERR_BYTES) {
    return combined;
  }
  const tail = Buffer.from(combined, "utf8").subarray(-BACKUP_CLI_STDERR_BYTES);
  return tail.toString("utf8");
}
/**
 * Waits for a spawned child process to exit, capturing (bounded) stderr so
 * failures carry useful context.
 *
 * Resolves when the child exits with code 0. Throws an Error whose message
 * includes the label plus either the terminating signal or the exit code,
 * followed by any trimmed stderr output.
 *
 * @param child - The spawned process to observe.
 * @param label - Human-readable name (e.g. the binary name) for error messages.
 */
async function waitForChildExit(child: ReturnType<typeof spawn>, label: string): Promise<void> {
  let stderr = "";
  // Accumulate stderr as it arrives; appendCapturedStderr bounds the size.
  child.stderr?.on("data", (chunk) => {
    stderr = appendCapturedStderr(stderr, chunk);
  });
  // "error" covers spawn failures (e.g. binary not found); "exit" covers
  // normal termination. Whichever fires first settles the promise.
  const result = await new Promise<{ code: number | null; signal: NodeJS.Signals | null }>((resolve, reject) => {
    child.once("error", reject);
    child.once("exit", (code, signal) => resolve({ code, signal }));
  });
  // A signal-terminated child reports a signal with code null; surface it.
  if (result.signal) {
    throw new Error(`${label} exited via ${result.signal}${stderr.trim() ? `: ${stderr.trim()}` : ""}`);
  }
  if (result.code !== 0) {
    throw new Error(`${label} failed with exit code ${result.code ?? "unknown"}${stderr.trim() ? `: ${stderr.trim()}` : ""}`);
  }
}
/**
 * Streams a plain-format `pg_dump` of the public schema into a gzipped file.
 *
 * The dump binary can be overridden via PAPERCLIP_PG_DUMP_PATH; the connect
 * timeout is passed to pg_dump through PGCONNECT_TIMEOUT. Dump output is
 * piped through gzip straight to disk, so the dump is never held in memory.
 *
 * @throws if pg_dump cannot be spawned, exits non-zero, or is killed by a
 *   signal (see waitForChildExit), or if writing the file fails.
 */
async function runPgDumpBackup(opts: {
  connectionString: string;
  backupFile: string;
  connectTimeout: number;
}): Promise<void> {
  const binary = process.env.PAPERCLIP_PG_DUMP_PATH || "pg_dump";
  const args = [
    `--dbname=${opts.connectionString}`,
    "--format=plain",
    "--clean",
    "--if-exists",
    "--no-owner",
    "--no-privileges",
    "--schema=public",
  ];
  const childEnv = {
    ...process.env,
    PGCONNECT_TIMEOUT: String(opts.connectTimeout),
  };
  const child = spawn(binary, args, { stdio: ["ignore", "pipe", "pipe"], env: childEnv });
  const stdout = child.stdout;
  if (!stdout) {
    throw new Error("pg_dump did not expose stdout");
  }
  // Run the stream pipeline and the exit watcher together so either failure
  // (write error or non-zero exit) rejects the overall backup.
  const compressAndWrite = pipeline(stdout, createGzip(), createWriteStream(opts.backupFile));
  await Promise.all([compressAndWrite, waitForChildExit(child, binary)]);
}
/**
 * Restores a backup file by streaming it into `psql` over stdin.
 *
 * Gzipped backups (".gz" suffix) are decompressed on the fly. ON_ERROR_STOP
 * makes psql abort on the first failing statement so partial restores are
 * reported as errors rather than silently skipped.
 *
 * The psql binary can be overridden via PAPERCLIP_PSQL_PATH; the connect
 * timeout is passed through PGCONNECT_TIMEOUT.
 */
async function restoreWithPsql(opts: RunDatabaseRestoreOptions, connectTimeout: number): Promise<void> {
  const binary = process.env.PAPERCLIP_PSQL_PATH || "psql";
  const args = [
    `--dbname=${opts.connectionString}`,
    "--set=ON_ERROR_STOP=1",
    "--quiet",
    "--no-psqlrc",
  ];
  const childEnv = {
    ...process.env,
    PGCONNECT_TIMEOUT: String(connectTimeout),
  };
  const child = spawn(binary, args, { stdio: ["pipe", "ignore", "pipe"], env: childEnv });
  const stdin = child.stdin;
  if (!stdin) {
    throw new Error("psql did not expose stdin");
  }
  const fileStream = createReadStream(opts.backupFile);
  const input = opts.backupFile.endsWith(".gz") ? fileStream.pipe(createGunzip()) : fileStream;
  // Feed the file into psql and watch its exit status concurrently; either
  // a stream failure or a non-zero exit rejects the restore.
  await Promise.all([pipeline(input, stdin), waitForChildExit(child, binary)]);
}
/**
 * Checks whether a backup file contains the paperclip STATEMENT_BREAKPOINT
 * marker, scanning only the first BACKUP_BREAKPOINT_DETECT_BYTES of
 * (decompressed) text. Gzipped files (".gz" suffix) are decompressed on the
 * fly. If no marker appears within the scan window, the file is treated as
 * marker-free without reading the rest.
 *
 * @returns true when the marker is found, false otherwise.
 */
async function hasStatementBreakpoints(backupFile: string): Promise<boolean> {
  const raw = createReadStream(backupFile);
  const stream = backupFile.endsWith(".gz") ? raw.pipe(createGunzip()) : raw;
  let text = "";
  try {
    for await (const chunk of stream) {
      // Accumulate across chunks so a marker split over a chunk boundary
      // is still detected.
      text += Buffer.isBuffer(chunk) ? chunk.toString("utf8") : String(chunk);
      if (text.includes(STATEMENT_BREAKPOINT)) return true;
      // Give up once the scan window is exhausted without a match.
      if (Buffer.byteLength(text, "utf8") >= BACKUP_BREAKPOINT_DETECT_BYTES) return false;
    }
    return text.includes(STATEMENT_BREAKPOINT);
  } finally {
    // Destroy both streams so the file descriptor is released even when
    // returning early from inside the loop.
    stream.destroy();
    raw.destroy();
  }
}
async function* readRestoreStatements(backupFile: string): AsyncGenerator<string> {
const raw = createReadStream(backupFile);
const stream = backupFile.endsWith(".gz") ? raw.pipe(createGunzip()) : raw;
@@ -263,41 +397,21 @@ async function* readRestoreStatements(backupFile: string): AsyncGenerator<string
}
export function createBufferedTextFileWriter(filePath: string, maxBufferedBytes = DEFAULT_BACKUP_WRITE_BUFFER_BYTES) {
const stream = createWriteStream(filePath, { encoding: "utf8" });
const filePromise = openFile(filePath, "w");
const flushThreshold = Math.max(1, Math.trunc(maxBufferedBytes));
let bufferedLines: string[] = [];
let bufferedBytes = 0;
let firstChunk = true;
let closed = false;
let streamError: Error | null = null;
let pendingWrite = Promise.resolve();
stream.on("error", (error) => {
streamError = error;
});
const writeChunk = async (chunk: string): Promise<void> => {
if (streamError) throw streamError;
const canContinue = stream.write(chunk);
if (!canContinue) {
await new Promise<void>((resolve, reject) => {
const handleDrain = () => {
cleanup();
resolve();
};
const handleError = (error: Error) => {
cleanup();
reject(error);
};
const cleanup = () => {
stream.off("drain", handleDrain);
stream.off("error", handleError);
};
stream.once("drain", handleDrain);
stream.once("error", handleError);
});
const writeChunk = async (chunk: string | Buffer): Promise<void> => {
const file = await filePromise;
if (typeof chunk === "string") {
await file.write(chunk, null, "utf8");
} else {
await file.write(chunk);
}
if (streamError) throw streamError;
};
const flushBufferedLines = () => {
@@ -316,37 +430,43 @@ export function createBufferedTextFileWriter(filePath: string, maxBufferedBytes
if (closed) {
throw new Error(`Cannot write to closed backup file: ${filePath}`);
}
if (streamError) throw streamError;
bufferedLines.push(line);
bufferedBytes += Buffer.byteLength(line, "utf8") + 1;
if (bufferedBytes >= flushThreshold) {
flushBufferedLines();
}
},
async drain() {
if (closed) {
throw new Error(`Cannot drain closed backup file: ${filePath}`);
}
flushBufferedLines();
await pendingWrite;
},
async writeRaw(chunk: string | Buffer) {
if (closed) {
throw new Error(`Cannot write to closed backup file: ${filePath}`);
}
flushBufferedLines();
firstChunk = false;
pendingWrite = pendingWrite.then(() => writeChunk(chunk));
await pendingWrite;
},
async close() {
if (closed) return;
closed = true;
flushBufferedLines();
await pendingWrite;
await new Promise<void>((resolve, reject) => {
if (streamError) {
reject(streamError);
return;
}
stream.end((error?: Error | null) => {
if (error) reject(error);
else resolve();
});
});
if (streamError) throw streamError;
const file = await filePromise;
await file.close();
},
async abort() {
if (closed) return;
closed = true;
bufferedLines = [];
bufferedBytes = 0;
stream.destroy();
await pendingWrite.catch(() => {});
await filePromise.then((file) => file.close()).catch(() => {});
if (existsSync(filePath)) {
try {
unlinkSync(filePath);
@@ -362,16 +482,53 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
const filenamePrefix = opts.filenamePrefix ?? "paperclip";
const retention = opts.retention;
const connectTimeout = Math.max(1, Math.trunc(opts.connectTimeoutSeconds ?? 5));
const backupEngine = opts.backupEngine ?? "auto";
const canUsePgDump = !hasBackupTransforms(opts);
const includeMigrationJournal = opts.includeMigrationJournal === true;
const excludedTableNames = normalizeTableNameSet(opts.excludeTables);
const nullifiedColumnsByTable = normalizeNullifyColumnMap(opts.nullifyColumns);
const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
let sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
let sqlClosed = false;
const closeSql = async () => {
if (sqlClosed) return;
sqlClosed = true;
await sql.end();
};
mkdirSync(opts.backupDir, { recursive: true });
const sqlFile = resolve(opts.backupDir, `${filenamePrefix}-${timestamp()}.sql`);
const backupFile = `${sqlFile}.gz`;
const writer = createBufferedTextFileWriter(sqlFile);
try {
if (backupEngine === "pg_dump" || (backupEngine === "auto" && canUsePgDump)) {
await sql`SELECT 1`;
try {
await closeSql();
await runPgDumpBackup({
connectionString: opts.connectionString,
backupFile,
connectTimeout,
});
await writer.abort();
const sizeBytes = statSync(backupFile).size;
const prunedCount = pruneOldBackups(opts.backupDir, retention, filenamePrefix);
return {
backupFile,
sizeBytes,
prunedCount,
};
} catch (error) {
if (existsSync(backupFile)) {
try { unlinkSync(backupFile); } catch { /* ignore */ }
}
if (backupEngine === "pg_dump") {
throw error;
}
sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
sqlClosed = false;
}
}
await sql`SELECT 1`;
const emit = (line: string) => writer.emit(line);
@@ -703,20 +860,39 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
emit(`-- Data for: ${schema_name}.${tablename} (${count[0]!.n} rows)`);
const rows = await sql.unsafe(`SELECT * FROM ${qualifiedTableName}`).values();
const nullifiedColumns = nullifiedColumnsByTable.get(tablename) ?? new Set<string>();
for (const row of rows) {
const values = row.map((rawValue: unknown, index) => {
const columnName = cols[index]?.column_name;
const val = columnName && nullifiedColumns.has(columnName) ? null : rawValue;
if (val === null || val === undefined) return "NULL";
if (typeof val === "boolean") return val ? "true" : "false";
if (typeof val === "number") return String(val);
if (val instanceof Date) return formatSqlLiteral(val.toISOString());
if (typeof val === "object") return formatSqlLiteral(JSON.stringify(val));
return formatSqlLiteral(String(val));
});
emitStatement(`INSERT INTO ${qualifiedTableName} (${colNames}) VALUES (${values.join(", ")});`);
if (backupEngine !== "javascript" && nullifiedColumns.size === 0) {
emit(`COPY ${qualifiedTableName} (${colNames}) FROM stdin;`);
await writer.writeRaw("\n");
const copySql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
try {
const copyStream = await copySql
.unsafe(`COPY ${qualifiedTableName} (${colNames}) TO STDOUT`)
.readable();
for await (const chunk of copyStream) {
await writer.writeRaw(Buffer.isBuffer(chunk) ? chunk : Buffer.from(String(chunk)));
}
} finally {
await copySql.end();
}
await writer.writeRaw("\\.\n");
emitStatementBoundary();
emit("");
continue;
}
const rowCursor = sql
.unsafe(`SELECT * FROM ${qualifiedTableName}`)
.values()
.cursor(BACKUP_DATA_CURSOR_ROWS) as AsyncIterable<unknown[][]>;
for await (const rows of rowCursor) {
for (const row of rows) {
const values = row.map((rawValue, index) =>
formatSqlValue(rawValue, cols[index]?.column_name, nullifiedColumns),
);
emitStatement(`INSERT INTO ${qualifiedTableName} (${colNames}) VALUES (${values.join(", ")});`);
}
await writer.drain();
}
emit("");
}
@@ -768,12 +944,23 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
}
throw error;
} finally {
await sql.end();
await closeSql();
}
}
export async function runDatabaseRestore(opts: RunDatabaseRestoreOptions): Promise<void> {
const connectTimeout = Math.max(1, Math.trunc(opts.connectTimeoutSeconds ?? 5));
try {
await restoreWithPsql(opts, connectTimeout);
return;
} catch (error) {
if (!(await hasStatementBreakpoints(opts.backupFile))) {
throw new Error(
`Failed to restore ${basename(opts.backupFile)} with psql: ${sanitizeRestoreErrorMessage(error)}`,
);
}
}
const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
try {

View File

@@ -14,6 +14,8 @@ const ignoredTestConfigBasenames = new Set([
"vitest.config.ts",
]);
const nodeDiagnosticReportPattern = /^report\.\d{8}\.\d{6}\.\d+\.\d+\.\d+\.json$/i;
export function shouldTrackDevServerPath(relativePath) {
const normalizedPath = String(relativePath).replaceAll("\\", "/").replace(/^\.\/+/, "");
if (normalizedPath.length === 0) return false;
@@ -21,6 +23,9 @@ export function shouldTrackDevServerPath(relativePath) {
const segments = normalizedPath.split("/");
const basename = segments.at(-1) ?? normalizedPath;
if (nodeDiagnosticReportPattern.test(basename)) {
return false;
}
if (segments.includes(".paperclip")) {
return false;
}

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env node
import { spawn } from "node:child_process";
import { randomUUID } from "node:crypto";
import { existsSync, mkdirSync, readdirSync, rmSync, statSync, writeFileSync } from "node:fs";
import path from "node:path";
import { createInterface } from "node:readline/promises";
@@ -16,6 +17,8 @@ const gracefulShutdownTimeoutMs = 10_000;
const changedPathSampleLimit = 5;
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const devServerStatusFilePath = path.join(repoRoot, ".paperclip", "dev-server-status.json");
const devServerStatusToken = mode === "dev" ? randomUUID() : null;
const devServerStatusTokenHeader = "x-paperclip-dev-server-status-token";
const watchedDirectories = [
"cli",
@@ -81,9 +84,11 @@ const env = {
if (mode === "dev") {
env.PAPERCLIP_DEV_SERVER_STATUS_FILE = devServerStatusFilePath;
env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = devServerStatusToken ?? "";
}
if (mode === "watch") {
delete env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN;
env.PAPERCLIP_MIGRATION_PROMPT ??= "never";
env.PAPERCLIP_MIGRATION_AUTO_APPLY ??= "true";
}
@@ -426,7 +431,9 @@ async function scanForBackendChanges() {
async function getDevHealthPayload() {
const serverPort = env.PORT ?? process.env.PORT ?? "3100";
const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`);
const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`, {
headers: devServerStatusToken ? { [devServerStatusTokenHeader]: devServerStatusToken } : undefined,
});
if (!response.ok) {
throw new Error(`Health request failed (${response.status})`);
}

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env -S node --import tsx
import { spawn } from "node:child_process";
import { randomUUID } from "node:crypto";
import { existsSync, mkdirSync, readdirSync, rmSync, statSync, writeFileSync } from "node:fs";
import path from "node:path";
import { createInterface } from "node:readline/promises";
@@ -35,6 +36,8 @@ const autoRestartPollIntervalMs = 2500;
const gracefulShutdownTimeoutMs = 10_000;
const changedPathSampleLimit = 5;
const devServerStatusFilePath = path.join(repoRoot, ".paperclip", "dev-server-status.json");
const devServerStatusToken = mode === "dev" ? randomUUID() : null;
const devServerStatusTokenHeader = "x-paperclip-dev-server-status-token";
const watchedDirectories = [
"cli",
@@ -133,10 +136,12 @@ const env: NodeJS.ProcessEnv = {
if (mode === "dev") {
env.PAPERCLIP_DEV_SERVER_STATUS_FILE = devServerStatusFilePath;
env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = devServerStatusToken ?? "";
env.PAPERCLIP_MIGRATION_AUTO_APPLY ??= "true";
}
if (mode === "watch") {
delete env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN;
env.PAPERCLIP_MIGRATION_PROMPT ??= "never";
env.PAPERCLIP_MIGRATION_AUTO_APPLY ??= "true";
}
@@ -553,7 +558,9 @@ async function scanForBackendChanges() {
}
async function getDevHealthPayload() {
const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`);
const response = await fetch(`http://127.0.0.1:${serverPort}/api/health`, {
headers: devServerStatusToken ? { [devServerStatusTokenHeader]: devServerStatusToken } : undefined,
});
if (!response.ok) {
throw new Error(`Health request failed (${response.status})`);
}

79
scripts/discord-daily-digest.sh Executable file
View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
# Posts a daily digest of commits merged into master to a Discord webhook.
#
# Usage: DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/... \
#          scripts/discord-daily-digest.sh [YYYY-MM-DD]
#   date defaults to today. Set DRY_RUN=1 to print the JSON payload instead
#   of posting it.
set -euo pipefail

REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
cd "$REPO_ROOT"

WEBHOOK_URL="${DISCORD_WEBHOOK_URL:-}"
DATE="${1:-$(date +%Y-%m-%d)}"
REPO_URL="https://github.com/paperclipai/paperclip"

if [[ -z "$WEBHOOK_URL" ]]; then
  echo "Error: DISCORD_WEBHOOK_URL env var is required" >&2
  echo "Usage: DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/... $0 [date]" >&2
  echo "       date defaults to today (YYYY-MM-DD format)" >&2
  exit 1
fi

# Compute the day after DATE, trying BSD date (macOS) first, then GNU date.
# The trailing `|| true` keeps `set -e` from aborting silently so we can
# report a usable error for malformed dates.
NEXT_DATE=$(date -j -v+1d -f "%Y-%m-%d" "$DATE" "+%Y-%m-%d" 2>/dev/null \
  || date -d "$DATE + 1 day" "+%Y-%m-%d" 2>/dev/null \
  || true)
if [[ -z "$NEXT_DATE" ]]; then
  echo "Error: could not parse date '$DATE' (expected YYYY-MM-DD)" >&2
  exit 1
fi

# Use the ASCII unit separator (%x1f) as the field delimiter so commit
# subjects containing "|" cannot corrupt the hash/subject/author split.
COMMITS=$(git log --since="${DATE}T00:00:00" --until="${NEXT_DATE}T00:00:00" master \
  --format="%h%x1f%s%x1f%an" 2>/dev/null || true)

# Emits stdin as a JSON-escaped string body (without the surrounding quotes).
json_escape() {
  python3 -c 'import json, sys; print(json.dumps(sys.stdin.read().rstrip("\n"))[1:-1])'
}

if [[ -z "$COMMITS" ]]; then
  PAYLOAD=$(cat <<ENDJSON
{
  "embeds": [{
    "title": "📋 Daily Merge Digest — ${DATE}",
    "description": "No commits were merged into \`master\` today.",
    "color": 9807270
  }]
}
ENDJSON
)
else
  COMMIT_COUNT=$(echo "$COMMITS" | wc -l | tr -d ' ')
  LINES=""
  while IFS=$'\x1f' read -r hash subject author; do
    escaped_subject=$(printf '%s' "$subject" | json_escape)
    escaped_author=$(printf '%s' "$author" | json_escape)
    LINES="${LINES}• [\`${hash}\`](${REPO_URL}/commit/${hash}) ${escaped_subject} — *${escaped_author}*\\n"
  done <<< "$COMMITS"
  PAYLOAD=$(cat <<ENDJSON
{
  "embeds": [{
    "title": "📋 Daily Merge Digest — ${DATE}",
    "description": "**${COMMIT_COUNT} commit(s)** merged into \`master\` today:\\n\\n${LINES}",
    "color": 3066993,
    "footer": {
      "text": "paperclipai/paperclip • master"
    }
  }]
}
ENDJSON
)
fi

if [[ "${DRY_RUN:-}" == "1" ]]; then
  # Pretty-print when possible; fall back to raw payload if json.tool balks.
  echo "$PAYLOAD" | python3 -m json.tool 2>/dev/null || echo "$PAYLOAD"
  exit 0
fi

RESPONSE=$(curl -s -o /dev/null -w "%{http_code}" \
  -H "Content-Type: application/json" \
  -d "$PAYLOAD" \
  "$WEBHOOK_URL")

if [[ "$RESPONSE" == "204" || "$RESPONSE" == "200" ]]; then
  echo "Discord digest posted for ${DATE} (${COMMIT_COUNT:-0} commits)"
else
  echo "Error: Discord webhook returned HTTP ${RESPONSE}" >&2
  exit 1
fi

View File

@@ -332,11 +332,15 @@ main().catch((error) => {
EOF
}
if paperclipai_command_available; then
run_isolated_worktree_init
if [[ -e "$worktree_config_path" && -e "$worktree_env_path" ]]; then
echo "Reusing existing isolated Paperclip worktree config at $worktree_config_path" >&2
else
echo "paperclipai CLI not available in this workspace; writing isolated fallback config without DB seeding." >&2
write_fallback_worktree_config
if paperclipai_command_available; then
run_isolated_worktree_init
else
echo "paperclipai CLI not available in this workspace; writing isolated fallback config without DB seeding." >&2
write_fallback_worktree_config
fi
fi
list_base_node_modules_paths() {

View File

@@ -83,11 +83,12 @@ describe("adapter routes", () => {
const res = await request(app).get("/api/adapters");
expect(res.status).toBe(200);
expect(Array.isArray(res.body)).toBe(true);
expect(res.body.length).toBeGreaterThan(0);
const adapters = Array.isArray(res.body) ? res.body : JSON.parse(res.text);
expect(Array.isArray(adapters)).toBe(true);
expect(adapters.length).toBeGreaterThan(0);
// Every adapter should have a capabilities object
for (const adapter of res.body) {
for (const adapter of adapters) {
expect(adapter.capabilities).toBeDefined();
expect(typeof adapter.capabilities.supportsInstructionsBundle).toBe("boolean");
expect(typeof adapter.capabilities.supportsSkills).toBe("boolean");

View File

@@ -0,0 +1,43 @@
import { afterEach, describe, expect, it } from "vitest";
import type { BetterAuthOptions } from "better-auth";
import { getCookies } from "better-auth/cookies";
import {
buildBetterAuthAdvancedOptions,
deriveAuthCookiePrefix,
} from "../auth/better-auth.js";
const ORIGINAL_INSTANCE_ID = process.env.PAPERCLIP_INSTANCE_ID;
afterEach(() => {
if (ORIGINAL_INSTANCE_ID === undefined) delete process.env.PAPERCLIP_INSTANCE_ID;
else process.env.PAPERCLIP_INSTANCE_ID = ORIGINAL_INSTANCE_ID;
});
// Verifies that Better Auth cookie names are scoped per Paperclip instance so
// two worktree servers on the same host (different ports) cannot clobber each
// other's session cookies — browsers share cookies by host, not by port.
describe("Better Auth cookie scoping", () => {
  it("derives an instance-scoped cookie prefix", () => {
    expect(deriveAuthCookiePrefix("default")).toBe("paperclip-default");
    expect(deriveAuthCookiePrefix("PAP-1601-worktree")).toBe("paperclip-PAP-1601-worktree");
  });

  it("uses PAPERCLIP_INSTANCE_ID for the Better Auth cookie prefix", () => {
    process.env.PAPERCLIP_INSTANCE_ID = "sat-worktree";
    const advanced = buildBetterAuthAdvancedOptions({ disableSecureCookies: false });
    expect(advanced).toEqual({
      cookiePrefix: "paperclip-sat-worktree",
    });
    // getCookies resolves the final cookie name Better Auth will actually set.
    expect(getCookies({ advanced } as BetterAuthOptions).sessionToken.name).toBe(
      "paperclip-sat-worktree.session_token",
    );
  });

  it("keeps local http auth cookies non-secure while preserving the scoped prefix", () => {
    process.env.PAPERCLIP_INSTANCE_ID = "pap-worktree";
    expect(buildBetterAuthAdvancedOptions({ disableSecureCookies: true })).toEqual({
      cookiePrefix: "paperclip-pap-worktree",
      useSecureCookies: false,
    });
  });
});

View File

@@ -2,12 +2,15 @@ import { describe, expect, it } from "vitest";
import { shouldTrackDevServerPath } from "../../../scripts/dev-runner-paths.mjs";
describe("shouldTrackDevServerPath", () => {
it("ignores repo-local Paperclip state and common test file paths", () => {
it("ignores generated state, diagnostic reports, and common test file paths", () => {
expect(
shouldTrackDevServerPath(
".paperclip/worktrees/PAP-712-for-project-configuration-get-rid-of-the-overview-tab-for-now/.agents/skills/paperclip",
),
).toBe(false);
expect(shouldTrackDevServerPath("server/report.20260416.154629.4965.0.001.json")).toBe(false);
expect(shouldTrackDevServerPath("server/report.20260416.154636.4725.0.001.json")).toBe(false);
expect(shouldTrackDevServerPath("server/report.20260416.154636.4965.0.002.json")).toBe(false);
expect(shouldTrackDevServerPath("server/src/__tests__/health.test.ts")).toBe(false);
expect(shouldTrackDevServerPath("packages/shared/src/lib/foo.test.ts")).toBe(false);
expect(shouldTrackDevServerPath("packages/shared/src/lib/foo.spec.tsx")).toBe(false);

View File

@@ -0,0 +1,128 @@
import { mkdtempSync, rmSync, writeFileSync } from "node:fs";
import os from "node:os";
import path from "node:path";
import express from "express";
import request from "supertest";
import { afterEach, describe, expect, it, vi } from "vitest";
import type { Db } from "@paperclipai/db";
import { healthRoutes } from "../routes/health.js";
const tempDirs: string[] = [];
// Writes `payload` as JSON (newline-terminated) into a fresh temp directory's
// dev-server-status.json and returns the file path. The temp dir is recorded
// in `tempDirs` so the afterEach hook can remove it.
function createDevServerStatusFile(payload: unknown) {
  const dir = mkdtempSync(path.join(os.tmpdir(), "paperclip-health-dev-server-"));
  tempDirs.push(dir);
  const filePath = path.join(dir, "dev-server-status.json");
  writeFileSync(filePath, `${JSON.stringify(payload)}\n`, "utf8");
  return filePath;
}
afterEach(() => {
for (const dir of tempDirs.splice(0)) {
rmSync(dir, { recursive: true, force: true });
}
});
// Route test: /health should include dev-server supervisor metadata when the
// caller presents the status token issued by the dev runner via the
// x-paperclip-dev-server-status-token header, even in authenticated mode with
// no session actor.
describe("GET /health dev-server supervisor access", () => {
  it("exposes dev-server metadata to the supervising dev runner in authenticated mode", async () => {
    // Snapshot env vars so the finally block can restore them.
    const previousFile = process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE;
    const previousToken = process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN;
    process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE = createDevServerStatusFile({
      dirty: true,
      lastChangedAt: "2026-03-20T12:00:00.000Z",
      changedPathCount: 1,
      changedPathsSample: ["server/src/routes/health.ts"],
      pendingMigrations: [],
      lastRestartAt: "2026-03-20T11:30:00.000Z",
    });
    process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = "dev-runner-token";

    // Stub Db: `execute` answers the connectivity probe; `select` dispatches
    // on call order. NOTE(review): the first select presumably feeds a
    // bootstrap/user count and the second the instance settings row — confirm
    // against healthRoutes if its query order ever changes.
    let selectCall = 0;
    const db = {
      execute: vi.fn().mockResolvedValue([{ "?column?": 1 }]),
      select: vi.fn(() => {
        selectCall += 1;
        if (selectCall === 1) {
          return {
            from: vi.fn(() => ({
              where: vi.fn().mockResolvedValue([{ count: 1 }]),
            })),
          };
        }
        if (selectCall === 2) {
          return {
            from: vi.fn(() => ({
              where: vi.fn().mockResolvedValue([
                {
                  id: "settings-1",
                  general: {},
                  // Enables the autoRestartEnabled flag in the response.
                  experimental: { autoRestartDevServerWhenIdle: true },
                  createdAt: new Date("2026-03-20T11:00:00.000Z"),
                  updatedAt: new Date("2026-03-20T11:00:00.000Z"),
                },
              ]),
            })),
          };
        }
        return {
          from: vi.fn(() => ({
            where: vi.fn().mockResolvedValue([{ count: 0 }]),
          })),
        };
      }),
    } as unknown as Db;

    try {
      const app = express();
      // Simulate an unauthenticated caller; access must come from the token.
      app.use((req, _res, next) => {
        (req as any).actor = { type: "none", source: "none" };
        next();
      });
      app.use(
        "/health",
        healthRoutes(db, {
          deploymentMode: "authenticated",
          deploymentExposure: "private",
          authReady: true,
          companyDeletionEnabled: true,
        }),
      );

      const res = await request(app)
        .get("/health")
        .set("X-Paperclip-Dev-Server-Status-Token", "dev-runner-token");

      expect(res.status).toBe(200);
      // The devServer section should mirror the status file written above.
      expect(res.body).toEqual({
        status: "ok",
        deploymentMode: "authenticated",
        bootstrapStatus: "ready",
        bootstrapInviteActive: false,
        devServer: {
          enabled: true,
          restartRequired: true,
          reason: "backend_changes",
          lastChangedAt: "2026-03-20T12:00:00.000Z",
          changedPathCount: 1,
          changedPathsSample: ["server/src/routes/health.ts"],
          pendingMigrations: [],
          autoRestartEnabled: true,
          activeRunCount: 0,
          waitingForIdle: false,
          lastRestartAt: "2026-03-20T11:30:00.000Z",
        },
      });
    } finally {
      // Restore (or clear) env vars regardless of assertion outcome so other
      // tests are not polluted.
      if (previousFile === undefined) {
        delete process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE;
      } else {
        process.env.PAPERCLIP_DEV_SERVER_STATUS_FILE = previousFile;
      }
      if (previousToken === undefined) {
        delete process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN;
      } else {
        process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN = previousToken;
      }
    }
  });
});

View File

@@ -53,6 +53,8 @@ describe("shouldSilenceHttpSuccessLog", () => {
});
it("silences successful static asset requests", () => {
expect(shouldSilenceHttpSuccessLog("GET", "/", 200)).toBe(true);
expect(shouldSilenceHttpSuccessLog("GET", "/index.html", 200)).toBe(true);
expect(shouldSilenceHttpSuccessLog("GET", "/@fs/Users/dotta/paperclip/ui/src/main.tsx", 200)).toBe(true);
expect(shouldSilenceHttpSuccessLog("GET", "/src/App.tsx?t=123", 200)).toBe(true);
expect(shouldSilenceHttpSuccessLog("GET", "/site.webmanifest", 200)).toBe(true);

View File

@@ -0,0 +1,149 @@
import express from "express";
import request from "supertest";
import { describe, expect, it, vi } from "vitest";
import { errorHandler } from "../middleware/index.js";
import {
instanceDatabaseBackupRoutes,
type InstanceDatabaseBackupService,
} from "../routes/instance-database-backups.js";
import { conflict } from "../errors.js";
/**
 * Builds a throwaway Express app that mounts the backup routes behind a
 * stubbed request actor, mirroring how the real server wires auth context
 * (actor middleware first, routes under /api, error handler last).
 */
function createApp(actor: Record<string, unknown>, service: InstanceDatabaseBackupService) {
  const app = express()
    .use(express.json())
    .use((req, _res, next) => {
      // Stand in for the upstream auth middleware by pinning the actor on every request.
      req.actor = actor as typeof req.actor;
      next();
    })
    .use("/api", instanceDatabaseBackupRoutes(service))
    .use(errorHandler);
  return app;
}
/**
 * Creates a stub backup service whose runManualBackup resolves to a canned
 * successful run summary; individual tests can override members (e.g. to
 * simulate a conflict) via `overrides`.
 */
function createBackupService(overrides: Partial<InstanceDatabaseBackupService> = {}): InstanceDatabaseBackupService {
  // Canonical "happy path" result echoed by the route tests below.
  const successfulRun = {
    trigger: "manual",
    backupFile: "/tmp/paperclip-20260416.sql.gz",
    sizeBytes: 1234,
    prunedCount: 2,
    backupDir: "/tmp",
    retention: {
      dailyDays: 7,
      weeklyWeeks: 4,
      monthlyMonths: 1,
    },
    startedAt: "2026-04-16T20:00:00.000Z",
    finishedAt: "2026-04-16T20:00:01.000Z",
    durationMs: 1000,
  };
  return {
    runManualBackup: vi.fn().mockResolvedValue(successfulRun),
    ...overrides,
  };
}
// Route-level coverage for POST /api/instance/database-backups: the
// authorization matrix (admin / local-implicit / non-admin / agent) plus
// pass-through of the service result and conflict-error mapping.
describe("instance database backup routes", () => {
it("runs a manual backup for an instance admin and returns the server result", async () => {
const service = createBackupService();
const app = createApp(
{
type: "board",
userId: "admin-1",
source: "session",
isInstanceAdmin: true,
},
service,
);
const res = await request(app).post("/api/instance/database-backups").send({});
expect(res.status).toBe(201);
expect(service.runManualBackup).toHaveBeenCalledTimes(1);
// The route must echo the service's run summary verbatim as the JSON body.
expect(res.body).toEqual({
trigger: "manual",
backupFile: "/tmp/paperclip-20260416.sql.gz",
sizeBytes: 1234,
prunedCount: 2,
backupDir: "/tmp",
retention: {
dailyDays: 7,
weeklyWeeks: 4,
monthlyMonths: 1,
},
startedAt: "2026-04-16T20:00:00.000Z",
finishedAt: "2026-04-16T20:00:01.000Z",
durationMs: 1000,
});
});
// Local single-user installs run without explicit admin flags; the
// "local_implicit" source is trusted even when isInstanceAdmin is false.
it("allows local implicit board access", async () => {
const service = createBackupService();
const app = createApp(
{
type: "board",
userId: "local-board",
source: "local_implicit",
isInstanceAdmin: false,
},
service,
);
await request(app).post("/api/instance/database-backups").send({}).expect(201);
expect(service.runManualBackup).toHaveBeenCalledTimes(1);
});
it("rejects non-admin board users", async () => {
const service = createBackupService();
const app = createApp(
{
type: "board",
userId: "user-1",
source: "session",
isInstanceAdmin: false,
companyIds: ["company-1"],
},
service,
);
await request(app).post("/api/instance/database-backups").send({}).expect(403);
// On a 403 the service must never be invoked.
expect(service.runManualBackup).not.toHaveBeenCalled();
});
it("rejects agent callers", async () => {
const service = createBackupService();
const app = createApp(
{
type: "agent",
agentId: "agent-1",
companyId: "company-1",
source: "agent_key",
},
service,
);
await request(app).post("/api/instance/database-backups").send({}).expect(403);
expect(service.runManualBackup).not.toHaveBeenCalled();
});
// A service-side conflict (backup already running) should surface as HTTP 409
// via the shared error handler, with the service's message as the body.
it("returns conflict when another server backup is already running", async () => {
const service = createBackupService({
runManualBackup: vi.fn().mockRejectedValue(conflict("Database backup already in progress")),
});
const app = createApp(
{
type: "board",
userId: "admin-1",
source: "session",
isInstanceAdmin: true,
},
service,
);
const res = await request(app).post("/api/instance/database-backups").send({});
expect(res.status).toBe(409);
expect(res.body).toEqual({ error: "Database backup already in progress" });
});
});

View File

@@ -128,6 +128,15 @@ vi.mock("../services/index.js", () => ({
})),
tickTimers: vi.fn(async () => ({ enqueued: 0 })),
})),
instanceSettingsService: vi.fn(() => ({
getGeneral: vi.fn(async () => ({
backupRetention: {
dailyDays: 7,
weeklyWeeks: 4,
monthlyMonths: 1,
},
})),
})),
reconcilePersistedRuntimeServicesOnStartup: vi.fn(async () => ({ reconciled: 0 })),
routineService: vi.fn(() => ({
tickScheduledTriggers: vi.fn(async () => ({ triggered: 0 })),

View File

@@ -32,7 +32,7 @@ describe("createCachedViteHtmlRenderer", () => {
}
});
it("reuses the injected dev html shell until a watched file changes", async () => {
it("reuses the injected dev html shell until index.html changes", async () => {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-vite-html-"));
tempDirs.push(tempDir);
const indexPath = path.join(tempDir, "index.html");
@@ -57,6 +57,12 @@ describe("createCachedViteHtmlRenderer", () => {
expect(first.match(/\/@vite\/client/g)?.length).toBe(1);
expect(first).toContain("window.$RefreshReg$");
const sourcePath = path.join(tempDir, "src", "main.tsx");
fs.mkdirSync(path.dirname(sourcePath), { recursive: true });
fs.writeFileSync(sourcePath, "export {};\n", "utf8");
watcher.emit("change", sourcePath);
expect(await renderer.render("/")).toBe(first);
fs.writeFileSync(
indexPath,
'<html><body>v2<script type="module" src="/src/main.tsx"></script></body></html>',

View File

@@ -897,7 +897,7 @@ describe("realizeExecutionWorkspace", () => {
await runGit(repoRoot, ["commit", "-m", "Add worktree provision script"]);
try {
const workspace = await realizeExecutionWorkspace({
const workspaceInput = {
base: {
baseCwd: repoRoot,
source: "project_primary",
@@ -923,7 +923,8 @@ describe("realizeExecutionWorkspace", () => {
name: "Codex Coder",
companyId: "company-1",
},
});
} satisfies Parameters<typeof realizeExecutionWorkspace>[0];
const workspace = await realizeExecutionWorkspace(workspaceInput);
const configPath = path.join(workspace.cwd, ".paperclip", "config.json");
const envPath = path.join(workspace.cwd, ".paperclip", ".env");
@@ -954,6 +955,34 @@ describe("realizeExecutionWorkspace", () => {
process.chdir(workspace.cwd);
expect(resolvePaperclipConfigPath()).toBe(configPath);
const preservedPort = 39999;
await fs.writeFile(
configPath,
JSON.stringify(
{
...configContents,
server: {
...configContents.server,
port: preservedPort,
},
},
null,
2,
) + "\n",
"utf8",
);
await fs.writeFile(envPath, `${envContents}PAPERCLIP_WORKTREE_COLOR="#112233"\n`, "utf8");
const reusedWorkspace = await realizeExecutionWorkspace(workspaceInput);
const reusedConfigContents = JSON.parse(await fs.readFile(configPath, "utf8"));
const reusedEnvContents = await fs.readFile(envPath, "utf8");
expect(reusedWorkspace.cwd).toBe(workspace.cwd);
expect(reusedWorkspace.created).toBe(false);
expect(reusedConfigContents.server.port).toBe(preservedPort);
expect(reusedConfigContents.database.embeddedPostgresDataDir).toBe(path.join(expectedInstanceRoot, "db"));
expect(reusedEnvContents).toContain('PAPERCLIP_WORKTREE_COLOR="#112233"');
} finally {
process.chdir(previousCwd);
}

View File

@@ -27,6 +27,10 @@ import { sidebarBadgeRoutes } from "./routes/sidebar-badges.js";
import { sidebarPreferenceRoutes } from "./routes/sidebar-preferences.js";
import { inboxDismissalRoutes } from "./routes/inbox-dismissals.js";
import { instanceSettingsRoutes } from "./routes/instance-settings.js";
import {
instanceDatabaseBackupRoutes,
type InstanceDatabaseBackupService,
} from "./routes/instance-database-backups.js";
import { llmRoutes } from "./routes/llms.js";
import { authRoutes } from "./routes/auth.js";
import { assetRoutes } from "./routes/assets.js";
@@ -112,6 +116,7 @@ export async function createApp(
now?: Date;
}): Promise<unknown>;
};
databaseBackupService?: InstanceDatabaseBackupService;
deploymentMode: DeploymentMode;
deploymentExposure: DeploymentExposure;
allowedHostnames: string[];
@@ -194,6 +199,9 @@ export async function createApp(
api.use(sidebarPreferenceRoutes(db));
api.use(inboxDismissalRoutes(db));
api.use(instanceSettingsRoutes(db));
if (opts.databaseBackupService) {
api.use(instanceDatabaseBackupRoutes(opts.databaseBackupService));
}
const hostServicesDisposers = new Map<string, () => void>();
const workerManager = createPluginWorkerManager();
const pluginRegistry = pluginRegistryService(db);

View File

@@ -11,6 +11,7 @@ import {
authVerifications,
} from "@paperclipai/db";
import type { Config } from "../config.js";
import { resolvePaperclipInstanceId } from "../home-paths.js";
export type BetterAuthSessionUser = {
id: string;
@@ -25,6 +26,24 @@ export type BetterAuthSessionResult = {
type BetterAuthInstance = ReturnType<typeof betterAuth>;
// Fallback cookie-prefix segment used when an instance id sanitizes to nothing.
const AUTH_COOKIE_PREFIX_FALLBACK = "default";
// Runs of characters outside [a-zA-Z0-9_-] are collapsed into single dashes.
const AUTH_COOKIE_PREFIX_INVALID_SEGMENTS_RE = /[^a-zA-Z0-9_-]+/g;

/**
 * Derives a per-instance auth cookie prefix so sessions from different
 * Paperclip instances on the same host never share cookies.
 *
 * The instance id is trimmed, invalid character runs become dashes, and
 * leading/trailing dashes are stripped; an empty result falls back to
 * "default". The returned prefix always starts with "paperclip-".
 */
export function deriveAuthCookiePrefix(instanceId = resolvePaperclipInstanceId()): string {
  const sanitized = instanceId
    .trim()
    .replace(AUTH_COOKIE_PREFIX_INVALID_SEGMENTS_RE, "-")
    .replace(/^-+|-+$/g, "");
  const segment = sanitized === "" ? AUTH_COOKIE_PREFIX_FALLBACK : sanitized;
  return `paperclip-${segment}`;
}
/**
 * Assembles the better-auth `advanced` options block: always scopes cookies
 * by instance via deriveAuthCookiePrefix, and opts out of Secure cookies
 * only when the caller asks for it (plain-HTTP deployments).
 */
export function buildBetterAuthAdvancedOptions(input: { disableSecureCookies: boolean }) {
  // Only include useSecureCookies when it must be forced off; otherwise let
  // better-auth apply its own default.
  const secureCookieOverride = input.disableSecureCookies ? { useSecureCookies: false } : {};
  return { cookiePrefix: deriveAuthCookiePrefix(), ...secureCookieOverride };
}
function headersFromNodeHeaders(rawHeaders: IncomingHttpHeaders): Headers {
const headers = new Headers();
for (const [key, raw] of Object.entries(rawHeaders)) {
@@ -97,7 +116,7 @@ export function createBetterAuthInstance(db: Db, config: Config, trustedOrigins?
requireEmailVerification: false,
disableSignUp: config.authDisableSignUp,
},
...(isHttpOnly ? { advanced: { useSecureCookies: false } } : {}),
advanced: buildBetterAuthAdvancedOptions({ disableSecureCookies: isHttpOnly }),
};
if (!baseUrl) {

View File

@@ -41,6 +41,11 @@ import { printStartupBanner } from "./startup-banner.js";
import { getBoardClaimWarningUrl, initializeBoardClaimChallenge } from "./board-claim.js";
import { maybePersistWorktreeRuntimePorts } from "./worktree-config.js";
import { initTelemetry, getTelemetryClient } from "./telemetry.js";
import { conflict } from "./errors.js";
import type {
InstanceDatabaseBackupRunResult,
InstanceDatabaseBackupTrigger,
} from "./routes/instance-database-backups.js";
type BetterAuthSessionUser = {
id: string;
@@ -521,11 +526,80 @@ export async function startServer(): Promise<StartedServer> {
const feedback = feedbackService(db as any, {
shareClient: createFeedbackTraceShareClientFromConfig(config),
});
// Settings service used to read the current backup retention policy per run.
const backupSettingsSvc = instanceSettingsService(db);
// Guards against overlapping backups. The single-threaded event loop makes
// the check-then-set below safe without additional locking.
let databaseBackupInFlight = false;
// Shared backup runner for both manual (API-triggered) and scheduled runs.
// Overlap handling differs by trigger: scheduled runs are skipped with a
// warning (returns null), manual runs surface a 409 conflict to the caller.
const runServerDatabaseBackup = async (
trigger: InstanceDatabaseBackupTrigger,
): Promise<InstanceDatabaseBackupRunResult | null> => {
if (databaseBackupInFlight) {
const message = "Database backup already in progress";
if (trigger === "scheduled") {
logger.warn("Skipping scheduled database backup because a previous backup is still running");
// null signals "skipped"; only the scheduled path ever returns null.
return null;
}
throw conflict(message);
}
databaseBackupInFlight = true;
const startedAt = new Date();
// Separate monotonic-ish ms capture for durationMs, so the reported duration
// does not depend on re-parsing the ISO timestamp.
const startedAtMs = Date.now();
const label = trigger === "scheduled" ? "Automatic" : "Manual";
try {
logger.info({ backupDir: config.databaseBackupDir, trigger }, `${label} database backup starting`);
// Read retention from Instance Settings (DB) so changes take effect without restart.
const generalSettings = await backupSettingsSvc.getGeneral();
const retention = generalSettings.backupRetention;
const result = await runDatabaseBackup({
connectionString: activeDatabaseConnectionString,
backupDir: config.databaseBackupDir,
retention,
filenamePrefix: "paperclip",
});
const finishedAt = new Date();
// Augment the raw backup result with run metadata for API consumers.
const response: InstanceDatabaseBackupRunResult = {
...result,
trigger,
backupDir: config.databaseBackupDir,
retention,
startedAt: startedAt.toISOString(),
finishedAt: finishedAt.toISOString(),
durationMs: Date.now() - startedAtMs,
};
logger.info(
{
backupFile: result.backupFile,
sizeBytes: result.sizeBytes,
prunedCount: result.prunedCount,
backupDir: config.databaseBackupDir,
retention,
trigger,
durationMs: response.durationMs,
},
`${label} database backup complete: ${formatDatabaseBackupResult(result)}`,
);
return response;
} catch (err) {
// Log with context and rethrow; callers decide whether failure is fatal
// (manual API call) or merely logged (scheduled timer).
logger.error({ err, backupDir: config.databaseBackupDir, trigger }, `${label} database backup failed`);
throw err;
} finally {
// Always release the in-flight flag, even on failure.
databaseBackupInFlight = false;
}
};
const app = await createApp(db as any, {
uiMode,
serverPort: listenPort,
storageService,
feedbackExportService: feedback,
databaseBackupService: {
runManualBackup: async () => {
const result = await runServerDatabaseBackup("manual");
if (!result) {
throw conflict("Database backup already in progress");
}
return result;
},
},
deploymentMode: config.deploymentMode,
deploymentExposure: config.deploymentExposure,
allowedHostnames: config.allowedHostnames,
@@ -644,43 +718,6 @@ export async function startServer(): Promise<StartedServer> {
if (config.databaseBackupEnabled) {
const backupIntervalMs = config.databaseBackupIntervalMinutes * 60 * 1000;
const settingsSvc = instanceSettingsService(db);
let backupInFlight = false;
const runScheduledBackup = async () => {
if (backupInFlight) {
logger.warn("Skipping scheduled database backup because a previous backup is still running");
return;
}
backupInFlight = true;
try {
// Read retention from Instance Settings (DB) so changes take effect without restart
const generalSettings = await settingsSvc.getGeneral();
const retention = generalSettings.backupRetention;
const result = await runDatabaseBackup({
connectionString: activeDatabaseConnectionString,
backupDir: config.databaseBackupDir,
retention,
filenamePrefix: "paperclip",
});
logger.info(
{
backupFile: result.backupFile,
sizeBytes: result.sizeBytes,
prunedCount: result.prunedCount,
backupDir: config.databaseBackupDir,
retention,
},
`Automatic database backup complete: ${formatDatabaseBackupResult(result)}`,
);
} catch (err) {
logger.error({ err, backupDir: config.databaseBackupDir }, "Automatic database backup failed");
} finally {
backupInFlight = false;
}
};
logger.info(
{
@@ -691,7 +728,9 @@ export async function startServer(): Promise<StartedServer> {
"Automatic database backups enabled",
);
setInterval(() => {
void runScheduledBackup();
void runServerDatabaseBackup("scheduled").catch(() => {
// runServerDatabaseBackup already logs the failure with context.
});
}, backupIntervalMs);
}

View File

@@ -23,6 +23,8 @@ const SILENCED_SUCCESS_STATIC_PREFIXES = [
];
const SILENCED_SUCCESS_STATIC_PATHS = new Set([
"/",
"/index.html",
"/favicon.ico",
"/site.webmanifest",
"/sw.js",

View File

@@ -1,3 +1,4 @@
import { timingSafeEqual } from "node:crypto";
import { Router } from "express";
import type { Db } from "@paperclipai/db";
import { and, count, eq, gt, inArray, isNull, sql } from "drizzle-orm";
@@ -16,6 +17,17 @@ function shouldExposeFullHealthDetails(
return actorType === "board" || actorType === "agent";
}
/**
 * Checks a caller-supplied dev-server status token against the
 * PAPERCLIP_DEV_SERVER_STATUS_TOKEN environment variable.
 *
 * Both sides are trimmed before comparison; missing/empty values on either
 * side reject immediately. Equal-length candidates are compared with
 * timingSafeEqual to avoid leaking token contents through timing.
 */
function hasDevServerStatusToken(providedToken: string | undefined) {
  const configured = process.env.PAPERCLIP_DEV_SERVER_STATUS_TOKEN?.trim();
  if (!configured) return false;
  const candidate = providedToken?.trim();
  if (!candidate) return false;
  const configuredBytes = Buffer.from(configured);
  const candidateBytes = Buffer.from(candidate);
  // timingSafeEqual requires equal-length buffers; a length mismatch is a
  // definite reject anyway.
  return (
    configuredBytes.length === candidateBytes.length &&
    timingSafeEqual(configuredBytes, candidateBytes)
  );
}
export function healthRoutes(
db?: Db,
opts: {
@@ -38,6 +50,8 @@ export function healthRoutes(
actorType,
opts.deploymentMode,
);
const exposeDevServerDetails =
exposeFullDetails || hasDevServerStatusToken(req.get("x-paperclip-dev-server-status-token"));
if (!db) {
res.json(
@@ -90,7 +104,7 @@ export function healthRoutes(
const persistedDevServerStatus = readPersistedDevServerStatus();
let devServer: ReturnType<typeof toDevServerHealthStatus> | undefined;
if (persistedDevServerStatus && typeof (db as { select?: unknown }).select === "function") {
if (exposeDevServerDetails && persistedDevServerStatus && typeof (db as { select?: unknown }).select === "function") {
const instanceSettings = instanceSettingsService(db);
const experimentalSettings = await instanceSettings.getExperimental();
const activeRunCount = await db
@@ -111,6 +125,7 @@ export function healthRoutes(
deploymentMode: opts.deploymentMode,
bootstrapStatus,
bootstrapInviteActive,
...(devServer ? { devServer } : {}),
});
return;
}

View File

@@ -17,3 +17,4 @@ export { inboxDismissalRoutes } from "./inbox-dismissals.js";
export { llmRoutes } from "./llms.js";
export { accessRoutes } from "./access.js";
export { instanceSettingsRoutes } from "./instance-settings.js";
export { instanceDatabaseBackupRoutes } from "./instance-database-backups.js";

View File

@@ -0,0 +1,30 @@
import { Router } from "express";
import type { BackupRetentionPolicy, RunDatabaseBackupResult } from "@paperclipai/db";
import { assertInstanceAdmin } from "./authz.js";
/** What initiated a backup run: an admin API request ("manual") or the interval timer ("scheduled"). */
export type InstanceDatabaseBackupTrigger = "manual" | "scheduled";
/**
 * Summary of a completed backup run returned to API callers: the underlying
 * database backup result plus trigger, destination, retention policy, and
 * wall-clock timing of the run.
 */
export type InstanceDatabaseBackupRunResult = RunDatabaseBackupResult & {
trigger: InstanceDatabaseBackupTrigger;
// Directory the backup file was written to.
backupDir: string;
// Retention policy that was applied when pruning old backups.
retention: BackupRetentionPolicy;
// ISO-8601 timestamp when the run started.
startedAt: string;
// ISO-8601 timestamp when the run finished.
finishedAt: string;
// Total run duration in milliseconds.
durationMs: number;
};
/** Minimal surface the route needs; the server injects the real implementation. */
export type InstanceDatabaseBackupService = {
runManualBackup(): Promise<InstanceDatabaseBackupRunResult>;
};
/**
 * Mounts POST /instance/database-backups, which triggers an immediate manual
 * database backup via the injected service.
 *
 * Only instance admins (or local implicit board access) may call it; the
 * route responds 201 with the backup run summary on success. Service errors
 * (e.g. 409 "already in progress") flow to the shared error handler.
 */
export function instanceDatabaseBackupRoutes(service: InstanceDatabaseBackupService) {
  const router = Router();
  router.post("/instance/database-backups", async (req, res, next) => {
    try {
      // Throws a 403 error for non-admin board users and agent callers.
      assertInstanceAdmin(req);
      const result = await service.runManualBackup();
      // 201: the backup run was created and completed synchronously.
      res.status(201).json(result);
    } catch (err) {
      // Forward sync throws and async rejections to the error middleware.
      // Express 5 auto-forwards rejected handler promises, but Express 4
      // would otherwise leave this rejection unhandled and hang the request.
      next(err);
    }
  });
  return router;
}

View File

@@ -63,7 +63,7 @@ export function createCachedViteHtmlRenderer(opts: {
function onWatchEvent(filePath: string): void {
const resolvedPath = path.resolve(filePath);
if (resolvedPath === templatePath || resolvedPath.startsWith(`${uiRoot}${path.sep}`)) {
if (resolvedPath === templatePath) {
invalidate();
}
}