Compare commits

...

5 Commits

Author SHA1 Message Date
Dotta
9f67cd8c7c Clamp issue attachments to process cap
Co-Authored-By: Paperclip <noreply@paperclip.ing>
2026-04-28 16:16:52 -05:00
Dotta
708edc0887 Honor process attachment cap fallback
Co-Authored-By: Paperclip <noreply@paperclip.ing>
2026-04-28 15:58:28 -05:00
Dotta
74a401bb6d Fix company fixture attachment limits
Co-Authored-By: Paperclip <noreply@paperclip.ing>
2026-04-28 15:39:23 -05:00
Dotta
a3ceec0897 Address Greptile backend slice feedback
Co-Authored-By: Paperclip <noreply@paperclip.ing>
2026-04-28 15:35:11 -05:00
Dotta
3f6912dc7d Split backend control-plane QoL slice
Co-Authored-By: Paperclip <noreply@paperclip.ing>
2026-04-28 15:15:47 -05:00
66 changed files with 34186 additions and 148 deletions

View File

@@ -14,6 +14,7 @@ function makeCompany(overrides: Partial<Company>): Company {
issueCounter: 1,
budgetMonthlyCents: 0,
spentMonthlyCents: 0,
attachmentMaxBytes: 10 * 1024 * 1024,
requireBoardApprovalForNewAgents: false,
feedbackDataSharingEnabled: false,
feedbackDataSharingConsentAt: null,

View File

@@ -376,8 +376,11 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
name: "Export Engineer",
role: "engineer",
adapterType: "claude_local",
adapterConfig: {
promptTemplate: "You verify company portability.",
adapterConfig: {},
instructionsBundle: {
files: {
"AGENTS.md": "You verify company portability.",
},
},
}),
},

View File

@@ -160,6 +160,7 @@ describe("renderCompanyImportPreview", () => {
path: "COMPANY.md",
name: "Source Co",
description: null,
attachmentMaxBytes: null,
brandColor: null,
logoPath: null,
requireBoardApprovalForNewAgents: false,
@@ -375,6 +376,7 @@ describe("import selection catalog", () => {
path: "COMPANY.md",
name: "Source Co",
description: null,
attachmentMaxBytes: null,
brandColor: null,
logoPath: "images/company-logo.png",
requireBoardApprovalForNewAgents: false,

View File

@@ -0,0 +1 @@
ALTER TABLE "companies" ADD COLUMN "attachment_max_bytes" integer DEFAULT 10485760 NOT NULL;

View File

@@ -0,0 +1,4 @@
CREATE UNIQUE INDEX "issues_active_productivity_review_uq" ON "issues" USING btree ("company_id","origin_kind","origin_id") WHERE "issues"."origin_kind" = 'issue_productivity_review'
and "issues"."origin_id" is not null
and "issues"."hidden_at" is null
and "issues"."status" not in ('done', 'cancelled');

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -512,6 +512,20 @@
"when": 1777305216238,
"tag": "0072_large_sandman",
"breakpoints": true
},
{
"idx": 73,
"version": "7",
"when": 1777382021347,
"tag": "0073_shiny_salo",
"breakpoints": true
},
{
"idx": 74,
"version": "7",
"when": 1777384535070,
"tag": "0074_striped_genesis",
"breakpoints": true
}
]
}
}

View File

@@ -13,6 +13,9 @@ export const companies = pgTable(
issueCounter: integer("issue_counter").notNull().default(0),
budgetMonthlyCents: integer("budget_monthly_cents").notNull().default(0),
spentMonthlyCents: integer("spent_monthly_cents").notNull().default(0),
attachmentMaxBytes: integer("attachment_max_bytes")
.notNull()
.default(10 * 1024 * 1024),
requireBoardApprovalForNewAgents: boolean("require_board_approval_for_new_agents")
.notNull()
.default(false),

View File

@@ -115,6 +115,14 @@ export const issues = pgTable(
and ${table.hiddenAt} is null
and ${table.status} not in ('done', 'cancelled')`,
),
activeProductivityReviewIdx: uniqueIndex("issues_active_productivity_review_uq")
.on(table.companyId, table.originKind, table.originId)
.where(
sql`${table.originKind} = 'issue_productivity_review'
and ${table.originId} is not null
and ${table.hiddenAt} is null
and ${table.status} not in ('done', 'cancelled')`,
),
activeStrandedIssueRecoveryIdx: uniqueIndex("issues_active_stranded_issue_recovery_uq")
.on(table.companyId, table.originKind, table.originId)
.where(

View File

@@ -26,6 +26,20 @@ describe("dynamic adapter type validation schemas", () => {
).toThrow();
});
it("accepts an explicit managed instructions bundle for new agents", () => {
expect(
createAgentSchema.parse({
name: "Bundle Agent",
adapterType: "codex_local",
instructionsBundle: {
files: {
"AGENTS.md": "Use AGENTS.md.",
},
},
}).instructionsBundle?.files["AGENTS.md"],
).toBe("Use AGENTS.md.");
});
it("accepts external adapter types in invite acceptance schema", () => {
expect(
acceptInviteSchema.parse({

View File

@@ -1,6 +1,9 @@
export const COMPANY_STATUSES = ["active", "paused", "archived"] as const;
export type CompanyStatus = (typeof COMPANY_STATUSES)[number];
export const DEFAULT_COMPANY_ATTACHMENT_MAX_BYTES = 10 * 1024 * 1024;
export const MAX_COMPANY_ATTACHMENT_MAX_BYTES = 1024 * 1024 * 1024;
export const DEPLOYMENT_MODES = ["local_trusted", "authenticated"] as const;
export type DeploymentMode = (typeof DEPLOYMENT_MODES)[number];
@@ -138,6 +141,12 @@ export const INBOX_MINE_ISSUE_STATUS_FILTER = INBOX_MINE_ISSUE_STATUSES.join(","
export const ISSUE_PRIORITIES = ["critical", "high", "medium", "low"] as const;
export type IssuePriority = (typeof ISSUE_PRIORITIES)[number];
export const MAX_ISSUE_REQUEST_DEPTH = 1024;
export function clampIssueRequestDepth(value: number | null | undefined): number {
if (typeof value !== "number" || !Number.isFinite(value)) return 0;
return Math.min(MAX_ISSUE_REQUEST_DEPTH, Math.max(0, Math.floor(value)));
}
export const ISSUE_THREAD_INTERACTION_KINDS = [
"suggest_tasks",
@@ -164,7 +173,14 @@ export const ISSUE_THREAD_INTERACTION_CONTINUATION_POLICIES = [
export type IssueThreadInteractionContinuationPolicy =
(typeof ISSUE_THREAD_INTERACTION_CONTINUATION_POLICIES)[number];
export const ISSUE_ORIGIN_KINDS = ["manual", "routine_execution", "stale_active_run_evaluation"] as const;
export const ISSUE_ORIGIN_KINDS = [
"manual",
"routine_execution",
"stale_active_run_evaluation",
"harness_liveness_escalation",
"issue_productivity_review",
"stranded_issue_recovery",
] as const;
export type BuiltInIssueOriginKind = (typeof ISSUE_ORIGIN_KINDS)[number];
export type PluginIssueOriginKind = `plugin:${string}`;
export type IssueOriginKind = BuiltInIssueOriginKind | PluginIssueOriginKind;

View File

@@ -1,6 +1,8 @@
export { agentAdapterTypeSchema, optionalAgentAdapterTypeSchema } from "./adapter-type.js";
export {
COMPANY_STATUSES,
DEFAULT_COMPANY_ATTACHMENT_MAX_BYTES,
MAX_COMPANY_ATTACHMENT_MAX_BYTES,
DEPLOYMENT_MODES,
DEPLOYMENT_EXPOSURES,
BIND_MODES,
@@ -16,6 +18,8 @@ export {
INBOX_MINE_ISSUE_STATUSES,
INBOX_MINE_ISSUE_STATUS_FILTER,
ISSUE_PRIORITIES,
MAX_ISSUE_REQUEST_DEPTH,
clampIssueRequestDepth,
ISSUE_THREAD_INTERACTION_KINDS,
ISSUE_THREAD_INTERACTION_STATUSES,
ISSUE_THREAD_INTERACTION_CONTINUATION_POLICIES,
@@ -329,6 +333,8 @@ export type {
IssueBlockerAttention,
IssueBlockerAttentionReason,
IssueBlockerAttentionState,
IssueProductivityReview,
IssueProductivityReviewTrigger,
IssueReferenceSource,
IssueRelatedWorkItem,
IssueRelatedWorkSummary,

View File

@@ -46,8 +46,10 @@ describe("routine variable helpers", () => {
it("identifies built-in variable names", () => {
expect(isBuiltinRoutineVariable("date")).toBe(true);
expect(isBuiltinRoutineVariable("timestamp")).toBe(true);
expect(isBuiltinRoutineVariable("repo")).toBe(false);
expect(BUILTIN_ROUTINE_VARIABLE_NAMES.has("date")).toBe(true);
expect(BUILTIN_ROUTINE_VARIABLE_NAMES.has("timestamp")).toBe(true);
});
it("getBuiltinRoutineVariableValues returns date in YYYY-MM-DD format", () => {
@@ -56,9 +58,17 @@ describe("routine variable helpers", () => {
expect(values.date).toBe(new Date().toISOString().slice(0, 10));
});
it("getBuiltinRoutineVariableValues returns a human-readable timestamp with year, time, and UTC", () => {
const values = getBuiltinRoutineVariableValues();
const year = String(new Date().getUTCFullYear());
expect(values.timestamp).toContain(year);
expect(values.timestamp).toMatch(/\d{1,2}:\d{2}\s?(AM|PM)/);
expect(values.timestamp).toContain("UTC");
});
it("excludes built-in variables from syncRoutineVariablesWithTemplate", () => {
const result = syncRoutineVariablesWithTemplate(
"Daily report for {{date}} — {{repo}}",
"Daily report for {{date}} at {{timestamp}} — {{repo}}",
[],
);
expect(result).toEqual([
@@ -66,11 +76,11 @@ describe("routine variable helpers", () => {
]);
});
it("interpolates built-in date variable alongside user variables", () => {
it("interpolates built-in variables alongside user variables", () => {
const builtins = getBuiltinRoutineVariableValues();
const allVars = { ...builtins, repo: "paperclip" };
expect(
interpolateRoutineTemplate("Report for {{date}} on {{repo}}", allVars),
).toBe(`Report for ${builtins.date} on paperclip`);
interpolateRoutineTemplate("Report for {{date}} ({{timestamp}}) on {{repo}}", allVars),
).toBe(`Report for ${builtins.date} (${builtins.timestamp}) on paperclip`);
});
});

View File

@@ -7,19 +7,33 @@ type RoutineTemplateInput = string | null | undefined | Array<string | null | un
* Built-in variable names that are automatically available in routine templates
* without needing to be defined in the routine's variables list.
*/
export const BUILTIN_ROUTINE_VARIABLE_NAMES = new Set(["date"]);
export const BUILTIN_ROUTINE_VARIABLE_NAMES = new Set(["date", "timestamp"]);
export function isBuiltinRoutineVariable(name: string): boolean {
return BUILTIN_ROUTINE_VARIABLE_NAMES.has(name);
}
const HUMAN_TIMESTAMP_FORMATTER = new Intl.DateTimeFormat("en-US", {
year: "numeric",
month: "long",
day: "numeric",
hour: "numeric",
minute: "2-digit",
hour12: true,
timeZone: "UTC",
timeZoneName: "short",
});
/**
* Returns current values for all built-in routine variables.
* `date` expands to the current date in YYYY-MM-DD format (UTC).
* `timestamp` expands to a human-readable date and time (e.g. "April 28, 2026 at 12:17 PM UTC").
*/
export function getBuiltinRoutineVariableValues(): Record<string, string> {
const now = new Date();
return {
date: new Date().toISOString().slice(0, 10),
date: now.toISOString().slice(0, 10),
timestamp: HUMAN_TIMESTAMP_FORMATTER.format(now),
};
}

View File

@@ -34,6 +34,7 @@ export interface CompanyPortabilityCompanyManifestEntry {
description: string | null;
brandColor: string | null;
logoPath: string | null;
attachmentMaxBytes: number | null;
requireBoardApprovalForNewAgents: boolean;
feedbackDataSharingEnabled: boolean;
feedbackDataSharingConsentAt: string | null;

View File

@@ -11,6 +11,7 @@ export interface Company {
issueCounter: number;
budgetMonthlyCents: number;
spentMonthlyCents: number;
attachmentMaxBytes: number;
requireBoardApprovalForNewAgents: boolean;
feedbackDataSharingEnabled: boolean;
feedbackDataSharingConsentAt: Date | null;

View File

@@ -136,6 +136,8 @@ export type {
IssueBlockerAttention,
IssueBlockerAttentionReason,
IssueBlockerAttentionState,
IssueProductivityReview,
IssueProductivityReviewTrigger,
IssueReferenceSource,
IssueRelatedWorkItem,
IssueRelatedWorkSummary,

View File

@@ -139,6 +139,22 @@ export interface IssueBlockerAttention {
sampleStalledBlockerIdentifier: string | null;
}
export type IssueProductivityReviewTrigger =
| "no_comment_streak"
| "long_active_duration"
| "high_churn";
export interface IssueProductivityReview {
reviewIssueId: string;
reviewIdentifier: string | null;
status: IssueStatus;
priority: IssuePriority;
trigger: IssueProductivityReviewTrigger | null;
noCommentStreak: number | null;
createdAt: Date;
updatedAt: Date;
}
export interface IssueRelation {
id: string;
companyId: string;
@@ -264,6 +280,7 @@ export interface Issue {
blockedBy?: IssueRelationIssueSummary[];
blocks?: IssueRelationIssueSummary[];
blockerAttention?: IssueBlockerAttention;
productivityReview?: IssueProductivityReview | null;
relatedWork?: IssueRelatedWorkSummary;
referencedIssueIdentifiers?: string[];
planDocument?: IssueDocument | null;

View File

@@ -44,6 +44,13 @@ const adapterConfigSchema = z.record(z.unknown()).superRefine((value, ctx) => {
}
});
export const createAgentInstructionsBundleSchema = z.object({
entryFile: z.string().trim().min(1).optional(),
files: z.record(z.string()).refine((files) => Object.keys(files).length > 0, {
message: "instructionsBundle.files must contain at least one file",
}),
});
export const createAgentSchema = z.object({
name: z.string().min(1),
role: z.enum(AGENT_ROLES).optional().default("general"),
@@ -54,6 +61,7 @@ export const createAgentSchema = z.object({
desiredSkills: z.array(z.string().min(1)).optional(),
adapterType: agentAdapterTypeSchema,
adapterConfig: adapterConfigSchema.optional().default({}),
instructionsBundle: createAgentInstructionsBundleSchema.optional(),
runtimeConfig: z.record(z.unknown()).optional().default({}),
defaultEnvironmentId: z.string().uuid().optional().nullable(),
budgetMonthlyCents: z.number().int().nonnegative().optional().default(0),

View File

@@ -1,4 +1,5 @@
import { z } from "zod";
import { MAX_COMPANY_ATTACHMENT_MAX_BYTES } from "../constants.js";
import { routineVariableSchema } from "./routine.js";
export const portabilityIncludeSchema = z
@@ -37,6 +38,7 @@ export const portabilityCompanyManifestEntrySchema = z.object({
description: z.string().nullable(),
brandColor: z.string().nullable(),
logoPath: z.string().nullable(),
attachmentMaxBytes: z.number().int().min(1).max(MAX_COMPANY_ATTACHMENT_MAX_BYTES).nullable().default(null),
requireBoardApprovalForNewAgents: z.boolean(),
feedbackDataSharingEnabled: z.boolean().default(false),
feedbackDataSharingConsentAt: z.string().datetime().nullable().default(null),

View File

@@ -1,14 +1,23 @@
import { z } from "zod";
import { COMPANY_STATUSES } from "../constants.js";
import {
COMPANY_STATUSES,
MAX_COMPANY_ATTACHMENT_MAX_BYTES,
} from "../constants.js";
const logoAssetIdSchema = z.string().uuid().nullable().optional();
const brandColorSchema = z.string().regex(/^#[0-9a-fA-F]{6}$/).nullable().optional();
const feedbackDataSharingTermsVersionSchema = z.string().min(1).nullable().optional();
const attachmentMaxBytesSchema = z
.number()
.int()
.min(1)
.max(MAX_COMPANY_ATTACHMENT_MAX_BYTES);
export const createCompanySchema = z.object({
name: z.string().min(1),
description: z.string().optional().nullable(),
budgetMonthlyCents: z.number().int().nonnegative().optional().default(0),
attachmentMaxBytes: attachmentMaxBytesSchema.optional(),
});
export type CreateCompany = z.infer<typeof createCompanySchema>;
@@ -25,6 +34,7 @@ export const updateCompanySchema = createCompanySchema
feedbackDataSharingTermsVersion: feedbackDataSharingTermsVersionSchema,
brandColor: brandColorSchema,
logoAssetId: logoAssetIdSchema,
attachmentMaxBytes: attachmentMaxBytesSchema.optional(),
});
export type UpdateCompany = z.infer<typeof updateCompanySchema>;

View File

@@ -1,4 +1,5 @@
import { describe, expect, it } from "vitest";
import { MAX_ISSUE_REQUEST_DEPTH } from "../index.js";
import {
addIssueCommentSchema,
createIssueSchema,
@@ -75,4 +76,21 @@ describe("issue validators", () => {
expect(response.summaryMarkdown).toBe("Summary\n\nNext action");
expect(document.body).toBe("# Plan\n\nShip it");
});
it("clamps oversized requestDepth values on create", () => {
const parsed = createIssueSchema.parse({
title: "Clamp request depth",
requestDepth: MAX_ISSUE_REQUEST_DEPTH + 500,
});
expect(parsed.requestDepth).toBe(MAX_ISSUE_REQUEST_DEPTH);
});
it("clamps oversized requestDepth values on update", () => {
const parsed = updateIssueSchema.parse({
requestDepth: MAX_ISSUE_REQUEST_DEPTH + 1,
});
expect(parsed.requestDepth).toBe(MAX_ISSUE_REQUEST_DEPTH);
});
});

View File

@@ -5,6 +5,7 @@ import {
ISSUE_EXECUTION_STAGE_TYPES,
ISSUE_EXECUTION_STATE_STATUSES,
ISSUE_PRIORITIES,
clampIssueRequestDepth,
ISSUE_STATUSES,
ISSUE_THREAD_INTERACTION_CONTINUATION_POLICIES,
ISSUE_THREAD_INTERACTION_KINDS,
@@ -123,6 +124,12 @@ export const issueExecutionStateSchema = z.object({
lastDecisionOutcome: z.enum(ISSUE_EXECUTION_DECISION_OUTCOMES).nullable(),
});
const issueRequestDepthInputSchema = z
.number()
.int()
.nonnegative()
.transform((value) => clampIssueRequestDepth(value));
export const createIssueSchema = z.object({
projectId: z.string().uuid().optional().nullable(),
projectWorkspaceId: z.string().uuid().optional().nullable(),
@@ -136,7 +143,7 @@ export const createIssueSchema = z.object({
priority: z.enum(ISSUE_PRIORITIES).optional().default("medium"),
assigneeAgentId: z.string().uuid().optional().nullable(),
assigneeUserId: z.string().optional().nullable(),
requestDepth: z.number().int().nonnegative().optional().default(0),
requestDepth: issueRequestDepthInputSchema.optional().default(0),
billingCode: z.string().optional().nullable(),
assigneeAdapterOverrides: issueAssigneeAdapterOverridesSchema.optional().nullable(),
executionPolicy: issueExecutionPolicySchema.optional().nullable(),
@@ -168,6 +175,7 @@ export const createIssueLabelSchema = z.object({
export type CreateIssueLabel = z.infer<typeof createIssueLabelSchema>;
export const updateIssueSchema = createIssueSchema.partial().extend({
requestDepth: issueRequestDepthInputSchema.optional(),
assigneeAgentId: z.string().trim().min(1).optional().nullable(),
comment: multilineTextSchema.pipe(z.string().min(1)).optional(),
reviewRequest: issueReviewRequestSchema.optional().nullable(),

View File

@@ -481,8 +481,11 @@ describe.sequential("agent skill routes", () => {
name: "QA Agent",
role: "engineer",
adapterType: "claude_local",
adapterConfig: {
promptTemplate: "You are QA.",
adapterConfig: {},
instructionsBundle: {
files: {
"AGENTS.md": "You are QA.",
},
},
}));
@@ -504,6 +507,26 @@ describe.sequential("agent skill routes", () => {
});
});
it("rejects legacy prompt templates for directly created local agents", async () => {
const res = await requestApp(await createApp(), (baseUrl) => request(baseUrl)
.post("/api/companies/company-1/agents")
.send({
name: "QA Agent",
role: "engineer",
adapterType: "claude_local",
adapterConfig: {
instructionsFilePath: "/tmp/existing/AGENTS.md",
promptTemplate: "You are QA.",
bootstrapPromptTemplate: "Bootstrap QA.",
},
}));
expect(res.status, JSON.stringify(res.body)).toBe(422);
expect(res.body.error).toContain("New agents must use instructionsBundle/AGENTS.md");
expect(mockAgentService.create).not.toHaveBeenCalled();
expect(mockAgentInstructionsService.materializeManagedBundle).not.toHaveBeenCalled();
});
it("materializes the bundled CEO instruction set for default CEO agents", async () => {
const res = await requestApp(await createApp(), (baseUrl) => request(baseUrl)
.post("/api/companies/company-1/agents")
@@ -652,8 +675,11 @@ describe.sequential("agent skill routes", () => {
name: "QA Agent",
role: "engineer",
adapterType: "claude_local",
adapterConfig: {
promptTemplate: "You are QA.",
adapterConfig: {},
instructionsBundle: {
files: {
"AGENTS.md": "You are QA.",
},
},
});
@@ -675,4 +701,24 @@ describe.sequential("agent skill routes", () => {
| undefined;
expect(approvalInput?.payload?.adapterConfig?.promptTemplate).toBeUndefined();
});
it("rejects legacy prompt templates for hire approval payloads", async () => {
const res = await request(await createApp(createDb(true)))
.post("/api/companies/company-1/agent-hires")
.send({
name: "QA Agent",
role: "engineer",
adapterType: "claude_local",
adapterConfig: {
instructionsFilePath: "/tmp/existing/AGENTS.md",
promptTemplate: "You are QA.",
bootstrapPromptTemplate: "Bootstrap QA.",
},
});
expect(res.status, JSON.stringify(res.body)).toBe(422);
expect(res.body.error).toContain("New agents must use instructionsBundle/AGENTS.md");
expect(mockAgentService.create).not.toHaveBeenCalled();
expect(mockAgentInstructionsService.materializeManagedBundle).not.toHaveBeenCalled();
});
});

View File

@@ -28,6 +28,10 @@ const mockEnvironmentService = vi.hoisted(() => ({
getById: vi.fn(),
}));
const mockCompanyService = vi.hoisted(() => ({
getById: vi.fn(),
}));
const mockIssueReferenceService = vi.hoisted(() => ({
deleteDocumentSource: vi.fn(async () => undefined),
diffIssueReferenceSummary: vi.fn(() => ({
@@ -51,6 +55,7 @@ const mockLogActivity = vi.hoisted(() => vi.fn());
vi.mock("../services/index.js", () => ({
projectService: () => mockProjectService,
issueService: () => mockIssueService,
companyService: () => mockCompanyService,
environmentService: () => mockEnvironmentService,
issueReferenceService: () => mockIssueReferenceService,
logActivity: mockLogActivity,
@@ -158,6 +163,11 @@ describe.sequential("execution environment route guards", () => {
mockIssueService.update.mockReset();
mockIssueService.getByIdentifier.mockReset();
mockIssueService.assertCheckoutOwner.mockReset();
mockCompanyService.getById.mockReset();
mockCompanyService.getById.mockResolvedValue({
id: "company-1",
attachmentMaxBytes: 10 * 1024 * 1024,
});
mockEnvironmentService.getById.mockReset();
mockIssueReferenceService.deleteDocumentSource.mockClear();
mockIssueReferenceService.diffIssueReferenceSummary.mockClear();

View File

@@ -672,7 +672,8 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
const companyId = randomUUID();
const agentId = randomUUID();
const rootIssueId = randomUUID();
const childIssueId = randomUUID();
const issueChain = Array.from({ length: 17 }, () => randomUUID());
const deepDescendantIssueId = issueChain.at(-1)!;
await db.insert(companies).values({
id: companyId,
@@ -705,15 +706,15 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
priority: "medium",
assigneeAgentId: agentId,
},
{
id: childIssueId,
...issueChain.map((issueId, index) => ({
id: issueId,
companyId,
parentId: rootIssueId,
title: "Paused child",
parentId: index === 0 ? rootIssueId : issueChain[index - 1],
title: `Paused desc ${index + 1}`,
status: "todo",
priority: "medium",
assigneeAgentId: agentId,
},
})),
]);
const [hold] = await db
.insert(issueTreeHolds)
@@ -731,8 +732,8 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
source: "automation",
triggerDetail: "system",
reason: "issue_blockers_resolved",
payload: { issueId: childIssueId },
contextSnapshot: { issueId: childIssueId, wakeReason: "issue_blockers_resolved" },
payload: { issueId: deepDescendantIssueId },
contextSnapshot: { issueId: deepDescendantIssueId, wakeReason: "issue_blockers_resolved" },
});
expect(blockedWake).toBeNull();
@@ -742,7 +743,7 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
reason: agentWakeupRequests.reason,
})
.from(agentWakeupRequests)
.where(sql`${agentWakeupRequests.payload} ->> 'issueId' = ${childIssueId}`)
.where(sql`${agentWakeupRequests.payload} ->> 'issueId' = ${deepDescendantIssueId}`)
.then((rows) => rows[0] ?? null);
expect(skippedWake).toMatchObject({ status: "skipped", reason: "issue_tree_hold_active" });
@@ -750,7 +751,7 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
await db.insert(issueComments).values({
id: childCommentId,
companyId,
issueId: childIssueId,
issueId: deepDescendantIssueId,
authorUserId: "board-user",
body: "Please respond while this hold is active.",
});
@@ -759,7 +760,7 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
source: "on_demand",
triggerDetail: "manual",
reason: "issue_commented",
payload: { issueId: childIssueId, commentId: childCommentId },
payload: { issueId: deepDescendantIssueId, commentId: childCommentId },
requestedByActorType: "agent",
requestedByActorId: agentId,
});
@@ -769,11 +770,11 @@ describeEmbeddedPostgres("heartbeat dependency-aware queued run selection", () =
source: "automation",
triggerDetail: "system",
reason: "issue_commented",
payload: { issueId: childIssueId, commentId: childCommentId },
payload: { issueId: deepDescendantIssueId, commentId: childCommentId },
requestedByActorType: "user",
requestedByActorId: "board-user",
contextSnapshot: {
issueId: childIssueId,
issueId: deepDescendantIssueId,
commentId: childCommentId,
wakeCommentId: childCommentId,
wakeReason: "issue_commented",

View File

@@ -472,6 +472,7 @@ describeEmbeddedPostgres("heartbeat orphaned process recovery", () => {
retryReason?: "assignment_recovery" | "issue_continuation_needed" | null;
assignToUser?: boolean;
activePauseHold?: boolean;
livenessState?: "completed" | "advanced" | "plan_only" | "empty_response" | "blocked" | "failed" | "needs_followup" | null;
runErrorCode?: string | null;
runError?: string | null;
}) {
@@ -545,6 +546,7 @@ describeEmbeddedPostgres("heartbeat orphaned process recovery", () => {
error: input.runStatus === "succeeded"
? null
: ("runError" in input ? input.runError : "run failed before issue advanced"),
livenessState: input.livenessState ?? null,
});
await db.insert(issues).values([
@@ -1417,6 +1419,59 @@ describeEmbeddedPostgres("heartbeat orphaned process recovery", () => {
}
});
it.each([
["failed", "adapter_failed"],
["failed", "process_lost"],
["timed_out", "adapter_timed_out"],
] as const)(
"re-enqueues stranded in-progress work after a %s/%s run before escalating",
async (runStatus, runErrorCode) => {
const { companyId, agentId, issueId, runId } = await seedStrandedIssueFixture({
status: "in_progress",
runStatus,
runErrorCode,
});
const heartbeat = heartbeatService(db);
const result = await heartbeat.reconcileStrandedAssignedIssues();
expect(result.dispatchRequeued).toBe(0);
expect(result.continuationRequeued).toBe(1);
expect(result.escalated).toBe(0);
expect(result.issueIds).toEqual([issueId]);
const runs = await db
.select()
.from(heartbeatRuns)
.where(eq(heartbeatRuns.agentId, agentId));
expect(runs).toHaveLength(2);
const retryRun = runs.find((row) => row.id !== runId);
expect(retryRun?.contextSnapshot as Record<string, unknown> | undefined).toMatchObject({
issueId,
taskId: issueId,
retryReason: "issue_continuation_needed",
retryOfRunId: runId,
source: "issue.continuation_recovery",
});
const recoveries = await db
.select()
.from(issues)
.where(
and(
eq(issues.companyId, companyId),
eq(issues.originKind, "stranded_issue_recovery"),
eq(issues.originId, issueId),
),
);
expect(recoveries).toHaveLength(0);
if (retryRun?.id) {
await waitForRunToSettle(heartbeat, retryRun.id);
}
},
);
it("still re-enqueues stranded assigned todo recovery when an old queued wake exists", async () => {
const { companyId, agentId, issueId, runId } = await seedStrandedIssueFixture({
status: "todo",
@@ -2055,18 +2110,21 @@ describeEmbeddedPostgres("heartbeat orphaned process recovery", () => {
expect(wakeups).toHaveLength(1);
});
it("re-enqueues continuation when the latest automatic continuation succeeded without closing the issue", async () => {
it("records productive continuation instead of recovery when the latest automatic continuation succeeded", async () => {
const { agentId, issueId, runId } = await seedStrandedIssueFixture({
status: "in_progress",
runStatus: "succeeded",
retryReason: "issue_continuation_needed",
livenessState: "advanced",
});
const heartbeat = heartbeatService(db);
const result = await heartbeat.reconcileStrandedAssignedIssues();
expect(result.continuationRequeued).toBe(1);
expect(result.continuationRequeued).toBe(0);
expect(result.productiveContinuationObserved).toBe(1);
expect(result.successfulContinuationObserved).toBe(0);
expect(result.escalated).toBe(0);
expect(result.issueIds).toEqual([issueId]);
expect(result.issueIds).toEqual([]);
const issue = await db.select().from(issues).where(eq(issues.id, issueId)).then((rows) => rows[0] ?? null);
expect(issue?.status).toBe("in_progress");
@@ -2078,14 +2136,10 @@ describeEmbeddedPostgres("heartbeat orphaned process recovery", () => {
.select()
.from(heartbeatRuns)
.where(eq(heartbeatRuns.agentId, agentId));
expect(runs).toHaveLength(2);
expect(runs.map((row) => row.id)).toEqual([runId]);
const retryRun = runs.find((row) => row.id !== runId);
expect(retryRun?.id).toBeTruthy();
expect((retryRun?.contextSnapshot as Record<string, unknown>)?.retryReason).toBe("issue_continuation_needed");
if (retryRun) {
await waitForRunToSettle(heartbeat, retryRun.id);
}
const wakeups = await db.select().from(agentWakeupRequests).where(eq(agentWakeupRequests.agentId, agentId));
expect(wakeups).toHaveLength(1);
});
it("does not reconcile user-assigned work through the agent stranded-work recovery path", async () => {

View File

@@ -74,6 +74,9 @@ function registerModuleMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => ({
getById: vi.fn(async () => null),

View File

@@ -36,6 +36,10 @@ const mockAgentService = vi.hoisted(() => ({
resolveByReference: vi.fn(),
}));
const mockCompanyService = vi.hoisted(() => ({
getById: vi.fn(),
}));
const mockDocumentService = vi.hoisted(() => ({
upsertIssueDocument: vi.fn(),
}));
@@ -94,6 +98,7 @@ function registerRouteMocks() {
vi.doMock("../services/index.js", () => ({
accessService: () => mockAccessService,
agentService: () => mockAgentService,
companyService: () => mockCompanyService,
documentService: () => mockDocumentService,
executionWorkspaceService: () => ({}),
feedbackService: () => ({
@@ -244,6 +249,7 @@ describe("agent issue mutation checkout ownership", () => {
mockAgentService.getById.mockReset();
mockAgentService.list.mockReset();
mockAgentService.resolveByReference.mockReset();
mockCompanyService.getById.mockReset();
mockIssueService.addComment.mockReset();
mockIssueService.assertCheckoutOwner.mockReset();
mockIssueService.getAttachmentById.mockReset();
@@ -276,6 +282,7 @@ describe("agent issue mutation checkout ownership", () => {
makeAgent(peerAgentId),
]);
mockAgentService.resolveByReference.mockResolvedValue({ ambiguous: false, agent: null });
mockCompanyService.getById.mockResolvedValue({ id: companyId, issuePrefix: "PAP" });
mockIssueService.getById.mockResolvedValue(makeIssue());
mockIssueService.getByIdentifier.mockResolvedValue(null);
mockIssueService.assertCheckoutOwner.mockResolvedValue({ adoptedFromRunId: null });
@@ -430,18 +437,20 @@ describe("agent issue mutation checkout ownership", () => {
expect(mockIssueService.update).toHaveBeenCalled();
});
it("allows same-company agent mutations when the issue is not in progress", async () => {
mockIssueService.getById.mockResolvedValue(makeIssue({ status: "todo", assigneeAgentId: ownerAgentId }));
mockIssueService.update.mockImplementation(async (_id: string, patch: Record<string, unknown>) => ({
...makeIssue({ status: "todo", assigneeAgentId: ownerAgentId }),
...patch,
}));
it.each([
["todo", "patch", (app: express.Express) => request(app).patch(`/api/issues/${issueId}`).send({ title: "Todo update" })],
["todo", "comment", (app: express.Express) => request(app).post(`/api/issues/${issueId}/comments`).send({ body: "Todo noise" })],
["blocked", "patch", (app: express.Express) => request(app).patch(`/api/issues/${issueId}`).send({ title: "Blocked update" })],
])("rejects peer agent %s issue %s mutations outside active checkout ownership", async (status, _kind, sendRequest) => {
mockIssueService.getById.mockResolvedValue(makeIssue({ status: status as "todo" | "blocked", assigneeAgentId: ownerAgentId }));
const res = await request(await createApp(peerActor())).patch(`/api/issues/${issueId}`).send({ title: "Todo update" });
const res = await sendRequest(await createApp(peerActor()));
expect(res.status).toBe(200);
expect(res.status, JSON.stringify(res.body)).toBe(403);
expect(res.body.error).toBe("Agent cannot mutate another agent's issue");
expect(mockIssueService.assertCheckoutOwner).not.toHaveBeenCalled();
expect(mockIssueService.update).toHaveBeenCalled();
expect(mockIssueService.update).not.toHaveBeenCalled();
expect(mockIssueService.addComment).not.toHaveBeenCalled();
});
it("allows same-company agent mutations on unassigned in-progress issues", async () => {

View File

@@ -10,6 +10,9 @@ const mockIssueService = vi.hoisted(() => ({
createAttachment: vi.fn(),
getAttachmentById: vi.fn(),
}));
const mockCompanyService = vi.hoisted(() => ({
getById: vi.fn(),
}));
const mockLogActivity = vi.hoisted(() => vi.fn(async () => undefined));
@@ -39,6 +42,7 @@ function registerRouteMocks() {
agentService: () => ({
getById: vi.fn(),
}),
companyService: () => mockCompanyService,
documentService: () => ({}),
executionWorkspaceService: () => ({}),
feedbackService: () => ({
@@ -166,6 +170,27 @@ function makeAttachment(contentType: string, originalFilename: string) {
};
}
// Unit coverage for the attachment size-cap helper: the PAPERCLIP_ATTACHMENT_MAX_BYTES
// env var acts as a hard process-level ceiling over any per-company value.
describe("normalizeIssueAttachmentMaxBytes", () => {
it("keeps the process-level attachment cap as the final cap", async () => {
const previous = process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES;
process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES = "5";
// Reset the module registry so the dynamic import below re-reads the env var.
vi.resetModules();
try {
const { normalizeIssueAttachmentMaxBytes } = await import("../attachment-types.js");
// null (no company override) falls back to the process cap.
expect(normalizeIssueAttachmentMaxBytes(null)).toBe(5);
// A company value above the process cap is clamped down to it.
expect(normalizeIssueAttachmentMaxBytes(10)).toBe(5);
// A company value below the process cap is honored as-is.
expect(normalizeIssueAttachmentMaxBytes(3)).toBe(3);
} finally {
// Restore the env var exactly (including the originally-unset case) and reset
// modules again so later tests do not see the temporary 5-byte cap.
if (previous === undefined) {
delete process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES;
} else {
process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES = previous;
}
vi.resetModules();
}
});
});
describe("issue attachment routes", () => {
beforeEach(() => {
vi.resetModules();
@@ -180,6 +205,10 @@ describe("issue attachment routes", () => {
registerRouteMocks();
vi.clearAllMocks();
mockLogActivity.mockResolvedValue(undefined);
mockCompanyService.getById.mockResolvedValue({
id: "company-1",
attachmentMaxBytes: 1024 * 1024 * 1024,
});
});
it("accepts zip uploads for issue attachments", async () => {
@@ -215,6 +244,50 @@ describe("issue attachment routes", () => {
expect(res.body.contentType).toBe("application/zip");
});
// The beforeEach company mock allows 1 GiB, so the 422 here can only come from the
// process-level cap (10 MiB = 10485760 bytes, per the asserted error message).
it("enforces the process-level issue attachment limit even when the company limit allows more", async () => {
const storage = createStorageService();
mockIssueService.getById.mockResolvedValue({
id: "11111111-1111-4111-8111-111111111111",
companyId: "company-1",
identifier: "PAP-1",
});
mockIssueService.createAttachment.mockResolvedValue(makeAttachment("application/octet-stream", "large.bin"));
const app = await createApp(storage);
// Upload exactly one byte over the 10 MiB process cap.
const res = await request(app)
.post("/api/companies/company-1/issues/11111111-1111-4111-8111-111111111111/attachments")
.attach("file", Buffer.alloc(10 * 1024 * 1024 + 1), {
filename: "large.bin",
contentType: "application/octet-stream",
});
expect(res.status).toBe(422);
expect(res.body.error).toBe("Attachment exceeds 10485760 bytes");
// The oversized payload must never reach blob storage.
expect(storage.__calls.putFile).toBeUndefined();
});
// A company-level attachmentMaxBytes below the process cap is enforced per upload.
it("enforces the configured per-company issue attachment limit", async () => {
const storage = createStorageService();
// Override the default (1 GiB) company mock with a tiny 4-byte cap.
mockCompanyService.getById.mockResolvedValue({
id: "company-1",
attachmentMaxBytes: 4,
});
mockIssueService.getById.mockResolvedValue({
id: "11111111-1111-4111-8111-111111111111",
companyId: "company-1",
identifier: "PAP-1",
});
const app = await createApp(storage);
// "large" is 5 bytes — one over the configured cap.
const res = await request(app)
.post("/api/companies/company-1/issues/11111111-1111-4111-8111-111111111111/attachments")
.attach("file", Buffer.from("large"), { filename: "large.txt", contentType: "text/plain" });
expect(res.status).toBe(422);
expect(res.body.error).toBe("Attachment exceeds 4 bytes");
// Rejection happens before any attachment record is created.
expect(mockIssueService.createAttachment).not.toHaveBeenCalled();
});
it("serves html attachments as downloads with nosniff", async () => {
const storage = createStorageService();
mockIssueService.getAttachmentById.mockResolvedValue(makeAttachment("text/html", "report.html"));

View File

@@ -74,6 +74,9 @@ function registerServiceMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => ({
getById: vi.fn(async () => null),

View File

@@ -74,6 +74,9 @@ function registerModuleMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => ({ getById: vi.fn(async () => null) }),
documentService: () => ({}),

View File

@@ -108,6 +108,9 @@ vi.mock("../services/routines.js", () => ({
}));
vi.mock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => mockAgentService,
documentService: () => ({}),
@@ -477,7 +480,7 @@ describe.sequential("issue comment reopen routes", () => {
));
});
it("does not implicitly reopen closed issues via POST comments for agent-authored comments", async () => {
it("rejects non-assignee agent POST comments on closed issues", async () => {
mockIssueService.getById.mockResolvedValue(makeIssue("done"));
mockIssueService.addComment.mockResolvedValue({
id: "comment-1",
@@ -500,11 +503,10 @@ describe.sequential("issue comment reopen routes", () => {
.post("/api/issues/11111111-1111-4111-8111-111111111111/comments")
.send({ body: "hello" });
expect(res.status).toBe(201);
expect(mockIssueService.update).not.toHaveBeenCalledWith(
"11111111-1111-4111-8111-111111111111",
{ status: "todo" },
);
expect(res.status).toBe(403);
expect(res.body.error).toBe("Agent cannot mutate another agent's issue");
expect(mockIssueService.update).not.toHaveBeenCalled();
expect(mockIssueService.addComment).not.toHaveBeenCalled();
expect(mockHeartbeatService.wakeup).not.toHaveBeenCalled();
});
@@ -625,7 +627,7 @@ describe.sequential("issue comment reopen routes", () => {
));
});
it("does not implicitly reopen closed issues via the PATCH comment path for agent-authored comments", async () => {
it("rejects non-assignee agent PATCH comments on closed issues", async () => {
mockIssueService.getById.mockResolvedValue(makeIssue("done"));
mockIssueService.addComment.mockResolvedValue({
id: "comment-1",
@@ -652,11 +654,10 @@ describe.sequential("issue comment reopen routes", () => {
.patch("/api/issues/11111111-1111-4111-8111-111111111111")
.send({ comment: "hello" });
expect(res.status).toBe(200);
expect(mockIssueService.update).not.toHaveBeenCalledWith(
"11111111-1111-4111-8111-111111111111",
expect.objectContaining({ status: "todo" }),
);
expect(res.status).toBe(403);
expect(res.body.error).toBe("Agent cannot mutate another agent's issue");
expect(mockIssueService.update).not.toHaveBeenCalled();
expect(mockIssueService.addComment).not.toHaveBeenCalled();
expect(mockHeartbeatService.wakeup).not.toHaveBeenCalled();
});
@@ -874,7 +875,7 @@ describe.sequential("issue comment reopen routes", () => {
.send({ body: "restart someone else's work", resume: true });
expect(res.status).toBe(403);
expect(res.body.error).toBe("Agent cannot request follow-up for another agent's issue");
expect(res.body.error).toBe("Agent cannot mutate another agent's issue");
expect(mockIssueService.update).not.toHaveBeenCalled();
expect(mockIssueService.addComment).not.toHaveBeenCalled();
expect(mockHeartbeatService.wakeup).not.toHaveBeenCalled();

View File

@@ -17,6 +17,9 @@ const mockIssueService = vi.hoisted(() => ({
}));
vi.mock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => ({
canUser: vi.fn(),
hasPermission: vi.fn(),

View File

@@ -108,6 +108,9 @@ function registerModuleMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => mockAgentService,
documentService: () => mockDocumentsService,

View File

@@ -24,6 +24,9 @@ const mockHeartbeatService = vi.hoisted(() => ({
function registerModuleMocks() {
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => ({
canUser: vi.fn(async () => false),
hasPermission: vi.fn(async () => false),

View File

@@ -82,6 +82,9 @@ function registerModuleMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => mockAgentService,
documentService: () => ({}),

View File

@@ -27,6 +27,9 @@ function registerModuleMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => ({
canUser: vi.fn(),
hasPermission: vi.fn(),
@@ -70,7 +73,7 @@ function makeIssue(status: "todo" | "done") {
id: "11111111-1111-4111-8111-111111111111",
companyId: "company-1",
status,
assigneeAgentId: "22222222-2222-4222-8222-222222222222",
assigneeAgentId: "agent-1",
assigneeUserId: null,
createdByUserId: "local-board",
identifier: "PAP-1018",

View File

@@ -36,6 +36,9 @@ vi.mock("../telemetry.js", () => ({
function registerModuleMocks() {
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => ({
canUser: vi.fn(async () => true),
hasPermission: vi.fn(async () => true),

View File

@@ -355,4 +355,44 @@ describe("issue tree control routes", () => {
}),
);
});
// Resume is modeled as a hold that is created already-released; the route must echo
// the released hold plus the list of pause holds it resumed, and must not trigger
// any of the cancellation machinery used by pause/cancel modes.
it("returns resume operations as released holds and avoids cancellation side effects", async () => {
const app = await createApp({
type: "board",
userId: "user-1",
companyIds: ["company-2"],
source: "session",
isInstanceAdmin: false,
});
mockTreeControlService.createHold.mockResolvedValue({
hold: {
id: "77777777-7777-4777-8777-777777777777",
mode: "resume",
status: "released",
reason: "resume subtree",
},
preview: {
mode: "resume",
totals: {
affectedIssues: 1,
},
warnings: [],
activeRuns: [],
},
resumedPauseHoldIds: ["33333333-3333-4333-8333-333333333333"],
});
const res = await request(app)
.post("/api/issues/11111111-1111-4111-8111-111111111111/tree-holds")
.send({ mode: "resume", reason: "resume subtree" });
expect(res.status).toBe(200);
expect(res.body.hold.mode).toBe("resume");
expect(res.body.hold.status).toBe("released");
expect(res.body.resumedPauseHoldIds).toEqual(["33333333-3333-4333-8333-333333333333"]);
// No run cancellation, wakeup cancellation, or status rewrites for resume mode.
expect(mockHeartbeatService.cancelRun).not.toHaveBeenCalled();
expect(mockTreeControlService.cancelUnclaimedWakeupsForTree).not.toHaveBeenCalled();
expect(mockTreeControlService.cancelIssueStatusesForHold).not.toHaveBeenCalled();
expect(mockTreeControlService.restoreIssueStatusesForHold).not.toHaveBeenCalled();
});
});

View File

@@ -337,19 +337,20 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
});
});
it("blocks normal checkout but allows comment interaction checkout under a pause hold", async () => {
it("walks pause-hold ancestry beyond 15 levels for checkout and interaction waives", async () => {
const companyId = randomUUID();
const agentId = randomUUID();
const rootIssueId = randomUUID();
const childIssueId = randomUUID();
const issuePath = Array.from({ length: 17 }, () => randomUUID());
const rootIssueId = issuePath[0];
const deepDescendantIssueId = issuePath.at(-1)!;
const rootRunId = randomUUID();
const childRunId = randomUUID();
const deepDescendantRunId = randomUUID();
const forgedRunId = randomUUID();
const rootWakeupRequestId = randomUUID();
const childWakeupRequestId = randomUUID();
const deepDescendantWakeupRequestId = randomUUID();
const forgedWakeupRequestId = randomUUID();
const rootCommentId = randomUUID();
const childCommentId = randomUUID();
const deepDescendantCommentId = randomUUID();
await db.insert(companies).values({
id: companyId,
@@ -368,25 +369,17 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
runtimeConfig: {},
permissions: {},
});
await db.insert(issues).values([
{
id: rootIssueId,
await db.insert(issues).values(
issuePath.map((issueId, index) => ({
id: issueId,
companyId,
title: "Paused root",
parentId: index > 0 ? issuePath[index - 1] : null,
title: `Issue ${index}`,
status: "todo",
priority: "medium",
assigneeAgentId: agentId,
},
{
id: childIssueId,
companyId,
parentId: rootIssueId,
title: "Paused child",
status: "todo",
priority: "medium",
assigneeAgentId: agentId,
},
]);
})),
);
await db.insert(issueComments).values([
{
id: rootCommentId,
@@ -396,11 +389,11 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
body: "Please answer this root issue question.",
},
{
id: childCommentId,
id: deepDescendantCommentId,
companyId,
issueId: childIssueId,
issueId: deepDescendantIssueId,
authorUserId: "board-user",
body: "Please answer this child issue question.",
body: "Please answer this deep descendant issue question.",
},
]);
await db.insert(agentWakeupRequests).values([
@@ -424,24 +417,24 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
source: "on_demand",
triggerDetail: "manual",
reason: "issue_commented",
payload: { issueId: childIssueId, commentId: childCommentId },
payload: { issueId: deepDescendantIssueId, commentId: deepDescendantCommentId },
status: "queued",
requestedByActorType: "agent",
requestedByActorId: agentId,
runId: forgedRunId,
},
{
id: childWakeupRequestId,
id: deepDescendantWakeupRequestId,
companyId,
agentId,
source: "automation",
triggerDetail: "system",
reason: "issue_commented",
payload: { issueId: childIssueId, commentId: childCommentId },
payload: { issueId: deepDescendantIssueId, commentId: deepDescendantCommentId },
status: "queued",
requestedByActorType: "user",
requestedByActorId: "board-user",
runId: childRunId,
runId: deepDescendantRunId,
},
]);
await db.insert(heartbeatRuns).values([
@@ -470,25 +463,25 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
status: "queued",
wakeupRequestId: forgedWakeupRequestId,
contextSnapshot: {
issueId: childIssueId,
issueId: deepDescendantIssueId,
wakeReason: "issue_commented",
commentId: childCommentId,
wakeCommentId: childCommentId,
commentId: deepDescendantCommentId,
wakeCommentId: deepDescendantCommentId,
},
},
{
id: childRunId,
id: deepDescendantRunId,
companyId,
agentId,
invocationSource: "automation",
triggerDetail: "system",
status: "queued",
wakeupRequestId: childWakeupRequestId,
wakeupRequestId: deepDescendantWakeupRequestId,
contextSnapshot: {
issueId: childIssueId,
issueId: deepDescendantIssueId,
wakeReason: "issue_commented",
commentId: childCommentId,
wakeCommentId: childCommentId,
commentId: deepDescendantCommentId,
wakeCommentId: deepDescendantCommentId,
source: "issue.comment",
},
},
@@ -500,16 +493,28 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
reason: "operator requested pause",
actor: { actorType: "user", actorId: "board-user", userId: "board-user" },
});
const deepDescendantGate = await treeSvc.getActivePauseHoldGate(companyId, deepDescendantIssueId);
expect(deepDescendantGate).toMatchObject({
holdId: expect.any(String),
rootIssueId,
issueId: deepDescendantIssueId,
isRoot: false,
mode: "pause",
});
const issueSvc = issueService(db);
await expect(issueSvc.checkout(childIssueId, agentId, ["todo"], randomUUID())).rejects.toMatchObject({
await expect(
issueSvc.checkout(deepDescendantIssueId, agentId, ["todo"], randomUUID()),
).rejects.toMatchObject({
status: 409,
details: expect.objectContaining({
rootIssueId,
mode: "pause",
}),
});
await expect(issueSvc.checkout(childIssueId, agentId, ["todo"], forgedRunId)).rejects.toMatchObject({
await expect(
issueSvc.checkout(deepDescendantIssueId, agentId, ["todo"], forgedRunId),
).rejects.toMatchObject({
status: 409,
details: expect.objectContaining({
rootIssueId,
@@ -517,9 +522,9 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
}),
});
const checkedOutChild = await issueSvc.checkout(childIssueId, agentId, ["todo"], childRunId);
const checkedOutChild = await issueSvc.checkout(deepDescendantIssueId, agentId, ["todo"], deepDescendantRunId);
expect(checkedOutChild.status).toBe("in_progress");
expect(checkedOutChild.checkoutRunId).toBe(childRunId);
expect(checkedOutChild.checkoutRunId).toBe(deepDescendantRunId);
const checkedOutRoot = await issueSvc.checkout(rootIssueId, agentId, ["todo"], rootRunId);
expect(checkedOutRoot.status).toBe("in_progress");
@@ -552,4 +557,86 @@ describeEmbeddedPostgres("issueTreeControlService", () => {
expect(checkedOutLegacyFullPauseRoot.status).toBe("in_progress");
expect(checkedOutLegacyFullPauseRoot.checkoutRunId).toBe(rootRunId);
});
// A resume hold on the root must release pause holds inside its subtree (the child
// pause) while leaving unrelated pause holds active, and must record release
// metadata linking each side of the pause/resume pair.
it("resumes subtree pauses by releasing matching pause holds", async () => {
const companyId = randomUUID();
const rootIssueId = randomUUID();
const childIssueId = randomUUID();
const nonSubtreeIssueId = randomUUID();
await db.insert(companies).values({
id: companyId,
name: "Paperclip",
issuePrefix: `T${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
requireBoardApprovalForNewAgents: false,
});
// Three issues: root -> child, plus an unrelated sibling outside the subtree.
await db.insert(issues).values([
{
id: rootIssueId,
companyId,
title: "Root",
status: "todo",
priority: "medium",
},
{
id: childIssueId,
companyId,
parentId: rootIssueId,
title: "Child",
status: "todo",
priority: "medium",
},
{
id: nonSubtreeIssueId,
companyId,
title: "Unrelated",
status: "todo",
priority: "medium",
},
]);
const treeSvc = issueTreeControlService(db);
const subtreePause = await treeSvc.createHold(companyId, childIssueId, {
mode: "pause",
reason: "pause child only",
actor: { actorType: "user", actorId: "board-user", userId: "board-user" },
});
const nonSubtreePause = await treeSvc.createHold(companyId, nonSubtreeIssueId, {
mode: "pause",
reason: "pause unrelated issue",
actor: { actorType: "user", actorId: "board-user", userId: "board-user" },
});
// Resuming at the root should sweep up only the child's pause hold.
const resumed = await treeSvc.createHold(companyId, rootIssueId, {
mode: "resume",
reason: "resume subtree",
actor: { actorType: "user", actorId: "board-user", userId: "board-user" },
});
expect(resumed.hold.mode).toBe("resume");
expect(resumed.hold.status).toBe("released");
expect(resumed.resumedPauseHoldIds).toEqual([subtreePause.hold.id]);
const rows = await db
.select({ id: issueTreeHolds.id, status: issueTreeHolds.status, releaseMetadata: issueTreeHolds.releaseMetadata })
.from(issueTreeHolds)
.where(eq(issueTreeHolds.companyId, companyId));
const byId = new Map(rows.map((row) => [row.id, row] as const));
// Subtree pause released; unrelated pause untouched; resume hold itself released.
expect(byId.get(subtreePause.hold.id)?.status).toBe("released");
expect(byId.get(nonSubtreePause.hold.id)?.status).toBe("active");
expect(byId.get(resumed.hold.id)?.status).toBe("released");
// The released pause points back at the resume hold that released it…
const releaseMetadata = byId.get(subtreePause.hold.id)?.releaseMetadata as
| Record<string, unknown>
| null;
expect(releaseMetadata).toMatchObject({
resumedByResumeHoldId: resumed.hold.id,
resumeHoldMode: "tree_resume",
resumedPauseHoldId: subtreePause.hold.id,
});
// …and the resume hold lists every pause hold it released.
expect((byId.get(resumed.hold.id)?.releaseMetadata as Record<string, unknown> | null)).toMatchObject({
resumedPauseHoldIds: [subtreePause.hold.id],
resumeMode: "subtree",
});
});
});

View File

@@ -27,6 +27,9 @@ const mockIssueThreadInteractionService = vi.hoisted(() => ({
}));
vi.mock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => ({
canUser: vi.fn(async () => true),
hasPermission: vi.fn(async () => true),
@@ -82,6 +85,9 @@ vi.mock("../services/index.js", () => ({
function registerModuleMocks() {
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => ({
canUser: vi.fn(async () => true),
hasPermission: vi.fn(async () => true),

View File

@@ -90,6 +90,9 @@ function registerRouteMocks() {
}));
vi.doMock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => mockAgentService,
documentService: () => ({}),

View File

@@ -12,6 +12,7 @@ const mockIssueService = vi.hoisted(() => ({
getCommentCursor: vi.fn(),
getComment: vi.fn(),
listBlockerAttention: vi.fn(),
listProductivityReviews: vi.fn(),
listAttachments: vi.fn(),
}));
@@ -91,6 +92,9 @@ const mockWorkProductService = vi.hoisted(() => ({
const mockEnvironmentService = vi.hoisted(() => ({}));
vi.mock("../services/index.js", () => ({
companyService: () => ({
getById: vi.fn(async () => ({ id: "company-1", attachmentMaxBytes: 10 * 1024 * 1024 })),
}),
accessService: () => mockAccessService,
agentService: () => mockAgentService,
documentService: () => mockDocumentsService,
@@ -177,6 +181,7 @@ describe.sequential("issue goal context routes", () => {
});
mockIssueService.getComment.mockResolvedValue(null);
mockIssueService.listBlockerAttention.mockResolvedValue(new Map());
mockIssueService.listProductivityReviews.mockResolvedValue(new Map());
mockIssueService.listAttachments.mockResolvedValue([]);
mockDocumentsService.getIssueDocumentPayload.mockResolvedValue({});
mockDocumentsService.getIssueDocumentByKey.mockResolvedValue(null);

View File

@@ -24,7 +24,7 @@ import {
} from "./helpers/embedded-postgres.js";
import { instanceSettingsService } from "../services/instance-settings.ts";
import { clampIssueListLimit, ISSUE_LIST_MAX_LIMIT, issueService } from "../services/issues.ts";
import { buildProjectMentionHref } from "@paperclipai/shared";
import { buildProjectMentionHref, MAX_ISSUE_REQUEST_DEPTH } from "@paperclipai/shared";
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
@@ -1451,6 +1451,56 @@ describeEmbeddedPostgres("issueService.create workspace inheritance", () => {
}),
]);
});
// createChild must clamp a caller-supplied requestDepth at MAX_ISSUE_REQUEST_DEPTH
// (imported from @paperclipai/shared) so helper chains cannot grow depth unboundedly.
it("clamps helper-created child requestDepth to the safe maximum", async () => {
const companyId = randomUUID();
const projectId = randomUUID();
const goalId = randomUUID();
const parentIssueId = randomUUID();
await db.insert(companies).values({
id: companyId,
name: "Paperclip",
issuePrefix: `T${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
requireBoardApprovalForNewAgents: false,
});
// Isolated workspaces off, so workspace inheritance does not interfere here.
await instanceSettingsService(db).updateExperimental({ enableIsolatedWorkspaces: false });
await db.insert(goals).values({
id: goalId,
companyId,
title: "Ship child helpers",
level: "task",
status: "active",
});
await db.insert(projects).values({
id: projectId,
companyId,
goalId,
name: "Workspace project",
status: "in_progress",
});
// Parent already sits at the maximum depth.
await db.insert(issues).values({
id: parentIssueId,
companyId,
projectId,
goalId,
title: "Parent issue",
status: "in_progress",
priority: "medium",
requestDepth: MAX_ISSUE_REQUEST_DEPTH,
});
// Request far past the cap; the service must clamp rather than propagate it.
const { issue: child } = await svc.createChild(parentIssueId, {
title: "Child helper",
status: "todo",
requestDepth: MAX_ISSUE_REQUEST_DEPTH + 100,
});
expect(child.requestDepth).toBe(MAX_ISSUE_REQUEST_DEPTH);
});
});
describeEmbeddedPostgres("issueService blockers and dependency wake readiness", () => {

View File

@@ -0,0 +1,427 @@
import { randomUUID } from "node:crypto";
import { and, eq, sql } from "drizzle-orm";
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
import {
activityLog,
agents,
companies,
createDb,
heartbeatRuns,
issueComments,
issues,
} from "@paperclipai/db";
import {
getEmbeddedPostgresTestSupport,
startEmbeddedPostgresTestDatabase,
} from "./helpers/embedded-postgres.js";
import { MAX_ISSUE_REQUEST_DEPTH } from "@paperclipai/shared";
import {
DEFAULT_PRODUCTIVITY_REVIEW_NO_COMMENT_STREAK_RUNS,
PRODUCTIVITY_REVIEW_ORIGIN_KIND,
productivityReviewService,
} from "../services/productivity-review.ts";
// Resolve embedded-Postgres support once at module load; on unsupported hosts the
// whole suite becomes describe.skip with a visible warning instead of failing.
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
if (!embeddedPostgresSupport.supported) {
console.warn(
`Skipping embedded Postgres productivity review tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
);
}
describeEmbeddedPostgres("productivity review service", () => {
// One embedded Postgres instance is shared by the whole suite; `db` is the drizzle
// handle bound to it.
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
let db: ReturnType<typeof createDb>;
// 30s timeout covers first-run download/startup of the embedded server.
beforeAll(async () => {
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-productivity-review-");
db = createDb(tempDb.connectionString);
}, 30_000);
// TRUNCATE … CASCADE also clears tables referencing companies, giving each test a
// clean slate from a single statement.
afterEach(async () => {
await db.execute(sql.raw(`TRUNCATE TABLE "companies" CASCADE`));
});
afterAll(async () => {
await tempDb?.cleanup();
});
/**
 * Seed one company with a CTO manager, an engineer reporting to the CTO, and a
 * single issue assigned to the engineer. Insert order (company -> agents -> issue)
 * satisfies foreign keys. Returns the generated ids plus the fixed createdAt used
 * for the issue timestamps.
 */
async function seedAssignedIssue(opts?: {
status?: "todo" | "in_progress";
startedAt?: Date;
parentId?: string | null;
originKind?: string;
}) {
const companyId = randomUUID();
const managerId = randomUUID();
const coderId = randomUUID();
const issueId = randomUUID();
// Issue prefix must be unique per company; derive it from the company id.
const issuePrefix = `PR${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`;
const createdAt = new Date("2026-04-28T10:00:00.000Z");
await db.insert(companies).values({
id: companyId,
name: "Productivity Review Co",
issuePrefix,
requireBoardApprovalForNewAgents: false,
});
await db.insert(agents).values([
{
id: managerId,
companyId,
name: "CTO",
role: "cto",
status: "idle",
adapterType: "codex_local",
adapterConfig: {},
runtimeConfig: {},
permissions: {},
},
{
id: coderId,
companyId,
name: "Coder",
role: "engineer",
status: "idle",
// reportsTo drives which manager a productivity review gets assigned to.
reportsTo: managerId,
adapterType: "codex_local",
adapterConfig: {},
runtimeConfig: {},
permissions: {},
},
]);
await db.insert(issues).values({
id: issueId,
companyId,
title: "Implement data import",
// Defaults model an in-progress, manually-created, top-level issue; callers
// override these to exercise other trigger paths.
status: opts?.status ?? "in_progress",
priority: "medium",
assigneeAgentId: coderId,
parentId: opts?.parentId ?? null,
originKind: opts?.originKind ?? "manual",
issueNumber: 1,
identifier: `${issuePrefix}-1`,
startedAt: opts?.startedAt ?? createdAt,
createdAt,
updatedAt: createdAt,
});
return { companyId, managerId, coderId, issueId, issuePrefix, createdAt };
}
/**
 * Seed `count` succeeded heartbeat runs for one agent/issue pair, spaced one
 * minute apart walking backwards from `now`, each lasting 30 seconds. When
 * `withRunComments` is set, every run also gets an agent-authored progress
 * comment timestamped to match the run. Returns the inserted run rows.
 */
async function insertRuns(input: {
companyId: string;
agentId: string;
issueId: string;
count: number;
now: Date;
withRunComments?: boolean;
}) {
// Build every run row up front; run `offset` starts `offset` minutes before `now`.
const runs: Array<typeof heartbeatRuns.$inferInsert> = Array.from(
{ length: input.count },
(_unused, offset) => {
const startedAt = new Date(input.now.getTime() - offset * 60_000);
return {
id: randomUUID(),
companyId: input.companyId,
agentId: input.agentId,
status: "succeeded",
invocationSource: "assignment",
triggerDetail: "system",
startedAt,
finishedAt: new Date(startedAt.getTime() + 30_000),
contextSnapshot: { issueId: input.issueId, taskId: input.issueId },
livenessState: "advanced",
nextAction: "Continue processing the next batch.",
createdAt: startedAt,
updatedAt: startedAt,
};
},
);
await db.insert(heartbeatRuns).values(runs);
if (input.withRunComments) {
// One comment per run, linked via createdByRunId and sharing its timestamps.
const comments = runs.map((run, offset) => ({
companyId: input.companyId,
issueId: input.issueId,
authorAgentId: input.agentId,
createdByRunId: run.id,
body: `Progress update ${offset}`,
createdAt: run.createdAt as Date,
updatedAt: run.createdAt as Date,
}));
await db.insert(issueComments).values(comments);
}
return runs;
}
/**
 * Fetch every synthetic productivity-review issue for the company, ordered by
 * creation time (oldest first).
 */
async function listProductivityReviews(companyId: string) {
const reviewFilter = and(
eq(issues.companyId, companyId),
eq(issues.originKind, PRODUCTIVITY_REVIEW_ORIGIN_KIND),
);
return db.select().from(issues).where(reviewFilter).orderBy(issues.createdAt);
}
// A streak of completed runs with no progress comments must create exactly one
// review, parented under the flagged issue and assigned to the agent's manager;
// reconciling again refreshes the same review instead of duplicating it.
it("creates exactly one manager-assigned review for a no-comment run streak and refreshes it idempotently", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: seeded.issueId,
count: DEFAULT_PRODUCTIVITY_REVIEW_NO_COMMENT_STREAK_RUNS,
now,
});
const service = productivityReviewService(db);
const first = await service.reconcileProductivityReviews({ now, companyId: seeded.companyId });
const second = await service.reconcileProductivityReviews({ now, companyId: seeded.companyId });
expect(first.created).toBe(1);
expect(second.updated).toBe(1);
const reviews = await listProductivityReviews(seeded.companyId);
expect(reviews).toHaveLength(1);
expect(reviews[0]?.parentId).toBe(seeded.issueId);
expect(reviews[0]?.assigneeAgentId).toBe(seeded.managerId);
expect(reviews[0]?.originId).toBe(seeded.issueId);
// Fingerprint is the dedupe key that makes reconciliation idempotent.
expect(reviews[0]?.originFingerprint).toBe(`productivity-review:${seeded.issueId}`);
expect(reviews[0]?.description).toContain("Primary trigger: `no_comment_streak`");
// NOTE(review): "streak: 10" assumes DEFAULT_PRODUCTIVITY_REVIEW_NO_COMMENT_STREAK_RUNS
// is 10 — this assertion couples to the constant's current value.
expect(reviews[0]?.description).toContain("No-comment completed-run streak: 10");
const comments = await db
.select()
.from(issueComments)
.where(eq(issueComments.issueId, reviews[0]!.id));
expect(comments.some((comment) => comment.body.includes("Productivity review evidence refreshed"))).toBe(true);
});
// An issue in progress for 7 hours with no runs at all triggers the
// long_active_duration review, which must NOT impose a continuation hold.
it("creates a long-active review without enabling a continuation hold", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue({
status: "in_progress",
startedAt: new Date(now.getTime() - 7 * 60 * 60 * 1000),
});
const service = productivityReviewService(db);
const result = await service.reconcileProductivityReviews({ now, companyId: seeded.companyId });
const hold = await service.isProductivityReviewContinuationHoldActive({
companyId: seeded.companyId,
issueId: seeded.issueId,
agentId: seeded.coderId,
now,
});
expect(result.created).toBe(1);
const [review] = await listProductivityReviews(seeded.companyId);
expect(review?.description).toContain("Primary trigger: `long_active_duration`");
expect(review?.priority).toBe("medium");
expect(hold.held).toBe(false);
});
// Run volume alone (10 runs inside one rolling hour) triggers a high_churn review
// even when every run posted a progress comment — churn is judged independently of
// the no-comment-streak signal.
it("creates a high-churn review even when every sampled run has a progress comment", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: seeded.issueId,
count: 10,
now,
withRunComments: true,
});
const result = await productivityReviewService(db).reconcileProductivityReviews({
now,
companyId: seeded.companyId,
});
expect(result.created).toBe(1);
const [review] = await listProductivityReviews(seeded.companyId);
expect(review?.description).toContain("Primary trigger: `high_churn`");
expect(review?.description).toContain("Runs in rolling windows: 10/1h");
});
// Only the assignee's own run volume counts toward churn: the coder stays at 9
// runs (below the 10-run trigger) while a flood of manager runs and manager
// comments on the same issue must not tip the evaluation over.
it("ignores non-assignee comments when evaluating high-churn productivity reviews", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: seeded.issueId,
count: 9,
now,
});
const managerRuns = await insertRuns({
companyId: seeded.companyId,
agentId: seeded.managerId,
issueId: seeded.issueId,
count: 10,
now,
});
// Manager-authored comments tied to the manager's runs — noise the evaluator
// must filter out.
await db.insert(issueComments).values(
managerRuns.map((run, index) => ({
companyId: seeded.companyId,
issueId: seeded.issueId,
authorAgentId: seeded.managerId,
createdByRunId: run.id,
body: `Manager note ${index}`,
createdAt: run.createdAt as Date,
updatedAt: run.createdAt as Date,
})),
);
const result = await productivityReviewService(db).reconcileProductivityReviews({
now,
companyId: seeded.companyId,
});
expect(result.created).toBe(0);
expect(await listProductivityReviews(seeded.companyId)).toHaveLength(0);
});
// Issues living underneath an existing productivity review must be excluded from
// evaluation, otherwise a review's own follow-up work could spawn further reviews
// recursively.
it("skips productivity-review descendants so reviews cannot recursively spawn reviews", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
const reviewId = randomUUID();
const childId = randomUUID();
// Pre-existing review parented under the seeded issue.
await db.insert(issues).values({
id: reviewId,
companyId: seeded.companyId,
title: "Existing productivity review",
status: "todo",
priority: "high",
originKind: PRODUCTIVITY_REVIEW_ORIGIN_KIND,
originId: seeded.issueId,
originFingerprint: `productivity-review:${seeded.issueId}`,
parentId: seeded.issueId,
issueNumber: 2,
identifier: `${seeded.issuePrefix}-2`,
});
// Child of the review that would otherwise trip both the long-active (7h) and
// high-churn (10 runs) triggers.
await db.insert(issues).values({
id: childId,
companyId: seeded.companyId,
title: "Review follow-up child",
status: "in_progress",
priority: "medium",
assigneeAgentId: seeded.coderId,
parentId: reviewId,
issueNumber: 3,
identifier: `${seeded.issuePrefix}-3`,
startedAt: new Date(now.getTime() - 7 * 60 * 60 * 1000),
});
await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: childId,
count: 10,
now,
});
const result = await productivityReviewService(db).reconcileProductivityReviews({
now,
companyId: seeded.companyId,
});
const reviews = await listProductivityReviews(seeded.companyId);
// No new review was created; only the pre-seeded one remains.
expect(result.created).toBe(0);
expect(reviews).toHaveLength(1);
});
it("treats a recently completed review as a snooze window", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: seeded.issueId,
count: 10,
now,
});
const service = productivityReviewService(db);
await service.reconcileProductivityReviews({ now, companyId: seeded.companyId });
const [review] = await listProductivityReviews(seeded.companyId);
await db
.update(issues)
.set({ status: "done", updatedAt: now })
.where(eq(issues.id, review!.id));
const result = await service.reconcileProductivityReviews({
now: new Date(now.getTime() + 30 * 60 * 1000),
companyId: seeded.companyId,
});
const reviews = await listProductivityReviews(seeded.companyId);
expect(result.snoozed).toBe(1);
expect(reviews).toHaveLength(1);
});
it("reports and logs soft-stop holds for open no-comment reviews", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
const [latestRun] = await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: seeded.issueId,
count: 10,
now,
});
const service = productivityReviewService(db);
await service.reconcileProductivityReviews({ now, companyId: seeded.companyId });
const [review] = await listProductivityReviews(seeded.companyId);
const hold = await service.isProductivityReviewContinuationHoldActive({
companyId: seeded.companyId,
issueId: seeded.issueId,
agentId: seeded.coderId,
now,
});
expect(hold.held).toBe(true);
if (!hold.held) return;
await service.recordContinuationHold({
companyId: seeded.companyId,
issueId: seeded.issueId,
runId: latestRun!.id as string,
agentId: seeded.coderId,
reviewIssueId: review!.id,
trigger: hold.trigger,
reason: hold.reason,
});
const activities = await db
.select()
.from(activityLog)
.where(eq(activityLog.action, "issue.productivity_review_continuation_held"));
expect(activities).toHaveLength(1);
expect(activities[0]?.entityId).toBe(seeded.issueId);
});
it("clamps poisoned requestDepth metadata instead of aborting productivity reconciliation", async () => {
const now = new Date("2026-04-28T12:00:00.000Z");
const seeded = await seedAssignedIssue();
await db
.update(issues)
.set({ requestDepth: 2_147_483_647 })
.where(eq(issues.id, seeded.issueId));
await insertRuns({
companyId: seeded.companyId,
agentId: seeded.coderId,
issueId: seeded.issueId,
count: DEFAULT_PRODUCTIVITY_REVIEW_NO_COMMENT_STREAK_RUNS,
now,
});
const result = await productivityReviewService(db).reconcileProductivityReviews({
now,
companyId: seeded.companyId,
});
expect(result.failed).toBe(0);
const [review] = await listProductivityReviews(seeded.companyId);
expect(review?.requestDepth).toBe(MAX_ISSUE_REQUEST_DEPTH);
});
});

View File

@@ -320,6 +320,55 @@ describe.sequential("workspace runtime service route authorization", () => {
expect(mockAssertCanManageProjectWorkspaceRuntimeServices).toHaveBeenCalled();
}, 15000);
it("blocks shared-project stop/restart requests from agents", async () => {
mockProjectService.getById.mockResolvedValue(buildProject({
id: projectId,
workspaces: [{
id: workspaceId,
companyId: "company-1",
projectId,
name: "Workspace",
sourceType: "local_path",
cwd: "/tmp/project",
repoUrl: null,
repoRef: null,
defaultRef: null,
visibility: "default",
setupCommand: null,
cleanupCommand: null,
remoteProvider: null,
remoteWorkspaceRef: null,
sharedWorkspaceKey: "shared-key",
metadata: null,
runtimeConfig: null,
isPrimary: false,
runtimeServices: [],
createdAt: new Date(),
updatedAt: new Date(),
}],
}));
const app = await createProjectApp({
type: "agent",
agentId: "agent-1",
companyId: "company-1",
source: "agent_key",
runId: "run-1",
});
const responses = await Promise.all([
request(app).post(`/api/projects/${projectId}/workspaces/${workspaceId}/runtime-services/stop`).send({}),
request(app).post(`/api/projects/${projectId}/workspaces/${workspaceId}/runtime-services/restart`).send({}),
]);
for (const res of responses) {
expect(res.status).toBe(403);
expect(res.body.error).toContain("Missing permission");
expect(mockProjectService.getById).toHaveBeenCalledWith(projectId);
expect(mockAssertCanManageProjectWorkspaceRuntimeServices).not.toHaveBeenCalled();
}
}, 15000);
it("rejects agent callers that create project execution workspace commands", async () => {
const app = await createProjectApp({
type: "agent",

View File

@@ -14,6 +14,7 @@
* - Exact types: "application/pdf"
* - Wildcards: "image/*" or "application/vnd.openxmlformats-officedocument.*"
*/
import { MAX_COMPANY_ATTACHMENT_MAX_BYTES } from "@paperclipai/shared";
export const DEFAULT_ALLOWED_TYPES: readonly string[] = [
"image/png",
@@ -92,3 +93,10 @@ export function isAllowedContentType(contentType: string): boolean {
export const MAX_ATTACHMENT_BYTES =
Number(process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES) || 10 * 1024 * 1024;
/**
 * Resolve the effective per-issue attachment size limit, in bytes.
 *
 * Takes the company-level override (`companies.attachmentMaxBytes`) and clamps
 * it to both the shared company cap and the process-level cap, so a company
 * can never raise its limit above what this deployment allows.
 *
 * @param value Company-configured limit; `null`/`undefined`, non-finite, or
 *   non-positive values fall back to the process default.
 * @returns A positive integer byte limit, never exceeding either cap.
 */
export function normalizeIssueAttachmentMaxBytes(value: number | null | undefined): number {
  if (typeof value !== "number" || !Number.isFinite(value)) {
    return MAX_ATTACHMENT_BYTES;
  }
  // Floor BEFORE the positivity check: a value in (0, 1) would otherwise pass
  // `value <= 0`, floor to 0, and yield a 0-byte limit that rejects every
  // upload instead of falling back to the default.
  const floored = Math.floor(value);
  if (floored <= 0) {
    return MAX_ATTACHMENT_BYTES;
  }
  return Math.min(floored, MAX_COMPANY_ATTACHMENT_MAX_BYTES, MAX_ATTACHMENT_BYTES);
}

View File

@@ -705,6 +705,12 @@ export async function startServer(): Promise<StartedServer> {
logger.warn({ ...scanned }, "startup active-run output watchdog created review work");
}
})
.then(async () => {
const reviewed = await heartbeat.reconcileProductivityReviews();
if (reviewed.created > 0 || reviewed.updated > 0 || reviewed.failed > 0) {
logger.warn({ ...reviewed }, "startup productivity reconciliation created or updated review work");
}
})
.catch((err) => {
logger.error({ err }, "startup heartbeat recovery failed");
});
@@ -764,6 +770,12 @@ export async function startServer(): Promise<StartedServer> {
logger.warn({ ...scanned }, "periodic active-run output watchdog created review work");
}
})
.then(async () => {
const reviewed = await heartbeat.reconcileProductivityReviews();
if (reviewed.created > 0 || reviewed.updated > 0 || reviewed.failed > 0) {
logger.warn({ ...reviewed }, "periodic productivity reconciliation created or updated review work");
}
})
.catch((err) => {
logger.error({ err }, "periodic heartbeat recovery failed");
});

View File

@@ -141,6 +141,7 @@ function readSkillMarkdown(skillName: string): string | null {
normalized !== "paperclip" &&
normalized !== "paperclip-create-agent" &&
normalized !== "paperclip-create-plugin" &&
normalized !== "paperclip-converting-plans-to-tasks" &&
normalized !== "para-memory-files"
)
return null;
@@ -2862,6 +2863,10 @@ export function accessRoutes(
{
name: "paperclip-create-agent",
path: "/api/skills/paperclip-create-agent"
},
{
name: "paperclip-converting-plans-to-tasks",
path: "/api/skills/paperclip-converting-plans-to-tasks"
}
]
});

View File

@@ -690,7 +690,10 @@ export function agentRoutes(
role: string;
adapterType: string;
adapterConfig: unknown;
}>(agent: T): Promise<T> {
}>(
agent: T,
input?: { files: Record<string, string>; entryFile?: string },
): Promise<T> {
if (!adapterSupportsInstructionsBundle(agent.adapterType)) {
return agent;
}
@@ -703,27 +706,45 @@ export function agentRoutes(
|| Boolean(asNonEmptyString(adapterConfig.instructionsFilePath))
|| Boolean(asNonEmptyString(adapterConfig.agentsMdPath));
if (hasExplicitInstructionsBundle) {
return agent;
const nextAdapterConfig = { ...adapterConfig };
const hadLegacyPrompt =
Object.prototype.hasOwnProperty.call(nextAdapterConfig, "promptTemplate")
|| Object.prototype.hasOwnProperty.call(nextAdapterConfig, "bootstrapPromptTemplate");
delete nextAdapterConfig.promptTemplate;
delete nextAdapterConfig.bootstrapPromptTemplate;
if (!hadLegacyPrompt) return agent;
const updated = await svc.update(agent.id, { adapterConfig: nextAdapterConfig });
return (updated as T | null) ?? { ...agent, adapterConfig: nextAdapterConfig };
}
const promptTemplate = typeof adapterConfig.promptTemplate === "string"
? adapterConfig.promptTemplate
: "";
const files = promptTemplate.trim().length === 0
? await loadDefaultAgentInstructionsBundle(resolveDefaultAgentInstructionsBundleRole(agent.role))
: { "AGENTS.md": promptTemplate };
const files = input?.files
?? await loadDefaultAgentInstructionsBundle(resolveDefaultAgentInstructionsBundleRole(agent.role));
const materialized = await instructions.materializeManagedBundle(
agent,
files,
{ entryFile: "AGENTS.md", replaceExisting: false },
{ entryFile: input?.entryFile ?? "AGENTS.md", replaceExisting: false },
);
const nextAdapterConfig = { ...materialized.adapterConfig };
delete nextAdapterConfig.promptTemplate;
delete nextAdapterConfig.bootstrapPromptTemplate;
const updated = await svc.update(agent.id, { adapterConfig: nextAdapterConfig });
return (updated as T | null) ?? { ...agent, adapterConfig: nextAdapterConfig };
}
// Reject legacy prompt-template fields on newly created agents whose adapter
// supports instructions bundles; such agents must use instructionsBundle /
// AGENTS.md instead. Throws a 422 (unprocessable) when either legacy key is
// present as an own property of the adapter config.
function assertNoNewAgentLegacyPromptTemplate(adapterType: string, adapterConfig: Record<string, unknown>) {
  // Adapters without bundle support are unaffected by this restriction.
  if (!adapterSupportsInstructionsBundle(adapterType)) return;
  const legacyKeys = ["promptTemplate", "bootstrapPromptTemplate"] as const;
  // Own-property check on purpose: inherited keys must not trigger rejection.
  const hasLegacyKey = legacyKeys.some((key) =>
    Object.prototype.hasOwnProperty.call(adapterConfig, key),
  );
  if (hasLegacyKey) {
    throw unprocessable(
      "New agents must use instructionsBundle/AGENTS.md instead of adapterConfig.promptTemplate or bootstrapPromptTemplate",
    );
  }
}
async function assertCanManageInstructionsPath(req: Request, targetAgent: { id: string; companyId: string }) {
assertCompanyAccess(req, targetAgent.companyId);
if (req.actor.type !== "board") {
@@ -1465,11 +1486,16 @@ export function agentRoutes(
const sourceIssueIds = parseSourceIssueIds(req.body);
const {
desiredSkills: requestedDesiredSkills,
instructionsBundle,
sourceIssueId: _sourceIssueId,
sourceIssueIds: _sourceIssueIds,
...hireInput
} = req.body;
hireInput.adapterType = assertKnownAdapterType(hireInput.adapterType);
assertNoNewAgentLegacyPromptTemplate(
hireInput.adapterType,
(hireInput.adapterConfig ?? {}) as Record<string, unknown>,
);
assertNoAgentHostWorkspaceCommandMutation(
req,
collectAgentAdapterWorkspaceCommandPaths(hireInput.adapterConfig),
@@ -1522,7 +1548,7 @@ export function agentRoutes(
spentMonthlyCents: 0,
lastHeartbeatAt: null,
});
const agent = await materializeDefaultInstructionsBundleForNewAgent(createdAgent);
const agent = await materializeDefaultInstructionsBundleForNewAgent(createdAgent, instructionsBundle);
let approval: Awaited<ReturnType<typeof approvalsSvc.getById>> | null = null;
const actor = getActorInfo(req);
@@ -1652,9 +1678,14 @@ export function agentRoutes(
const {
desiredSkills: requestedDesiredSkills,
instructionsBundle,
...createInput
} = req.body;
createInput.adapterType = assertKnownAdapterType(createInput.adapterType);
assertNoNewAgentLegacyPromptTemplate(
createInput.adapterType,
(createInput.adapterConfig ?? {}) as Record<string, unknown>,
);
assertNoAgentHostWorkspaceCommandMutation(
req,
collectAgentAdapterWorkspaceCommandPaths(createInput.adapterConfig),
@@ -1697,7 +1728,7 @@ export function agentRoutes(
spentMonthlyCents: 0,
lastHeartbeatAt: null,
});
const agent = await materializeDefaultInstructionsBundleForNewAgent(createdAgent);
const agent = await materializeDefaultInstructionsBundleForNewAgent(createdAgent, instructionsBundle);
const actor = getActorInfo(req);
await logActivity(db, {

View File

@@ -291,7 +291,9 @@ export function issueTreeControlRoutes(db: Db) {
}
}
res.status(result.hold.mode === "restore" ? 200 : 201).json(result);
res
.status(result.hold.mode === "restore" || result.hold.mode === "resume" ? 200 : 201)
.json(result);
});
router.get("/issues/:id/tree-control/state", async (req, res) => {

View File

@@ -39,6 +39,7 @@ import * as serviceIndex from "../services/index.js";
import {
accessService,
agentService,
companyService,
executionWorkspaceService,
goalService,
heartbeatService,
@@ -65,7 +66,7 @@ import {
import { shouldWakeAssigneeOnCheckout } from "./issues-checkout-wakeup.js";
import {
isInlineAttachmentContentType,
MAX_ATTACHMENT_BYTES,
normalizeIssueAttachmentMaxBytes,
normalizeContentType,
SVG_CONTENT_TYPE,
} from "../attachment-types.js";
@@ -404,6 +405,7 @@ export function issueRoutes(
pluginWorkerManager: opts.pluginWorkerManager,
});
const feedback = feedbackService(db);
const companiesSvc = companyService(db);
const instanceSettings = instanceSettingsService(db);
const agentsSvc = agentService(db);
const projectsSvc = projectService(db);
@@ -427,11 +429,6 @@ export function issueRoutes(
};
const feedbackExportService = opts?.feedbackExportService;
const environmentsSvc = environmentService(db);
const upload = multer({
storage: multer.memoryStorage(),
limits: { fileSize: MAX_ATTACHMENT_BYTES, files: 1 },
});
function withContentPath<T extends { id: string }>(attachment: T) {
return {
...attachment,
@@ -493,7 +490,11 @@ export function issueRoutes(
return parsed;
}
async function runSingleFileUpload(req: Request, res: Response) {
async function runSingleFileUpload(req: Request, res: Response, fileSizeLimit: number) {
const upload = multer({
storage: multer.memoryStorage(),
limits: { fileSize: fileSizeLimit, files: 1 },
});
await new Promise<void>((resolve, reject) => {
upload.single("file")(req, res, (err: unknown) => {
if (err) reject(err);
@@ -602,23 +603,39 @@ export function issueRoutes(
res.status(403).json({ error: "Agent authentication required" });
return false;
}
if (issue.status !== "in_progress" || issue.assigneeAgentId === null) {
if (issue.assigneeAgentId === null) {
return true;
}
if (issue.assigneeAgentId !== actorAgentId) {
if (await hasActiveCheckoutManagementOverride(actorAgentId, issue.companyId, issue.assigneeAgentId)) {
return true;
}
res.status(409).json({
error: "Issue is checked out by another agent",
details: {
issueId: issue.id,
assigneeAgentId: issue.assigneeAgentId,
actorAgentId,
},
});
if (issue.status === "in_progress") {
res.status(409).json({
error: "Issue is checked out by another agent",
details: {
issueId: issue.id,
assigneeAgentId: issue.assigneeAgentId,
actorAgentId,
},
});
} else {
res.status(403).json({
error: "Agent cannot mutate another agent's issue",
details: {
issueId: issue.id,
assigneeAgentId: issue.assigneeAgentId,
actorAgentId,
status: issue.status,
securityPrinciples: ["Least Privilege", "Complete Mediation", "Fail Securely"],
},
});
}
return false;
}
if (issue.status !== "in_progress") {
return true;
}
const runId = requireAgentRunId(req, res);
if (!runId) return false;
const ownership = await svc.assertCheckoutOwner(issue.id, actorAgentId, runId);
@@ -907,6 +924,11 @@ export function issueRoutes(
? Number.parseInt(rawLimit, 10)
: null;
const limit = parsedLimit === null ? ISSUE_LIST_DEFAULT_LIMIT : clampIssueListLimit(parsedLimit);
const rawOffset = req.query.offset as string | undefined;
const parsedOffset = rawOffset !== undefined && /^\d+$/.test(rawOffset)
? Number.parseInt(rawOffset, 10)
: null;
const offset = parsedOffset ?? 0;
if (assigneeUserFilterRaw === "me" && (!assigneeUserId || req.actor.type !== "board")) {
res.status(403).json({ error: "assigneeUserId=me requires board authentication" });
@@ -928,6 +950,10 @@ export function issueRoutes(
res.status(400).json({ error: `limit must be a positive integer up to ${ISSUE_LIST_MAX_LIMIT}` });
return;
}
if (rawOffset !== undefined && (parsedOffset === null || !Number.isInteger(parsedOffset) || parsedOffset < 0)) {
res.status(400).json({ error: "offset must be a non-negative integer" });
return;
}
const result = await svc.list(companyId, {
status: req.query.status as string | undefined,
@@ -952,6 +978,7 @@ export function issueRoutes(
includeBlockedBy: req.query.includeBlockedBy === "true" || req.query.includeBlockedBy === "1",
q: req.query.q as string | undefined,
limit,
offset,
});
res.json(result);
});
@@ -1034,6 +1061,7 @@ export function issueRoutes(
wakeComment,
relations,
blockerAttention,
productivityReview,
attachments,
continuationSummary,
currentExecutionWorkspace,
@@ -1045,6 +1073,7 @@ export function issueRoutes(
wakeCommentId ? svc.getComment(wakeCommentId) : null,
svc.getRelationSummaries(issue.id),
svc.listBlockerAttention(issue.companyId, [issue]).then((map) => map.get(issue.id) ?? null),
svc.listProductivityReviews(issue.companyId, [issue.id]).then((map) => map.get(issue.id) ?? null),
svc.listAttachments(issue.id),
documentsSvc.getIssueDocumentByKey(issue.id, ISSUE_CONTINUATION_SUMMARY_DOCUMENT_KEY),
currentExecutionWorkspacePromise,
@@ -1058,6 +1087,7 @@ export function issueRoutes(
description: issue.description,
status: issue.status,
...(blockerAttention ? { blockerAttention } : {}),
productivityReview,
priority: issue.priority,
projectId: issue.projectId,
goalId: goal?.id ?? issue.goalId,
@@ -1066,6 +1096,8 @@ export function issueRoutes(
blocks: relations.blocks,
assigneeAgentId: issue.assigneeAgentId,
assigneeUserId: issue.assigneeUserId,
originKind: issue.originKind,
originId: issue.originId,
updatedAt: issue.updatedAt,
},
ancestors: ancestors.map((ancestor) => ({
@@ -1127,13 +1159,23 @@ export function issueRoutes(
return;
}
assertCompanyAccess(req, issue.companyId);
const [{ project, goal }, ancestors, mentionedProjectIds, documentPayload, relations, blockerAttention, referenceSummary] = await Promise.all([
const [
{ project, goal },
ancestors,
mentionedProjectIds,
documentPayload,
relations,
blockerAttention,
productivityReview,
referenceSummary,
] = await Promise.all([
resolveIssueProjectAndGoal(issue),
svc.getAncestors(issue.id),
svc.findMentionedProjectIds(issue.id, { includeCommentBodies: false }),
documentsSvc.getIssueDocumentPayload(issue),
svc.getRelationSummaries(issue.id),
svc.listBlockerAttention(issue.companyId, [issue]).then((map) => map.get(issue.id) ?? null),
svc.listProductivityReviews(issue.companyId, [issue.id]).then((map) => map.get(issue.id) ?? null),
issueReferencesSvc.listIssueReferenceSummary(issue.id),
]);
const mentionedProjects = mentionedProjectIds.length > 0
@@ -1148,6 +1190,7 @@ export function issueRoutes(
goalId: goal?.id ?? issue.goalId,
ancestors,
...(blockerAttention ? { blockerAttention } : {}),
productivityReview,
blockedBy: relations.blockedBy,
blocks: relations.blocks,
relatedWork: referenceSummary,
@@ -3692,12 +3735,15 @@ export function issueRoutes(
}
if (!(await assertAgentIssueMutationAllowed(req, res, issue))) return;
const company = await companiesSvc.getById(companyId);
const attachmentMaxBytes = normalizeIssueAttachmentMaxBytes(company?.attachmentMaxBytes);
try {
await runSingleFileUpload(req, res);
await runSingleFileUpload(req, res, attachmentMaxBytes);
} catch (err) {
if (err instanceof multer.MulterError) {
if (err.code === "LIMIT_FILE_SIZE") {
res.status(422).json({ error: `Attachment exceeds ${MAX_ATTACHMENT_BYTES} bytes` });
res.status(422).json({ error: `Attachment exceeds ${attachmentMaxBytes} bytes` });
return;
}
res.status(400).json({ error: err.message });

View File

@@ -14,7 +14,7 @@ import type { WorkspaceRuntimeDesiredState, WorkspaceRuntimeServiceStateMap } fr
import { trackProjectCreated } from "@paperclipai/shared/telemetry";
import { validate } from "../middleware/validate.js";
import { projectService, logActivity, workspaceOperationService } from "../services/index.js";
import { conflict } from "../errors.js";
import { conflict, forbidden } from "../errors.js";
import { assertCompanyAccess, getActorInfo } from "./authz.js";
import {
buildWorkspaceRuntimeDesiredStatePatch,
@@ -36,6 +36,7 @@ import { environmentService } from "../services/environments.js";
import { secretService } from "../services/secrets.js";
const WORKSPACE_CONTROL_OUTPUT_MAX_CHARS = 256 * 1024;
const SHARED_WORKSPACE_STOP_AND_RESTART_ACTIONS = new Set(["stop", "restart"]);
export function projectRoutes(db: Db) {
const router = Router();
@@ -346,6 +347,15 @@ export function projectRoutes(db: Db) {
return;
}
const isSharedWorkspace = Boolean(workspace.sharedWorkspaceKey);
if (
req.actor.type === "agent"
&& isSharedWorkspace
&& SHARED_WORKSPACE_STOP_AND_RESTART_ACTIONS.has(action)
) {
throw forbidden("Missing permission to manage workspace runtime services");
}
await assertCanManageProjectWorkspaceRuntimeServices(db, req, {
companyId: project.companyId,
projectWorkspaceId: workspace.id,
@@ -549,9 +559,9 @@ export function projectRoutes(db: Db) {
stderr,
system:
action === "stop"
? "Stopped project workspace runtime services.\n"
? "Stopped project workspace runtime services.\nThis does not pause issue work or held wake scheduling."
: action === "restart"
? "Restarted project workspace runtime services.\n"
? "Restarted project workspace runtime services.\nThis does not pause issue work or held wake scheduling."
: "Started project workspace runtime services.\n",
metadata: {
runtimeServiceCount,

View File

@@ -45,6 +45,7 @@ export function companyService(db: Db) {
issueCounter: companies.issueCounter,
budgetMonthlyCents: companies.budgetMonthlyCents,
spentMonthlyCents: companies.spentMonthlyCents,
attachmentMaxBytes: companies.attachmentMaxBytes,
requireBoardApprovalForNewAgents: companies.requireBoardApprovalForNewAgents,
feedbackDataSharingEnabled: companies.feedbackDataSharingEnabled,
feedbackDataSharingConsentAt: companies.feedbackDataSharingConsentAt,

View File

@@ -2428,6 +2428,10 @@ function buildManifestFromPackageFiles(
description: asString(companyFrontmatter.description),
brandColor: asString(paperclipCompany.brandColor),
logoPath: asString(paperclipCompany.logoPath) ?? asString(paperclipCompany.logo),
attachmentMaxBytes:
typeof paperclipCompany.attachmentMaxBytes === "number" && Number.isFinite(paperclipCompany.attachmentMaxBytes)
? Math.max(1, Math.floor(paperclipCompany.attachmentMaxBytes))
: null,
requireBoardApprovalForNewAgents:
typeof paperclipCompany.requireBoardApprovalForNewAgents === "boolean"
? paperclipCompany.requireBoardApprovalForNewAgents
@@ -3465,6 +3469,7 @@ export function companyPortabilityService(db: Db, storage?: StorageService) {
company: stripEmptyValues({
brandColor: company.brandColor ?? null,
logoPath: companyLogoPath,
attachmentMaxBytes: company.attachmentMaxBytes,
requireBoardApprovalForNewAgents: company.requireBoardApprovalForNewAgents ? true : undefined,
feedbackDataSharingEnabled: company.feedbackDataSharingEnabled ? true : undefined,
feedbackDataSharingConsentAt: company.feedbackDataSharingConsentAt?.toISOString() ?? null,
@@ -3963,6 +3968,7 @@ export function companyPortabilityService(db: Db, storage?: StorageService) {
id: string;
name: string;
requireBoardApprovalForNewAgents?: boolean | null;
attachmentMaxBytes?: number | null;
} | null = null;
let companyAction: "created" | "updated" | "unchanged" = "unchanged";
@@ -3985,6 +3991,9 @@ export function companyPortabilityService(db: Db, storage?: StorageService) {
name: companyName,
description: include.company ? (sourceManifest.company?.description ?? null) : null,
brandColor: include.company ? (sourceManifest.company?.brandColor ?? null) : null,
attachmentMaxBytes: include.company
? (sourceManifest.company?.attachmentMaxBytes ?? undefined)
: undefined,
requireBoardApprovalForNewAgents: include.company
? (sourceManifest.company?.requireBoardApprovalForNewAgents ?? false)
: false,
@@ -4016,6 +4025,7 @@ export function companyPortabilityService(db: Db, storage?: StorageService) {
name: sourceManifest.company.name,
description: sourceManifest.company.description,
brandColor: sourceManifest.company.brandColor,
attachmentMaxBytes: sourceManifest.company.attachmentMaxBytes ?? undefined,
requireBoardApprovalForNewAgents: sourceManifest.company.requireBoardApprovalForNewAgents,
feedbackDataSharingEnabled: sourceManifest.company.feedbackDataSharingEnabled,
feedbackDataSharingConsentAt: sourceManifest.company.feedbackDataSharingConsentAt

View File

@@ -110,6 +110,7 @@ import {
} from "./recovery/index.js";
import { isAutomaticRecoverySuppressedByPauseHold } from "./recovery/pause-hold-guard.js";
import { recoveryService } from "./recovery/service.js";
import { productivityReviewService } from "./productivity-review.js";
import { withAgentStartLock } from "./agent-start-lock.js";
import { redactCurrentUserText, redactCurrentUserValue } from "../log-redaction.js";
import {
@@ -2004,6 +2005,7 @@ export function heartbeatService(db: Db, options: HeartbeatServiceOptions = {})
};
const budgets = budgetService(db, budgetHooks);
const recovery = recoveryService(db, { enqueueWakeup });
const productivityReviews = productivityReviewService(db, { enqueueWakeup });
let unsafeTextProjectionPromise: Promise<boolean> | null = null;
async function hasUnsafeTextProjectionDatabase() {
@@ -2807,6 +2809,29 @@ export function heartbeatService(db: Db, options: HeartbeatServiceOptions = {})
projectId: issue.projectId,
})
: null;
if (issue) {
const productivityHold = await productivityReviews.isProductivityReviewContinuationHoldActive({
companyId: issue.companyId,
issueId: issue.id,
agentId: run.agentId,
});
if (productivityHold.held) {
await setRunStatus(run.id, run.status, {
livenessReason:
`${run.livenessReason ?? "Run ended without concrete progress"}; continuation held by productivity review ${productivityHold.reviewIdentifier ?? productivityHold.reviewIssueId}`,
});
await productivityReviews.recordContinuationHold({
companyId: issue.companyId,
issueId: issue.id,
runId: run.id,
agentId: run.agentId,
reviewIssueId: productivityHold.reviewIssueId,
trigger: productivityHold.trigger,
reason: productivityHold.reason,
});
return;
}
}
const nextAttempt = readContinuationAttempt(run.continuationAttempt) + 1;
const idempotencyKey = issue
@@ -4494,6 +4519,10 @@ export function heartbeatService(db: Db, options: HeartbeatServiceOptions = {})
return recovery.scanSilentActiveRuns(opts);
}
// Thin delegation so heartbeat callers (startup + periodic sweeps) can trigger
// productivity-review reconciliation through the heartbeat service facade
// without depending on the productivity-review service directly.
async function reconcileProductivityReviews(opts?: { now?: Date; companyId?: string }) {
  return productivityReviews.reconcileProductivityReviews(opts);
}
async function buildRunOutputSilence(
run: Pick<
typeof heartbeatRuns.$inferSelect,
@@ -7494,6 +7523,8 @@ export function heartbeatService(db: Db, options: HeartbeatServiceOptions = {})
scanSilentActiveRuns,
reconcileProductivityReviews,
buildRunOutputSilence,
tickTimers: async (now = new Date()) => {

View File

@@ -32,6 +32,10 @@ export { routineService } from "./routines.js";
export { costService } from "./costs.js";
export { financeService } from "./finance.js";
export { heartbeatService } from "./heartbeat.js";
export {
productivityReviewService,
PRODUCTIVITY_REVIEW_ORIGIN_KIND,
} from "./productivity-review.js";
export { classifyIssueGraphLiveness, type IssueLivenessFinding } from "./recovery/index.js";
export { dashboardService } from "./dashboard.js";
export { sidebarBadgeService } from "./sidebar-badges.js";

View File

@@ -71,7 +71,7 @@ type RestoreTreeStatusResult = TreeStatusUpdateResult & {
const TERMINAL_ISSUE_STATUSES = new Set<IssueStatus>(["done", "cancelled"]);
const ACTIVE_RUN_STATUSES = ["queued", "running"] as const;
const DEFAULT_RELEASE_POLICY: IssueTreeHoldReleasePolicy = { strategy: "manual" };
const MAX_PAUSE_HOLD_GATE_DEPTH = 15;
const MAX_PAUSE_HOLD_ANCESTOR_DEPTH = 100;
export const ISSUE_TREE_CONTROL_INTERACTION_WAKE_REASONS: ReadonlySet<string> = new Set([
"issue_commented",
"issue_reopened_via_comment",
@@ -548,6 +548,22 @@ export function issueTreeControlService(db: Db) {
return byIssueId;
}
// Fetch every ACTIVE pause-mode hold whose root issue is one of the given
// issue ids, scoped to the company, in stable creation order (createdAt with
// id as a deterministic tiebreaker). Short-circuits to [] without a query
// when the id list is empty, since `inArray` with no values is invalid.
async function activePauseHoldsForIssueIds(companyId: string, issueIds: string[]) {
  if (issueIds.length === 0) return [];
  return db
    .select()
    .from(issueTreeHolds)
    .where(
      and(
        eq(issueTreeHolds.companyId, companyId),
        eq(issueTreeHolds.status, "active"),
        eq(issueTreeHolds.mode, "pause"),
        inArray(issueTreeHolds.rootIssueId, issueIds),
      ),
    )
    .orderBy(asc(issueTreeHolds.createdAt), asc(issueTreeHolds.id));
}
async function getActivePauseHoldGate(
companyId: string,
issueId: string,
@@ -573,9 +589,12 @@ export function issueTreeControlService(db: Db) {
const holdByRootIssueId = new Map(activePauseHolds.map((hold) => [hold.rootIssueId, hold]));
let currentIssueId: string | null = issueId;
const visited = new Set<string>();
let depth = 0;
while (currentIssueId && !visited.has(currentIssueId) && depth < MAX_PAUSE_HOLD_GATE_DEPTH) {
while (
currentIssueId
&& !visited.has(currentIssueId)
&& visited.size < MAX_PAUSE_HOLD_ANCESTOR_DEPTH
) {
visited.add(currentIssueId);
const hold = holdByRootIssueId.get(currentIssueId);
if (hold) {
@@ -596,7 +615,6 @@ export function issueTreeControlService(db: Db) {
.where(and(eq(issues.id, currentIssueId), eq(issues.companyId, companyId)))
.then((rows) => rows[0] ?? null);
currentIssueId = parent?.parentId ?? null;
depth += 1;
}
return null;
@@ -690,13 +708,100 @@ export function issueTreeControlService(db: Db) {
releasePolicy?: IssueTreeHoldReleasePolicy | null;
actor: ActorInput;
},
) {
): Promise<{
hold: IssueTreeHold;
preview: IssueTreeControlPreview;
resumedPauseHoldIds?: string[];
}> {
const holdReleasePolicy = normalizeReleasePolicy(input.releasePolicy);
const holdPreview = await preview(companyId, rootIssueId, {
mode: input.mode,
releasePolicy: holdReleasePolicy,
});
if (input.mode === "resume") {
const issueIds = [...new Set(holdPreview.issues.map((issue) => issue.id))];
const activePauseHolds = await activePauseHoldsForIssueIds(companyId, issueIds);
const releaseReason = input.reason ?? "Subtree resume applied.";
const { hold: resumeHold } = await db.transaction(async (tx) => {
const [createdHold] = await tx
.insert(issueTreeHolds)
.values({
companyId,
rootIssueId,
mode: input.mode,
status: "active",
reason: input.reason ?? null,
releasePolicy: holdReleasePolicy as unknown as Record<string, unknown>,
createdByActorType: input.actor.actorType,
createdByAgentId: input.actor.agentId ?? null,
createdByUserId: input.actor.userId ?? (input.actor.actorType === "user" ? input.actor.actorId : null),
createdByRunId: input.actor.runId ?? null,
})
.returning();
const memberRows = holdPreview.issues.map((issue) => ({
companyId,
holdId: createdHold.id,
issueId: issue.id,
parentIssueId: issue.parentId,
depth: issue.depth,
issueIdentifier: issue.identifier,
issueTitle: issue.title,
issueStatus: issue.status,
assigneeAgentId: issue.assigneeAgentId,
assigneeUserId: issue.assigneeUserId,
activeRunId: issue.activeRun?.id ?? null,
activeRunStatus: issue.activeRun?.status ?? null,
skipped: issue.skipped,
skipReason: issue.skipReason,
}));
const createdMembers = memberRows.length > 0
? await tx
.insert(issueTreeHoldMembers)
.values(memberRows)
.returning()
: [];
return { hold: toHold(createdHold, createdMembers) };
});
const resumedPauseHoldIds = activePauseHolds.map((hold) => hold.id);
if (resumedPauseHoldIds.length > 0) {
await Promise.all(
activePauseHolds.map((pauseHold) =>
releaseHold(companyId, pauseHold.rootIssueId, pauseHold.id, {
reason: releaseReason,
metadata: {
resumedByResumeHoldId: resumeHold.id,
resumeHoldMode: "tree_resume",
resumedPauseHoldId: pauseHold.id,
},
actor: input.actor,
}),
),
);
}
const releasedResumeHold = await releaseHold(companyId, rootIssueId, resumeHold.id, {
reason: releaseReason,
metadata: {
resumedPauseHoldIds,
resumeMode: "subtree",
...(input.releasePolicy ? { releasePolicy: holdReleasePolicy } : {}),
},
actor: input.actor,
});
return {
hold: releasedResumeHold,
preview: holdPreview,
resumedPauseHoldIds,
};
}
const { hold, members } = await db.transaction(async (tx) => {
const [createdHold] = await tx
.insert(issueTreeHolds)

View File

@@ -27,8 +27,13 @@ import {
projectWorkspaces,
projects,
} from "@paperclipai/db";
import type { IssueBlockerAttention, IssueRelationIssueSummary } from "@paperclipai/shared";
import { extractAgentMentionIds, extractProjectMentionIds, isUuidLike } from "@paperclipai/shared";
import type {
IssueBlockerAttention,
IssueProductivityReview,
IssueProductivityReviewTrigger,
IssueRelationIssueSummary,
} from "@paperclipai/shared";
import { clampIssueRequestDepth, extractAgentMentionIds, extractProjectMentionIds, isUuidLike } from "@paperclipai/shared";
import { conflict, notFound, unprocessable } from "../errors.js";
import {
defaultIssueExecutionWorkspaceSettingsForProject,
@@ -107,6 +112,7 @@ export interface IssueFilters {
includeBlockedBy?: boolean;
q?: string;
limit?: number;
offset?: number;
}
type IssueRow = typeof issues.$inferSelect;
@@ -666,6 +672,17 @@ const BLOCKER_ATTENTION_ACTIVE_WAKE_STATUSES = ["queued", "deferred_issue_execut
const BLOCKER_ATTENTION_PENDING_INTERACTION_STATUSES = ["pending"];
const BLOCKER_ATTENTION_PENDING_APPROVAL_STATUSES = ["pending", "revision_requested"];
const BLOCKER_ATTENTION_OPEN_RECOVERY_ORIGIN_KIND = "harness_liveness_escalation";
const PRODUCTIVITY_REVIEW_ORIGIN_KIND = "issue_productivity_review";
const PRODUCTIVITY_REVIEW_TERMINAL_STATUSES = ["done", "cancelled"];
const PRODUCTIVITY_REVIEW_ACTIVITY_ACTIONS = [
"issue.productivity_review_created",
"issue.productivity_review_updated",
];
const PRODUCTIVITY_REVIEW_TRIGGERS: readonly IssueProductivityReviewTrigger[] = [
"no_comment_streak",
"long_active_duration",
"high_churn",
];
const BLOCKER_ATTENTION_OPEN_RECOVERY_TERMINAL_STATUSES = ["done", "cancelled"];
const BLOCKER_ATTENTION_MAX_DEPTH = 8;
const BLOCKER_ATTENTION_MAX_NODES = 2000;
@@ -876,6 +893,114 @@ async function terminalExplicitBlockersByRoot(
return terminalByRoot;
}
/** Narrow an untyped activity-log detail value to a known review trigger, or null. */
function readProductivityReviewTrigger(value: unknown): IssueProductivityReviewTrigger | null {
  if (typeof value !== "string") return null;
  const candidate = value as IssueProductivityReviewTrigger;
  if (!PRODUCTIVITY_REVIEW_TRIGGERS.includes(candidate)) return null;
  return candidate;
}
/** Coerce an untyped detail value into a non-negative integer streak count; null when unusable. */
function readProductivityReviewStreak(value: unknown): number | null {
  if (typeof value === "number" && Number.isFinite(value) && value >= 0) {
    return Math.floor(value);
  }
  return null;
}
/**
 * Build a map from source issue id to its currently-open productivity review.
 *
 * A productivity review is an issue whose `originKind` is
 * PRODUCTIVITY_REVIEW_ORIGIN_KIND and whose `originId` points at the reviewed
 * (source) issue. Only non-hidden reviews in non-terminal statuses are
 * included. Trigger metadata (trigger kind, no-comment streak) is recovered
 * from the newest matching activity-log row for each review issue.
 *
 * @param dbOrTx - drizzle db or transaction handle
 * @param companyId - scopes every query to a single company
 * @param sourceIssueIds - issues whose open reviews should be looked up
 * @returns map keyed by source issue id; issues with no open review are absent
 */
async function listIssueProductivityReviewMap(
  dbOrTx: any,
  companyId: string,
  sourceIssueIds: string[],
): Promise<Map<string, IssueProductivityReview>> {
  const map = new Map<string, IssueProductivityReview>();
  if (sourceIssueIds.length === 0) return map;
  const reviewRows: Array<{
    sourceIssueId: string | null;
    reviewIssueId: string;
    reviewIdentifier: string | null;
    status: string;
    priority: string;
    createdAt: Date;
    updatedAt: Date;
  }> = [];
  // Dedupe ids and chunk the IN-list so each query stays bounded in size.
  for (const chunk of chunkList([...new Set(sourceIssueIds)], ISSUE_LIST_RELATED_QUERY_CHUNK_SIZE)) {
    const rows = await dbOrTx
      .select({
        sourceIssueId: issues.originId,
        reviewIssueId: issues.id,
        reviewIdentifier: issues.identifier,
        status: issues.status,
        priority: issues.priority,
        createdAt: issues.createdAt,
        updatedAt: issues.updatedAt,
      })
      .from(issues)
      .where(
        and(
          eq(issues.companyId, companyId),
          eq(issues.originKind, PRODUCTIVITY_REVIEW_ORIGIN_KIND),
          inArray(issues.originId, chunk),
          isNull(issues.hiddenAt),
          notInArray(issues.status, PRODUCTIVITY_REVIEW_TERMINAL_STATUSES),
        ),
      );
    reviewRows.push(...rows);
  }
  if (reviewRows.length === 0) return map;
  const reviewIssueIds = reviewRows.map((row) => row.reviewIssueId);
  const triggerByReviewIssueId = new Map<
    string,
    { trigger: IssueProductivityReviewTrigger | null; noCommentStreak: number | null }
  >();
  for (const chunk of chunkList(reviewIssueIds, ISSUE_LIST_RELATED_QUERY_CHUNK_SIZE)) {
    // Rows arrive newest-first; only the first entry per review issue is kept
    // below, so the reported trigger reflects the latest created/updated event.
    const detailRows = await dbOrTx
      .select({
        entityId: activityLog.entityId,
        details: activityLog.details,
        createdAt: activityLog.createdAt,
      })
      .from(activityLog)
      .where(
        and(
          eq(activityLog.companyId, companyId),
          eq(activityLog.entityType, "issue"),
          inArray(activityLog.entityId, chunk),
          inArray(activityLog.action, PRODUCTIVITY_REVIEW_ACTIVITY_ACTIONS),
        ),
      )
      .orderBy(desc(activityLog.createdAt));
    for (const row of detailRows as Array<{
      entityId: string;
      details: Record<string, unknown> | null;
      createdAt: Date;
    }>) {
      if (triggerByReviewIssueId.has(row.entityId)) continue;
      triggerByReviewIssueId.set(row.entityId, {
        trigger: readProductivityReviewTrigger(row.details?.trigger),
        noCommentStreak: readProductivityReviewStreak(row.details?.noCommentStreak),
      });
    }
  }
  // Assemble the result. If a source issue somehow has several open reviews,
  // the last row processed wins (map.set overwrites).
  for (const row of reviewRows) {
    if (!row.sourceIssueId) continue;
    const detail = triggerByReviewIssueId.get(row.reviewIssueId);
    map.set(row.sourceIssueId, {
      reviewIssueId: row.reviewIssueId,
      reviewIdentifier: row.reviewIdentifier,
      status: row.status as IssueProductivityReview["status"],
      priority: row.priority as IssueProductivityReview["priority"],
      trigger: detail?.trigger ?? null,
      noCommentStreak: detail?.noCommentStreak ?? null,
      createdAt: row.createdAt,
      updatedAt: row.updatedAt,
    });
  }
  return map;
}
async function listIssueBlockerAttentionMap(
dbOrTx: any,
companyId: string,
@@ -1959,6 +2084,9 @@ export function issueService(db: Db) {
const limit = typeof filters?.limit === "number" && Number.isFinite(filters.limit)
? Math.max(1, Math.floor(filters.limit))
: undefined;
const offset = typeof filters?.offset === "number" && Number.isFinite(filters.offset)
? Math.max(0, Math.floor(filters.offset))
: 0;
const touchedByUserId = filters?.touchedByUserId?.trim() || undefined;
const inboxArchivedByUserId = filters?.inboxArchivedByUserId?.trim() || undefined;
const unreadForUserId = filters?.unreadForUserId?.trim() || undefined;
@@ -2081,8 +2209,12 @@ export function issueService(db: Db) {
asc(priorityOrder),
desc(canonicalLastActivityAt),
desc(issues.updatedAt),
desc(issues.id),
);
const rows = (limit === undefined ? await baseQuery : await baseQuery.limit(limit)).map((row) => ({
const pageQuery = offset > 0
? (limit === undefined ? baseQuery.offset(offset) : baseQuery.limit(limit).offset(offset))
: (limit === undefined ? baseQuery : baseQuery.limit(limit));
const rows = (await pageQuery).map((row) => ({
...row,
description: decodeDatabaseTextPreview(row.description, ISSUE_LIST_DESCRIPTION_MAX_CHARS),
}));
@@ -2108,7 +2240,10 @@ export function issueService(db: Db) {
]);
const statsByIssueId = new Map(statsRows.map((row) => [row.issueId, row]));
const lastActivityByIssueId = new Map(lastActivityRows.map((row) => [row.issueId, row]));
const blockerAttentionByIssueId = await listIssueBlockerAttentionMap(db, companyId, withRuns);
const [blockerAttentionByIssueId, productivityReviewByIssueId] = await Promise.all([
listIssueBlockerAttentionMap(db, companyId, withRuns),
listIssueProductivityReviewMap(db, companyId, issueIds),
]);
if (!contextUserId) {
return withRuns.map((row) => {
@@ -2123,6 +2258,9 @@ export function issueService(db: Db) {
...(includeBlockedBy ? { blockedBy: blockedByMap.get(row.id) ?? [] } : {}),
lastActivityAt,
...(blockerAttentionByIssueId.has(row.id) ? { blockerAttention: blockerAttentionByIssueId.get(row.id) } : {}),
...(productivityReviewByIssueId.has(row.id)
? { productivityReview: productivityReviewByIssueId.get(row.id) }
: {}),
};
});
}
@@ -2141,6 +2279,9 @@ export function issueService(db: Db) {
...(includeBlockedBy ? { blockedBy: blockedByMap.get(row.id) ?? [] } : {}),
lastActivityAt,
...(blockerAttentionByIssueId.has(row.id) ? { blockerAttention: blockerAttentionByIssueId.get(row.id) } : {}),
...(productivityReviewByIssueId.has(row.id)
? { productivityReview: productivityReviewByIssueId.get(row.id) }
: {}),
...deriveIssueUserContext(row, contextUserId, {
myLastCommentAt: statsByIssueId.get(row.id)?.myLastCommentAt ?? null,
myLastReadAt: readByIssueId.get(row.id) ?? null,
@@ -2292,6 +2433,14 @@ export function issueService(db: Db) {
return listIssueBlockerAttentionMap(dbOrTx, companyId, issueRows);
},
listProductivityReviews: async (
companyId: string,
sourceIssueIds: string[],
dbOrTx: any = db,
) => {
return listIssueProductivityReviewMap(dbOrTx, companyId, sourceIssueIds);
},
listWakeableBlockedDependents: async (blockerIssueId: string) => {
const blockerIssue = await db
.select({ id: issues.id, companyId: issues.companyId })
@@ -2458,7 +2607,9 @@ export function issueService(db: Db) {
parentId: parent.id,
projectId: issueData.projectId ?? parent.projectId,
goalId: issueData.goalId ?? parent.goalId,
requestDepth: Math.max(parent.requestDepth + 1, issueData.requestDepth ?? 0),
requestDepth: clampIssueRequestDepth(
Math.max(clampIssueRequestDepth(parent.requestDepth) + 1, issueData.requestDepth ?? 0),
),
description: appendAcceptanceCriteriaToDescription(issueData.description, acceptanceCriteria),
inheritExecutionWorkspaceFromIssueId: parent.id,
});
@@ -2615,6 +2766,7 @@ export function issueService(db: Db) {
const values = {
...issueData,
requestDepth: clampIssueRequestDepth(issueData.requestDepth),
originKind: issueData.originKind ?? "manual",
goalId: resolveIssueGoalId({
projectId: issueData.projectId,
@@ -2700,6 +2852,9 @@ export function issueService(db: Db) {
...issueData,
updatedAt: new Date(),
};
if (issueData.requestDepth !== undefined) {
patch.requestDepth = clampIssueRequestDepth(issueData.requestDepth);
}
const nextAssigneeAgentId =
issueData.assigneeAgentId !== undefined ? issueData.assigneeAgentId : existing.assigneeAgentId;

View File

@@ -0,0 +1,792 @@
import { and, asc, desc, eq, gt, inArray, isNull, notInArray, sql } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { clampIssueRequestDepth } from "@paperclipai/shared";
import {
agents,
companies,
costEvents,
heartbeatRuns,
issueComments,
issues,
projects,
} from "@paperclipai/db";
import { logger } from "../middleware/logger.js";
import { logActivity } from "./activity-log.js";
import { budgetService } from "./budgets.js";
import { issueService } from "./issues.js";
import { RECOVERY_ORIGIN_KINDS } from "./recovery/origins.js";
export const PRODUCTIVITY_REVIEW_ORIGIN_KIND = RECOVERY_ORIGIN_KINDS.issueProductivityReview;
export const DEFAULT_PRODUCTIVITY_REVIEW_NO_COMMENT_STREAK_RUNS = 10;
export const DEFAULT_PRODUCTIVITY_REVIEW_LONG_ACTIVE_HOURS = 6;
export const DEFAULT_PRODUCTIVITY_REVIEW_HIGH_CHURN_HOURLY = 10;
export const DEFAULT_PRODUCTIVITY_REVIEW_HIGH_CHURN_SIX_HOURS = 30;
export const DEFAULT_PRODUCTIVITY_REVIEW_RESOLVED_SNOOZE_MS = 6 * 60 * 60 * 1000;
const TERMINAL_RUN_STATUSES = ["succeeded", "failed", "cancelled", "timed_out"] as const;
const ACTIVE_RUN_STATUSES = ["queued", "running", "scheduled_retry"] as const;
const MAX_CANDIDATE_ISSUES = 250;
const MAX_RUNS_FOR_STREAK = 100;
const MAX_PARENT_WALK_DEPTH = 25;
type IssueRow = typeof issues.$inferSelect;
type AgentRow = typeof agents.$inferSelect;
type HeartbeatRunRow = typeof heartbeatRuns.$inferSelect;
type ProductivityReviewTrigger = "no_comment_streak" | "long_active_duration" | "high_churn";
type ProductivityReviewThresholds = {
noCommentStreakRuns: number;
longActiveMs: number;
highChurnHourly: number;
highChurnSixHours: number;
resolvedSnoozeMs: number;
};
type ProductivityReviewEvidence = {
trigger: ProductivityReviewTrigger;
triggerReasons: string[];
sourceIssue: IssueRow;
sourceAgent: AgentRow;
noCommentStreak: number;
totalRunCount: number;
terminalRunCount: number;
activeRunCount: number;
runCountLastHour: number;
runCountLastSixHours: number;
commentCount: number;
commentCountLastHour: number;
commentCountLastSixHours: number;
elapsedMs: number | null;
latestRuns: HeartbeatRunRow[];
latestComments: Array<typeof issueComments.$inferSelect>;
costCents: number;
usageSamples: Array<{ runId: string; usageJson: Record<string, unknown> | null }>;
nextAction: string | null;
thresholds: ProductivityReviewThresholds;
generatedAt: Date;
};
type EnqueueWakeup = (
agentId: string,
opts?: {
source?: "timer" | "assignment" | "on_demand" | "automation";
triggerDetail?: "manual" | "ping" | "callback" | "system";
reason?: string | null;
payload?: Record<string, unknown> | null;
requestedByActorType?: "user" | "agent" | "system";
requestedByActorId?: string | null;
contextSnapshot?: Record<string, unknown>;
},
) => Promise<unknown | null>;
/** Stable origin fingerprint used to identify the productivity review for a source issue. */
function productivityReviewFingerprint(sourceIssueId: string) {
  const kind = "productivity-review";
  return `${kind}:${sourceIssueId}`;
}
/**
 * SQL predicate matching heartbeat runs whose context snapshot references the
 * given issue under any of the keys `issueId`, `taskId`, or `taskKey`.
 * The interpolations are bound parameters via drizzle's sql tag, not string
 * concatenation, so `issueId` is safe against injection.
 */
function issueRunScopeSql(issueId: string) {
  return sql`(
    ${heartbeatRuns.contextSnapshot}->>'issueId' = ${issueId}
    or ${heartbeatRuns.contextSnapshot}->>'taskId' = ${issueId}
    or ${heartbeatRuns.contextSnapshot}->>'taskKey' = ${issueId}
  )`;
}
/**
 * Render a millisecond duration as compact human text: "45m", "3h 20m",
 * "2d 5h". A null duration renders as "unknown".
 */
function msToHuman(ms: number | null) {
  if (ms === null) return "unknown";
  const totalMinutes = Math.floor(ms / 60_000);
  const totalHours = Math.floor(totalMinutes / 60);
  const wholeDays = Math.floor(totalHours / 24);
  if (totalMinutes < 60) return `${totalMinutes}m`;
  return wholeDays > 0
    ? `${wholeDays}d ${totalHours % 24}h`
    : `${totalHours}h ${totalMinutes % 60}m`;
}
/** Markdown link to an issue in the UI; prefers the human identifier over the raw id. */
function issueUiLink(issue: { identifier: string | null; id: string }, prefix: string) {
  const label = issue.identifier ?? issue.id;
  const href = `/${prefix}/issues/${label}`;
  return `[${label}](${href})`;
}
/** Markdown link to a heartbeat run nested under its agent in the UI. */
function runUiLink(run: { id: string; agentId: string }, prefix: string) {
  const href = `/${prefix}/agents/${run.agentId}/runs/${run.id}`;
  return `[${run.id}](${href})`;
}
/**
 * Collapse all whitespace runs in `value` to single spaces, trim, and cap the
 * result at `max` characters, appending "..." when content was dropped.
 * Null/undefined/empty input yields "".
 *
 * @param value - raw text (may contain newlines/tabs); nullish means "no text"
 * @param max - maximum length of the returned string, including the ellipsis
 */
function truncateInline(value: string | null | undefined, max = 260) {
  if (!value) return "";
  const compact = value.replace(/\s+/g, " ").trim();
  if (compact.length <= max) return compact;
  // Guard tiny budgets: for max < 3 the "..." suffix alone would overflow and
  // slice(0, max - 3) would use a negative index (keeping almost everything).
  if (max < 3) return compact.slice(0, Math.max(0, max));
  return `${compact.slice(0, max - 3)}...`;
}
/** Floor `value` when it is a finite positive number; otherwise return `fallback`. */
function readPositiveInteger(value: number, fallback: number) {
  if (!Number.isFinite(value) || value <= 0) return fallback;
  return Math.floor(value);
}
/**
 * Merge caller-supplied threshold overrides with the module defaults.
 * Each field is validated through readPositiveInteger, so a non-finite or
 * non-positive override falls back to the corresponding default.
 */
function buildThresholds(overrides?: Partial<ProductivityReviewThresholds>): ProductivityReviewThresholds {
  const longActiveDefaultMs = DEFAULT_PRODUCTIVITY_REVIEW_LONG_ACTIVE_HOURS * 60 * 60 * 1000;
  const pick = (candidate: number | undefined, fallback: number) =>
    readPositiveInteger(candidate ?? fallback, fallback);
  return {
    noCommentStreakRuns: pick(
      overrides?.noCommentStreakRuns,
      DEFAULT_PRODUCTIVITY_REVIEW_NO_COMMENT_STREAK_RUNS,
    ),
    longActiveMs: pick(overrides?.longActiveMs, longActiveDefaultMs),
    highChurnHourly: pick(overrides?.highChurnHourly, DEFAULT_PRODUCTIVITY_REVIEW_HIGH_CHURN_HOURLY),
    highChurnSixHours: pick(
      overrides?.highChurnSixHours,
      DEFAULT_PRODUCTIVITY_REVIEW_HIGH_CHURN_SIX_HOURS,
    ),
    resolvedSnoozeMs: pick(overrides?.resolvedSnoozeMs, DEFAULT_PRODUCTIVITY_REVIEW_RESOLVED_SNOOZE_MS),
  };
}
/**
 * Pick the single highest-priority trigger from the fired conditions.
 * Priority order: no-comment streak, then high churn, then long active
 * duration. Returns null when nothing fired.
 */
function choosePrimaryTrigger(input: {
  noComment: boolean;
  longActive: boolean;
  highChurn: boolean;
}): ProductivityReviewTrigger | null {
  const ordered: Array<[boolean, ProductivityReviewTrigger]> = [
    [input.noComment, "no_comment_streak"],
    [input.highChurn, "high_churn"],
    [input.longActive, "long_active_duration"],
  ];
  const hit = ordered.find(([fired]) => fired);
  return hit ? hit[1] : null;
}
/** True for the triggers that indicate stalled/spinning work rather than merely long-running work. */
function isSoftStopTrigger(trigger: ProductivityReviewTrigger) {
  const softStopTriggers: string[] = ["no_comment_streak", "high_churn"];
  return softStopTriggers.includes(trigger);
}
/** Human-readable label for a productivity-review trigger. */
function formatTrigger(trigger: ProductivityReviewTrigger) {
  switch (trigger) {
    case "no_comment_streak":
      return "No-comment streak";
    case "high_churn":
      return "High churn";
    default:
      return "Long active duration";
  }
}
export function productivityReviewService(db: Db, deps?: { enqueueWakeup?: EnqueueWakeup }) {
const issuesSvc = issueService(db);
const budgets = budgetService(db);
/** Company issue prefix used to build UI links; defaults to "PAP" when the company row is missing. */
async function getCompanyIssuePrefix(companyId: string) {
  const rows = await db
    .select({ issuePrefix: companies.issuePrefix })
    .from(companies)
    .where(eq(companies.id, companyId));
  return rows[0]?.issuePrefix ?? "PAP";
}
/** Load a single agent row by id, or null when no such agent exists. */
async function getAgent(agentId: string) {
  const rows = await db.select().from(agents).where(eq(agents.id, agentId));
  return rows[0] ?? null;
}
/** An agent can be woken/assigned unless it is missing, paused, terminated, or pending approval. */
function isAgentInvokable(agent: AgentRow | null | undefined) {
  if (!agent) return false;
  const blockedStatuses = ["paused", "terminated", "pending_approval"];
  return !blockedStatuses.includes(agent.status);
}
/**
 * Walk up the parent chain (bounded by MAX_PARENT_WALK_DEPTH hops) and report
 * whether any ancestor is itself a productivity-review issue. The reconcile
 * loop uses this to avoid opening reviews on work spawned under a review.
 * A broken parent link (missing row) or exceeding the depth cap returns false.
 */
async function isProductivityReviewDescendant(issue: Pick<IssueRow, "companyId" | "parentId">) {
  let parentId = issue.parentId;
  let depth = 0;
  while (parentId && depth < MAX_PARENT_WALK_DEPTH) {
    const parent = await db
      .select({ id: issues.id, parentId: issues.parentId, originKind: issues.originKind })
      .from(issues)
      .where(and(eq(issues.companyId, issue.companyId), eq(issues.id, parentId)))
      .then((rows) => rows[0] ?? null);
    if (!parent) return false;
    if (parent.originKind === PRODUCTIVITY_REVIEW_ORIGIN_KIND) return true;
    parentId = parent.parentId;
    depth += 1;
  }
  return false;
}
/**
 * Find the most recently updated open (non-hidden, not done/cancelled)
 * productivity review for `sourceIssueId`, or null when none exists.
 */
async function findOpenProductivityReview(companyId: string, sourceIssueId: string) {
  return db
    .select()
    .from(issues)
    .where(
      and(
        eq(issues.companyId, companyId),
        eq(issues.originKind, PRODUCTIVITY_REVIEW_ORIGIN_KIND),
        eq(issues.originId, sourceIssueId),
        isNull(issues.hiddenAt),
        notInArray(issues.status, ["done", "cancelled"]),
      ),
    )
    .orderBy(desc(issues.updatedAt))
    .limit(1)
    .then((rows) => rows[0] ?? null);
}
/**
 * Find the most recently updated review for `sourceIssueId` that was marked
 * "done" within the resolved-snooze window ending at `now`. A hit means the
 * source issue should not get a fresh review yet (manager recently signed off).
 *
 * @returns a slim summary row, or null when no review resolved inside the window
 */
async function findRecentResolvedProductivityReview(
  companyId: string,
  sourceIssueId: string,
  thresholds: ProductivityReviewThresholds,
  now: Date,
) {
  // Only reviews updated strictly after this cutoff count as "recent".
  const cutoff = new Date(now.getTime() - thresholds.resolvedSnoozeMs);
  return db
    .select({ id: issues.id, identifier: issues.identifier, status: issues.status, updatedAt: issues.updatedAt })
    .from(issues)
    .where(
      and(
        eq(issues.companyId, companyId),
        eq(issues.originKind, PRODUCTIVITY_REVIEW_ORIGIN_KIND),
        eq(issues.originId, sourceIssueId),
        eq(issues.status, "done"),
        gt(issues.updatedAt, cutoff),
      ),
    )
    .orderBy(desc(issues.updatedAt))
    .limit(1)
    .then((rows) => rows[0] ?? null);
}
/**
 * Count heartbeat runs by `agentId` that are scoped to `issueId` (via the
 * context-snapshot predicate) and started on/after `since`. Uses
 * coalesce(startedAt, createdAt) so queued runs without a start time still count.
 */
async function countIssueRunsSince(companyId: string, agentId: string, issueId: string, since: Date) {
  return db
    .select({ count: sql<number>`count(*)::int` })
    .from(heartbeatRuns)
    .where(
      and(
        eq(heartbeatRuns.companyId, companyId),
        eq(heartbeatRuns.agentId, agentId),
        issueRunScopeSql(issueId),
        sql`coalesce(${heartbeatRuns.startedAt}, ${heartbeatRuns.createdAt}) >= ${since.toISOString()}::timestamptz`,
      ),
    )
    .then((rows) => rows[0]?.count ?? 0);
}
/**
 * Count comments on `issueId` authored by `agentId` that were created by one
 * of that agent's issue-scoped heartbeat runs (inner join on createdByRunId
 * excludes comments not tied to a run). When `since` is provided, only
 * comments created on/after it count; drizzle's and() drops the undefined
 * condition otherwise.
 */
async function countIssueCommentsSince(companyId: string, issueId: string, agentId: string, since?: Date) {
  return db
    .select({ count: sql<number>`count(*)::int` })
    .from(issueComments)
    .innerJoin(heartbeatRuns, eq(heartbeatRuns.id, issueComments.createdByRunId))
    .where(
      and(
        eq(issueComments.companyId, companyId),
        eq(issueComments.issueId, issueId),
        eq(issueComments.authorAgentId, agentId),
        eq(heartbeatRuns.companyId, companyId),
        eq(heartbeatRuns.agentId, agentId),
        issueRunScopeSql(issueId),
        since ? sql`${issueComments.createdAt} >= ${since.toISOString()}::timestamptz` : undefined,
      ),
    )
    .then((rows) => rows[0]?.count ?? 0);
}
/**
 * Gather run/comment/cost evidence for one candidate issue and decide whether
 * any productivity trigger fires. Returns null when no trigger fires (the
 * candidate is healthy); otherwise returns a full evidence bundle used to
 * build or refresh a review issue.
 *
 * Triggers evaluated:
 * - no_comment_streak: >= thresholds.noCommentStreakRuns consecutive most
 *   recent terminal runs produced no run-created issue comment
 * - long_active_duration: issue is in_progress and its active episode has
 *   lasted >= thresholds.longActiveMs
 * - high_churn: run or assignee-run-comment counts exceed the 1h/6h thresholds
 */
async function collectEvidence(
  sourceIssue: IssueRow,
  sourceAgent: AgentRow,
  thresholds: ProductivityReviewThresholds,
  now: Date,
): Promise<ProductivityReviewEvidence | null> {
  const oneHourAgo = new Date(now.getTime() - 60 * 60 * 1000);
  const sixHoursAgo = new Date(now.getTime() - 6 * 60 * 60 * 1000);
  // Sample the newest issue-scoped runs for this agent (bounded by
  // MAX_RUNS_FOR_STREAK); all streak/active-run stats derive from this sample.
  const latestRuns = await db
    .select()
    .from(heartbeatRuns)
    .where(
      and(
        eq(heartbeatRuns.companyId, sourceIssue.companyId),
        eq(heartbeatRuns.agentId, sourceAgent.id),
        issueRunScopeSql(sourceIssue.id),
      ),
    )
    .orderBy(desc(heartbeatRuns.createdAt), desc(heartbeatRuns.id))
    .limit(MAX_RUNS_FOR_STREAK);
  const runIds = latestRuns.map((run) => run.id);
  // Ids of sampled runs that created at least one comment on this issue.
  const commentRunIds = new Set<string>();
  if (runIds.length > 0) {
    const commentRows = await db
      .select({ createdByRunId: issueComments.createdByRunId })
      .from(issueComments)
      .where(
        and(
          eq(issueComments.companyId, sourceIssue.companyId),
          eq(issueComments.issueId, sourceIssue.id),
          inArray(issueComments.createdByRunId, runIds),
        ),
      );
    for (const row of commentRows) {
      if (row.createdByRunId) commentRunIds.add(row.createdByRunId);
    }
  }
  const terminalRuns = latestRuns.filter((run) =>
    TERMINAL_RUN_STATUSES.includes(run.status as (typeof TERMINAL_RUN_STATUSES)[number]),
  );
  // Count consecutive comment-less terminal runs, newest first; stop at the
  // first terminal run that did produce a comment.
  let noCommentStreak = 0;
  for (const run of terminalRuns) {
    if (commentRunIds.has(run.id)) break;
    noCommentStreak += 1;
  }
  // Windowed counts, latest comment samples, and total cost — independent
  // queries run in parallel.
  const [
    runCountLastHour,
    runCountLastSixHours,
    assigneeRunCommentCount,
    assigneeRunCommentCountLastHour,
    assigneeRunCommentCountLastSixHours,
    latestComments,
    costRow,
  ] = await Promise.all([
    countIssueRunsSince(sourceIssue.companyId, sourceAgent.id, sourceIssue.id, oneHourAgo),
    countIssueRunsSince(sourceIssue.companyId, sourceAgent.id, sourceIssue.id, sixHoursAgo),
    countIssueCommentsSince(sourceIssue.companyId, sourceIssue.id, sourceAgent.id),
    countIssueCommentsSince(sourceIssue.companyId, sourceIssue.id, sourceAgent.id, oneHourAgo),
    countIssueCommentsSince(sourceIssue.companyId, sourceIssue.id, sourceAgent.id, sixHoursAgo),
    db
      .select({ comment: issueComments })
      .from(issueComments)
      .innerJoin(heartbeatRuns, eq(heartbeatRuns.id, issueComments.createdByRunId))
      .where(
        and(
          eq(issueComments.companyId, sourceIssue.companyId),
          eq(issueComments.issueId, sourceIssue.id),
          eq(issueComments.authorAgentId, sourceAgent.id),
          eq(heartbeatRuns.companyId, sourceIssue.companyId),
          eq(heartbeatRuns.agentId, sourceAgent.id),
          issueRunScopeSql(sourceIssue.id),
        ),
      )
      .orderBy(desc(issueComments.createdAt), desc(issueComments.id))
      .limit(5)
      .then((rows) => rows.map((row) => row.comment)),
    db
      .select({ costCents: sql<number>`coalesce(sum(${costEvents.costCents}), 0)::int` })
      .from(costEvents)
      .where(and(eq(costEvents.companyId, sourceIssue.companyId), eq(costEvents.issueId, sourceIssue.id)))
      .then((rows) => rows[0] ?? { costCents: 0 }),
  ]);
  const activeRunCount = latestRuns.filter((run) =>
    ACTIVE_RUN_STATUSES.includes(run.status as (typeof ACTIVE_RUN_STATUSES)[number]),
  ).length;
  // Active elapsed time only applies to in_progress issues with a recorded
  // start (startedAt, falling back to executionLockedAt).
  const activeStartedAt = sourceIssue.startedAt ?? sourceIssue.executionLockedAt ?? null;
  const elapsedMs = sourceIssue.status === "in_progress" && activeStartedAt
    ? Math.max(0, now.getTime() - activeStartedAt.getTime())
    : null;
  const noComment = noCommentStreak >= thresholds.noCommentStreakRuns;
  const longActive = elapsedMs !== null && elapsedMs >= thresholds.longActiveMs;
  const highChurn =
    runCountLastHour >= thresholds.highChurnHourly ||
    assigneeRunCommentCountLastHour >= thresholds.highChurnHourly ||
    runCountLastSixHours >= thresholds.highChurnSixHours ||
    assigneeRunCommentCountLastSixHours >= thresholds.highChurnSixHours;
  const trigger = choosePrimaryTrigger({ noComment, longActive, highChurn });
  if (!trigger) return null;
  // Human-readable reason strings for every fired trigger (not just the primary).
  const triggerReasons: string[] = [];
  if (noComment) triggerReasons.push(`${noCommentStreak} consecutive completed issue-linked runs had no run-created issue comment`);
  if (longActive) triggerReasons.push(`current active episode has lasted ${msToHuman(elapsedMs)}`);
  if (highChurn) {
    triggerReasons.push(
      `${runCountLastHour} runs/${assigneeRunCommentCountLastHour} assignee-run comments in 1h; ${runCountLastSixHours} runs/${assigneeRunCommentCountLastSixHours} assignee-run comments in 6h`,
    );
  }
  return {
    trigger,
    triggerReasons,
    sourceIssue,
    sourceAgent,
    noCommentStreak,
    totalRunCount: latestRuns.length,
    terminalRunCount: terminalRuns.length,
    activeRunCount,
    runCountLastHour,
    runCountLastSixHours,
    commentCount: assigneeRunCommentCount,
    commentCountLastHour: assigneeRunCommentCountLastHour,
    commentCountLastSixHours: assigneeRunCommentCountLastSixHours,
    elapsedMs,
    latestRuns: latestRuns.slice(0, 5),
    latestComments,
    costCents: costRow.costCents,
    usageSamples: latestRuns
      .filter((run) => run.usageJson)
      .slice(0, 3)
      .map((run) => ({ runId: run.id, usageJson: run.usageJson ?? null })),
    nextAction: latestRuns.find((run) => run.nextAction)?.nextAction ?? null,
    thresholds,
    generatedAt: now,
  };
}
/**
 * Choose the agent that should own the review issue. Candidates are tried in
 * priority order: the source agent's manager (reportsTo), the issue's creator
 * agent, the project lead, then company agents with role cto before ceo.
 * The first candidate in the same company that is invokable and not blocked
 * by budget wins; returns null when nobody qualifies.
 */
async function resolveReviewOwnerAgentId(sourceIssue: IssueRow, sourceAgent: AgentRow) {
  const candidateIds: string[] = [];
  if (sourceAgent.reportsTo) candidateIds.push(sourceAgent.reportsTo);
  if (sourceIssue.createdByAgentId) candidateIds.push(sourceIssue.createdByAgentId);
  if (sourceIssue.projectId) {
    const project = await db
      .select({ leadAgentId: projects.leadAgentId })
      .from(projects)
      .where(and(eq(projects.companyId, sourceIssue.companyId), eq(projects.id, sourceIssue.projectId)))
      .then((rows) => rows[0] ?? null);
    if (project?.leadAgentId) candidateIds.push(project.leadAgentId);
  }
  // Role-based fallbacks: cto first, then ceo; deterministic tie-break by
  // creation time then id.
  const roleCandidates = await db
    .select({ id: agents.id })
    .from(agents)
    .where(and(eq(agents.companyId, sourceIssue.companyId), inArray(agents.role, ["cto", "ceo"])))
    .orderBy(sql`case when ${agents.role} = 'cto' then 0 else 1 end`, asc(agents.createdAt), asc(agents.id));
  candidateIds.push(...roleCandidates.map((agent) => agent.id));
  // Sequential checks on purpose: candidate order encodes priority.
  const seen = new Set<string>();
  for (const agentId of candidateIds) {
    if (seen.has(agentId)) continue;
    seen.add(agentId);
    const candidate = await getAgent(agentId);
    if (!candidate || candidate.companyId !== sourceIssue.companyId || !isAgentInvokable(candidate)) continue;
    const budgetBlock = await budgets.getInvocationBlock(sourceIssue.companyId, candidate.id, {
      issueId: sourceIssue.id,
      projectId: sourceIssue.projectId ?? null,
    });
    if (!budgetBlock) return candidate.id;
  }
  return null;
}
/**
 * Render the full markdown description for a new review issue from the
 * collected evidence: Source, Evidence, Thresholds, Latest Runs, Latest
 * Assignee Run Comments, Usage Samples, and Manager Decision sections.
 * `prefix` is the company issue prefix used in UI links.
 */
function buildReviewMarkdown(evidence: ProductivityReviewEvidence, prefix: string) {
  // Bullet list of sampled runs with status/liveness and optional next action.
  const latestRuns = evidence.latestRuns.length > 0
    ? evidence.latestRuns.map((run) =>
      `- ${runUiLink(run, prefix)} \`${run.status}\` liveness \`${run.livenessState ?? "unknown"}\`, created ${run.createdAt.toISOString()}${run.nextAction ? `, next action: ${truncateInline(run.nextAction, 160)}` : ""}`,
    ).join("\n")
    : "- none";
  const latestComments = evidence.latestComments.length > 0
    ? evidence.latestComments.map((comment) =>
      `- ${comment.createdAt.toISOString()}${comment.createdByRunId ? ` run \`${comment.createdByRunId}\`` : ""}: ${truncateInline(comment.body)}`,
    ).join("\n")
    : "- none";
  // Usage JSON is capped at 500 chars per sample to keep the issue body small.
  const usage = evidence.usageSamples.length > 0
    ? evidence.usageSamples.map((sample) => `- \`${sample.runId}\`: \`${JSON.stringify(sample.usageJson).slice(0, 500)}\``).join("\n")
    : "- no usage payloads on sampled runs"
  return [
    "Paperclip detected an unusual productivity/progression pattern on an assigned issue.",
    "",
    "## Source",
    "",
    `- Source issue: ${issueUiLink(evidence.sourceIssue, prefix)}`,
    `- Assigned agent: ${evidence.sourceAgent.name} (${evidence.sourceAgent.role})`,
    `- Primary trigger: \`${evidence.trigger}\` (${formatTrigger(evidence.trigger)})`,
    `- Trigger reasons: ${evidence.triggerReasons.join("; ")}`,
    `- Generated at: ${evidence.generatedAt.toISOString()}`,
    "",
    "## Evidence",
    "",
    `- Total sampled issue-linked runs: ${evidence.totalRunCount}`,
    `- Terminal sampled runs: ${evidence.terminalRunCount}`,
    `- Active queued/running/scheduled runs: ${evidence.activeRunCount}`,
    `- No-comment completed-run streak: ${evidence.noCommentStreak}`,
    `- Current active elapsed time: ${msToHuman(evidence.elapsedMs)}`,
    `- Runs in rolling windows: ${evidence.runCountLastHour}/1h, ${evidence.runCountLastSixHours}/6h`,
    `- Assignee run-linked comments total/window: ${evidence.commentCount} total, ${evidence.commentCountLastHour}/1h, ${evidence.commentCountLastSixHours}/6h`,
    `- Cost events total: ${evidence.costCents} cents`,
    `- Current next action: ${evidence.nextAction ? truncateInline(evidence.nextAction, 500) : "none recorded"}`,
    "",
    "## Thresholds",
    "",
    `- No-comment streak: ${evidence.thresholds.noCommentStreakRuns} completed runs`,
    `- Long active duration: ${msToHuman(evidence.thresholds.longActiveMs)}`,
    `- High churn: ${evidence.thresholds.highChurnHourly}/1h or ${evidence.thresholds.highChurnSixHours}/6h runs/assignee-run comments`,
    `- Resolved-review snooze: ${msToHuman(evidence.thresholds.resolvedSnoozeMs)}`,
    "",
    "## Latest Runs",
    "",
    latestRuns,
    "",
    "## Latest Assignee Run Comments",
    "",
    latestComments,
    "",
    "## Usage Samples",
    "",
    usage,
    "",
    "## Manager Decision",
    "",
    "- Close as productive if this pattern is expected.",
    "- Continue with a snooze window if the current work should keep running without repeat review spam.",
    "- Request decomposition, reroute, block with an unblock owner, or stop/cancel the source work if the work is inefficient.",
  ].join("\n");
}
/**
 * Render the short markdown comment posted on an already-open review issue
 * when a reconcile pass re-fires a trigger; summarizes the fresh evidence
 * without recreating the full review body.
 */
function buildRefreshComment(evidence: ProductivityReviewEvidence, prefix: string) {
  return [
    "Productivity review evidence refreshed.",
    "",
    `- Source issue: ${issueUiLink(evidence.sourceIssue, prefix)}`,
    `- Trigger: \`${evidence.trigger}\` (${formatTrigger(evidence.trigger)})`,
    `- Reasons: ${evidence.triggerReasons.join("; ")}`,
    `- No-comment streak: ${evidence.noCommentStreak}`,
    `- Runs/assignee comments: ${evidence.runCountLastHour}/${evidence.commentCountLastHour} in 1h, ${evidence.runCountLastSixHours}/${evidence.commentCountLastSixHours} in 6h`,
    `- Next action: ${evidence.nextAction ? truncateInline(evidence.nextAction, 300) : "none recorded"}`,
  ].join("\n");
}
/**
 * Create a review issue for the evidence, or refresh an existing open one.
 *
 * - Existing open review: post a refresh comment, log an "updated" activity,
 *   and return { kind: "updated" }.
 * - Otherwise create a child review issue under the source issue (handling a
 *   concurrent-create race on the unique partial index), log a "created"
 *   activity, and optionally wake the owning agent via deps.enqueueWakeup.
 *
 * @returns the outcome kind ("created" | "updated" | "existing") plus the
 *   review issue id
 */
async function createOrUpdateReview(
  evidence: ProductivityReviewEvidence,
  opts: { prefix: string },
) {
  const existing = await findOpenProductivityReview(evidence.sourceIssue.companyId, evidence.sourceIssue.id);
  if (existing) {
    await issuesSvc.addComment(existing.id, buildRefreshComment(evidence, opts.prefix), {});
    await logActivity(db, {
      companyId: evidence.sourceIssue.companyId,
      actorType: "system",
      actorId: "system",
      action: "issue.productivity_review_updated",
      entityType: "issue",
      entityId: existing.id,
      agentId: existing.assigneeAgentId,
      details: {
        source: "productivity_review.reconcile",
        sourceIssueId: evidence.sourceIssue.id,
        trigger: evidence.trigger,
        noCommentStreak: evidence.noCommentStreak,
        runCountLastHour: evidence.runCountLastHour,
        commentCountLastHour: evidence.commentCountLastHour,
      },
    });
    return { kind: "updated" as const, reviewIssueId: existing.id };
  }
  const ownerAgentId = await resolveReviewOwnerAgentId(evidence.sourceIssue, evidence.sourceAgent);
  let review: Awaited<ReturnType<typeof issuesSvc.create>>;
  try {
    review = await issuesSvc.create(evidence.sourceIssue.companyId, {
      title: `Review productivity for ${evidence.sourceIssue.identifier ?? evidence.sourceIssue.title}`,
      description: buildReviewMarkdown(evidence, opts.prefix),
      status: "todo",
      // Long-active work is informational; stall/churn patterns get "high".
      priority: evidence.trigger === "long_active_duration" ? "medium" : "high",
      parentId: evidence.sourceIssue.id,
      projectId: evidence.sourceIssue.projectId,
      goalId: evidence.sourceIssue.goalId,
      billingCode: evidence.sourceIssue.billingCode,
      assigneeAgentId: ownerAgentId,
      originKind: PRODUCTIVITY_REVIEW_ORIGIN_KIND,
      originId: evidence.sourceIssue.id,
      originFingerprint: productivityReviewFingerprint(evidence.sourceIssue.id),
      requestDepth: clampIssueRequestDepth(evidence.sourceIssue.requestDepth + 1),
    });
  } catch (error) {
    // 23505 = Postgres unique_violation: a concurrent reconcile created the
    // review first. Treat it as "existing" if we can load the winner.
    const maybe = error as { code?: string; constraint?: string; message?: string };
    const uniqueConflict = maybe.code === "23505" &&
      (
        maybe.constraint === "issues_active_productivity_review_uq" ||
        typeof maybe.message === "string" && maybe.message.includes("issues_active_productivity_review_uq")
      );
    if (!uniqueConflict) throw error;
    const raced = await findOpenProductivityReview(evidence.sourceIssue.companyId, evidence.sourceIssue.id);
    if (!raced) throw error;
    return { kind: "existing" as const, reviewIssueId: raced.id };
  }
  await logActivity(db, {
    companyId: evidence.sourceIssue.companyId,
    actorType: "system",
    actorId: "system",
    action: "issue.productivity_review_created",
    entityType: "issue",
    entityId: review.id,
    agentId: ownerAgentId,
    details: {
      source: "productivity_review.reconcile",
      sourceIssueId: evidence.sourceIssue.id,
      trigger: evidence.trigger,
      noCommentStreak: evidence.noCommentStreak,
      runCountLastHour: evidence.runCountLastHour,
      commentCountLastHour: evidence.commentCountLastHour,
    },
  });
  // Wake the owner immediately when an enqueue hook was injected.
  if (ownerAgentId && deps?.enqueueWakeup) {
    await deps.enqueueWakeup(ownerAgentId, {
      source: "assignment",
      triggerDetail: "system",
      reason: "issue_assigned",
      payload: {
        issueId: review.id,
        sourceIssueId: evidence.sourceIssue.id,
        trigger: evidence.trigger,
      },
      requestedByActorType: "system",
      requestedByActorId: "productivity_review",
      contextSnapshot: {
        issueId: review.id,
        taskId: review.id,
        wakeReason: "issue_assigned",
        source: PRODUCTIVITY_REVIEW_ORIGIN_KIND,
        sourceIssueId: evidence.sourceIssue.id,
        productivityReviewTrigger: evidence.trigger,
      },
    });
  }
  return { kind: "created" as const, reviewIssueId: review.id };
}
/**
 * One reconcile pass: scan up to MAX_CANDIDATE_ISSUES agent-assigned open
 * issues (oldest-updated first), collect evidence for each, and create/update
 * review issues where a trigger fires.
 *
 * Skipped candidates: no agent assignee, descendants of a review issue,
 * issues with a recently-resolved review (snoozed), missing/cross-company
 * agents, and candidates with no fired trigger. Per-candidate failures are
 * logged and tallied instead of aborting the pass.
 *
 * @param opts.now - injectable clock for tests; defaults to new Date()
 * @param opts.companyId - restrict the scan to one company when set
 * @param opts.thresholds - partial threshold overrides merged with defaults
 * @returns tally of scanned/created/updated/existing/snoozed/skipped/failed
 *   plus the affected review and failed source issue ids
 */
async function reconcileProductivityReviews(opts?: {
  now?: Date;
  companyId?: string;
  thresholds?: Partial<ProductivityReviewThresholds>;
}) {
  const now = opts?.now ?? new Date();
  const thresholds = buildThresholds(opts?.thresholds);
  const candidates = await db
    .select()
    .from(issues)
    .where(
      and(
        opts?.companyId ? eq(issues.companyId, opts.companyId) : undefined,
        isNull(issues.hiddenAt),
        isNull(issues.assigneeUserId),
        inArray(issues.status, ["todo", "in_progress"]),
        sql`${issues.assigneeAgentId} is not null`,
        sql`${issues.originKind} <> ${PRODUCTIVITY_REVIEW_ORIGIN_KIND}`,
      ),
    )
    .orderBy(asc(issues.updatedAt), asc(issues.id))
    .limit(MAX_CANDIDATE_ISSUES);
  const result = {
    scanned: candidates.length,
    created: 0,
    updated: 0,
    existing: 0,
    snoozed: 0,
    skipped: 0,
    failed: 0,
    reviewIssueIds: [] as string[],
    failedIssueIds: [] as string[],
  };
  // Issue prefixes are fetched once per company across the whole pass.
  const prefixCache = new Map<string, string>();
  for (const candidate of candidates) {
    if (!candidate.assigneeAgentId) {
      result.skipped += 1;
      continue;
    }
    if (await isProductivityReviewDescendant(candidate)) {
      result.skipped += 1;
      continue;
    }
    if (await findRecentResolvedProductivityReview(candidate.companyId, candidate.id, thresholds, now)) {
      result.snoozed += 1;
      continue;
    }
    const sourceAgent = await getAgent(candidate.assigneeAgentId);
    if (!sourceAgent || sourceAgent.companyId !== candidate.companyId) {
      result.skipped += 1;
      continue;
    }
    const evidence = await collectEvidence(candidate, sourceAgent, thresholds, now);
    if (!evidence) {
      result.skipped += 1;
      continue;
    }
    let prefix = prefixCache.get(candidate.companyId);
    if (!prefix) {
      prefix = await getCompanyIssuePrefix(candidate.companyId);
      prefixCache.set(candidate.companyId, prefix);
    }
    try {
      const outcome = await createOrUpdateReview(evidence, { prefix });
      if (outcome.kind === "created") result.created += 1;
      else if (outcome.kind === "updated") result.updated += 1;
      else result.existing += 1;
      result.reviewIssueIds.push(outcome.reviewIssueId);
    } catch (err) {
      // Best-effort pass: record the failure and keep scanning.
      result.failed += 1;
      result.failedIssueIds.push(candidate.id);
      logger.warn(
        {
          err,
          companyId: candidate.companyId,
          issueId: candidate.id,
          requestDepth: candidate.requestDepth,
        },
        "productivity review reconciliation skipped malformed candidate",
      );
    }
  }
  return result;
}
async function isProductivityReviewContinuationHoldActive(input: {
companyId: string;
issueId: string;
agentId: string;
now?: Date;
thresholds?: Partial<ProductivityReviewThresholds>;
}) {
const now = input.now ?? new Date();
const thresholds = buildThresholds(input.thresholds);
const [sourceIssue, sourceAgent, openReview] = await Promise.all([
db
.select()
.from(issues)
.where(and(eq(issues.companyId, input.companyId), eq(issues.id, input.issueId)))
.then((rows) => rows[0] ?? null),
getAgent(input.agentId),
findOpenProductivityReview(input.companyId, input.issueId),
]);
if (!sourceIssue || !sourceAgent || !openReview) return { held: false as const };
if (sourceAgent.companyId !== input.companyId) return { held: false as const };
const evidence = await collectEvidence(sourceIssue, sourceAgent, thresholds, now);
if (!evidence || !isSoftStopTrigger(evidence.trigger)) return { held: false as const };
return {
held: true as const,
reviewIssueId: openReview.id,
reviewIdentifier: openReview.identifier,
trigger: evidence.trigger,
reason: evidence.triggerReasons.join("; "),
};
}
/**
 * Writes an activity-log entry recording that an agent's continuation run on
 * an issue was held pending an open productivity review.
 */
async function recordContinuationHold(input: {
  companyId: string;
  issueId: string;
  runId: string;
  agentId: string;
  reviewIssueId: string;
  trigger: ProductivityReviewTrigger;
  reason: string;
}) {
  // Details payload kept separate for readability; shape matches the
  // "issue.productivity_review_continuation_held" activity contract.
  const holdDetails = {
    source: "productivity_review.continuation_hold",
    reviewIssueId: input.reviewIssueId,
    trigger: input.trigger,
    reason: input.reason,
  };
  await logActivity(db, {
    companyId: input.companyId,
    actorType: "system",
    actorId: "system",
    agentId: input.agentId,
    runId: input.runId,
    action: "issue.productivity_review_continuation_held",
    entityType: "issue",
    entityId: input.issueId,
    details: holdDetails,
  });
}
return {
reconcileProductivityReviews,
isProductivityReviewContinuationHoldActive,
recordContinuationHold,
};
}

View File

@@ -1,5 +1,6 @@
// Stable origin-kind identifiers for issues produced by automated recovery
// flows (presumably compared against issues.originKind elsewhere — values must
// not change once persisted; TODO confirm against the schema's consumers).
export const RECOVERY_ORIGIN_KINDS = {
  issueGraphLivenessEscalation: "harness_liveness_escalation",
  issueProductivityReview: "issue_productivity_review",
  strandedIssueRecovery: "stranded_issue_recovery",
  staleActiveRunEvaluation: "stale_active_run_evaluation",
} as const;

View File

@@ -72,7 +72,7 @@ type RecoveryWakeup = (
type LatestIssueRun = Pick<
typeof heartbeatRuns.$inferSelect,
"id" | "agentId" | "status" | "error" | "errorCode" | "contextSnapshot"
"id" | "agentId" | "status" | "error" | "errorCode" | "contextSnapshot" | "livenessState"
> | null;
type WatchdogDecisionActor =
@@ -188,6 +188,18 @@ function isUnsuccessfulTerminalIssueRun(latestRun: LatestIssueRun) {
);
}
// True when the latest run for the issue exists and finished as "succeeded".
function isSuccessfulInProgressContinuationRun(latestRun: LatestIssueRun) {
  if (!latestRun) return false;
  return latestRun.status === "succeeded";
}
// A continuation run counts as productive only when it succeeded AND its
// liveness state shows forward motion (or a legitimate terminal/blocked state).
function isProductiveContinuationRun(latestRun: LatestIssueRun) {
  if (latestRun?.status !== "succeeded") return false;
  switch (latestRun.livenessState) {
    case "advanced":
    case "completed":
    case "blocked":
    case "needs_followup":
      return true;
    default:
      return false;
  }
}
function parseLivenessIncidentKey(incidentKey: string | null | undefined) {
if (!incidentKey) return null;
return parseIssueGraphLivenessIncidentKey(incidentKey);
@@ -299,6 +311,7 @@ export function recoveryService(db: Db, deps: { enqueueWakeup: RecoveryWakeup })
error: heartbeatRuns.error,
errorCode: heartbeatRuns.errorCode,
contextSnapshot: heartbeatRuns.contextSnapshot,
livenessState: heartbeatRuns.livenessState,
})
.from(heartbeatRuns)
.where(
@@ -1572,6 +1585,8 @@ export function recoveryService(db: Db, deps: { enqueueWakeup: RecoveryWakeup })
assignmentDispatched: 0,
dispatchRequeued: 0,
continuationRequeued: 0,
productiveContinuationObserved: 0,
successfulContinuationObserved: 0,
orphanBlockersAssigned: 0,
escalated: 0,
skipped: 0,
@@ -1690,6 +1705,15 @@ export function recoveryService(db: Db, deps: { enqueueWakeup: RecoveryWakeup })
result.skipped += 1;
continue;
}
if (isSuccessfulInProgressContinuationRun(latestRun)) {
if (isProductiveContinuationRun(latestRun)) {
result.productiveContinuationObserved += 1;
} else {
result.successfulContinuationObserved += 1;
}
result.skipped += 1;
continue;
}
if (didAutomaticRecoveryFail(latestRun, "issue_continuation_needed")) {
const failureSummary = summarizeRunFailureForIssueComment(latestRun);
const updated = await escalateStrandedAssignedIssue({

View File

@@ -38,6 +38,7 @@ function makeCompany(id: string): Company {
issueCounter: 1,
budgetMonthlyCents: 0,
spentMonthlyCents: 0,
attachmentMaxBytes: 10 * 1024 * 1024,
requireBoardApprovalForNewAgents: false,
feedbackDataSharingEnabled: false,
feedbackDataSharingConsentAt: null,

View File

@@ -36,6 +36,7 @@ export const storybookCompanies: Company[] = [
issueCounter: 1641,
budgetMonthlyCents: 250_000,
spentMonthlyCents: 67_500,
attachmentMaxBytes: 10 * 1024 * 1024,
requireBoardApprovalForNewAgents: true,
feedbackDataSharingEnabled: true,
feedbackDataSharingConsentAt: null,
@@ -58,6 +59,7 @@ export const storybookCompanies: Company[] = [
issueCounter: 88,
budgetMonthlyCents: 180_000,
spentMonthlyCents: 39_500,
attachmentMaxBytes: 10 * 1024 * 1024,
requireBoardApprovalForNewAgents: false,
feedbackDataSharingEnabled: false,
feedbackDataSharingConsentAt: null,
@@ -80,6 +82,7 @@ export const storybookCompanies: Company[] = [
issueCounter: 204,
budgetMonthlyCents: 90_000,
spentMonthlyCents: 91_200,
attachmentMaxBytes: 10 * 1024 * 1024,
requireBoardApprovalForNewAgents: true,
feedbackDataSharingEnabled: false,
feedbackDataSharingConsentAt: null,