From 9c6f55159503b4f091cf8102c1fe47d21e134c19 Mon Sep 17 00:00:00 2001 From: Dotta <34892728+cryppadotta@users.noreply.github.com> Date: Mon, 20 Apr 2026 08:52:51 -0500 Subject: [PATCH] [codex] Add plugin orchestration host APIs (#4114) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Thinking Path > - Paperclip orchestrates AI agents for zero-human companies. > - The plugin system is the extension path for optional capabilities that should not require core product changes for every integration. > - Plugins need scoped host APIs for issue orchestration, documents, wakeups, summaries, activity attribution, and isolated database state. > - Without those host APIs, richer plugins either cannot coordinate Paperclip work safely or need privileged core-side special cases. > - This pull request adds the plugin orchestration host surface, scoped route dispatch, a database namespace layer, and a smoke plugin that exercises the contract. > - The benefit is a broader plugin API that remains company-scoped, auditable, and covered by tests. ## What Changed - Added plugin orchestration host APIs for issue creation, document access, wakeups, summaries, plugin-origin activity, and scoped API route dispatch. - Added plugin database namespace tables, schema exports, migration checks, and idempotent replay coverage under migration `0059_plugin_database_namespaces`. - Added shared plugin route/API types and validators used by server and SDK boundaries. - Expanded plugin SDK types, protocol helpers, worker RPC host behavior, and testing utilities for orchestration flows. - Added the `plugin-orchestration-smoke-example` package to exercise scoped routes, restricted database namespaces, issue orchestration, documents, wakeups, summaries, and UI status surfaces. - Kept the new orchestration smoke fixture out of the root pnpm workspace importer so this PR preserves the repository policy of not committing `pnpm-lock.yaml`. 
- Updated plugin docs and database docs for the new orchestration and database namespace surfaces. - Rebased the branch onto `public-gh/master`, resolved conflicts, and removed `pnpm-lock.yaml` from the final PR diff. ## Verification - `pnpm install --frozen-lockfile` - `pnpm --filter @paperclipai/db typecheck` - `pnpm exec vitest run packages/db/src/client.test.ts` - `pnpm exec vitest run server/src/__tests__/plugin-database.test.ts server/src/__tests__/plugin-orchestration-apis.test.ts server/src/__tests__/plugin-routes-authz.test.ts server/src/__tests__/plugin-scoped-api-routes.test.ts server/src/__tests__/plugin-sdk-orchestration-contract.test.ts` - From `packages/plugins/examples/plugin-orchestration-smoke-example`: `pnpm exec vitest run --config ./vitest.config.ts` - `pnpm --dir packages/plugins/examples/plugin-orchestration-smoke-example run typecheck` - `pnpm --filter @paperclipai/server typecheck` - PR CI on latest head `293fc67c`: `policy`, `verify`, `e2e`, and `security/snyk` all passed. ## Risks - Medium risk: this expands plugin host authority, so route auth, company scoping, and plugin-origin activity attribution need careful review. - Medium risk: database namespace migration behavior must remain idempotent for environments that may have seen earlier branch versions. - Medium risk: the orchestration smoke fixture is intentionally excluded from the root workspace importer to avoid a `pnpm-lock.yaml` PR diff; direct fixture verification remains listed above. - Low operational risk from the PR setup itself: the branch is rebased onto current `master`, the migration is ordered after upstream `0057`/`0058`, and `pnpm-lock.yaml` is not in the final diff. > For core feature work, check [`ROADMAP.md`](ROADMAP.md) first and discuss it in `#dev` before opening the PR. Feature PRs that overlap with planned core work may need to be redirected. See `CONTRIBUTING.md`. 
Roadmap checked: this work aligns with the completed Plugin system milestone and extends the plugin surface rather than duplicating an unrelated planned core feature. ## Model Used - OpenAI Codex, GPT-5-based coding agent in a tool-enabled CLI environment. Exact hosted model build and context-window size are not exposed by the runtime; reasoning/tool use were enabled for repository inspection, editing, testing, git operations, and PR creation. ## Checklist - [x] I have included a thinking path that traces from project context to this change - [x] I have specified the model used (with version and capability details) - [x] I have checked ROADMAP.md and confirmed this PR does not duplicate planned core work - [x] I have run tests locally and they pass - [x] I have added or updated tests where applicable - [x] If this change affects the UI, I have included before/after screenshots (N/A: no core UI screen change; example plugin UI contract is covered by tests) - [x] I have updated relevant documentation to reflect my changes - [x] I have considered and documented any risks above - [x] I will address all Greptile and reviewer comments before requesting merge --------- Co-authored-by: Paperclip --- doc/DATABASE.md | 10 + doc/plugins/PLUGIN_AUTHORING_GUIDE.md | 56 +- doc/plugins/PLUGIN_SPEC.md | 63 +- packages/db/src/client.test.ts | 74 ++ .../0059_plugin_database_namespaces.sql | 41 + packages/db/src/migrations/meta/_journal.json | 9 +- packages/db/src/schema/index.ts | 1 + packages/db/src/schema/plugin_database.ts | 75 ++ .../.gitignore | 3 + .../README.md | 48 ++ .../esbuild.config.mjs | 17 + .../migrations/001_orchestration_smoke.sql | 10 + .../package.json | 46 ++ .../rollup.config.mjs | 28 + .../src/manifest.ts | 82 ++ .../src/ui/index.tsx | 134 ++++ .../src/worker.ts | 253 ++++++ .../tests/plugin.spec.ts | 162 ++++ .../tsconfig.json | 27 + .../vitest.config.ts | 8 + packages/plugins/sdk/README.md | 153 ++++ packages/plugins/sdk/src/define-plugin.ts | 31 + 
.../plugins/sdk/src/host-client-factory.ts | 68 +- packages/plugins/sdk/src/index.ts | 32 + packages/plugins/sdk/src/protocol.ts | 137 ++++ packages/plugins/sdk/src/testing.ts | 273 ++++++- packages/plugins/sdk/src/types.ts | 278 ++++++- packages/plugins/sdk/src/worker-rpc-host.ts | 142 +++- packages/shared/src/constants.ts | 68 +- packages/shared/src/index.ts | 25 + packages/shared/src/types/activity.ts | 2 +- packages/shared/src/types/index.ts | 9 + packages/shared/src/types/plugin.ts | 81 ++ packages/shared/src/validators/index.ts | 4 + packages/shared/src/validators/plugin.ts | 105 +++ pnpm-workspace.yaml | 3 + server/src/__tests__/plugin-database.test.ts | 269 +++++++ .../plugin-orchestration-apis.test.ts | 372 +++++++++ .../src/__tests__/plugin-routes-authz.test.ts | 74 +- .../plugin-scoped-api-routes.test.ts | 427 ++++++++++ .../plugin-sdk-orchestration-contract.test.ts | 240 ++++++ server/src/app.ts | 6 +- server/src/config.ts | 2 + server/src/index.ts | 7 +- server/src/routes/activity.ts | 2 +- server/src/routes/plugins.ts | 301 ++++++- server/src/services/activity-log.ts | 41 +- server/src/services/heartbeat.ts | 43 +- .../services/plugin-capability-validator.ts | 14 + server/src/services/plugin-database.ts | 498 ++++++++++++ server/src/services/plugin-host-services.ts | 738 +++++++++++++++++- server/src/services/plugin-loader.ts | 42 +- server/src/services/plugin-worker-manager.ts | 3 + 53 files changed, 5584 insertions(+), 53 deletions(-) create mode 100644 packages/db/src/migrations/0059_plugin_database_namespaces.sql create mode 100644 packages/db/src/schema/plugin_database.ts create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/.gitignore create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/README.md create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/esbuild.config.mjs create mode 100644 
packages/plugins/examples/plugin-orchestration-smoke-example/migrations/001_orchestration_smoke.sql create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/package.json create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/rollup.config.mjs create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/src/manifest.ts create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/src/ui/index.tsx create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/src/worker.ts create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/tests/plugin.spec.ts create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/tsconfig.json create mode 100644 packages/plugins/examples/plugin-orchestration-smoke-example/vitest.config.ts create mode 100644 server/src/__tests__/plugin-database.test.ts create mode 100644 server/src/__tests__/plugin-orchestration-apis.test.ts create mode 100644 server/src/__tests__/plugin-scoped-api-routes.test.ts create mode 100644 server/src/__tests__/plugin-sdk-orchestration-contract.test.ts create mode 100644 server/src/services/plugin-database.ts diff --git a/doc/DATABASE.md b/doc/DATABASE.md index d2425d14d9..328cdf9fba 100644 --- a/doc/DATABASE.md +++ b/doc/DATABASE.md @@ -94,6 +94,16 @@ Set `DATABASE_URL` in your `.env`: DATABASE_URL=postgres://postgres.[PROJECT-REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres ``` +For hosted deployments that use a pooled runtime URL, set +`DATABASE_MIGRATION_URL` to the direct connection URL. 
Paperclip uses it for +startup schema checks/migrations and plugin namespace migrations, while the app +continues to use `DATABASE_URL` for runtime queries: + +```sh +DATABASE_URL=postgres://postgres.[PROJECT-REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres +DATABASE_MIGRATION_URL=postgres://postgres.[PROJECT-REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:5432/postgres +``` + If using connection pooling (port 6543), the `postgres` client must disable prepared statements. Update `packages/db/src/client.ts`: ```ts diff --git a/doc/plugins/PLUGIN_AUTHORING_GUIDE.md b/doc/plugins/PLUGIN_AUTHORING_GUIDE.md index 075156fd0f..e3c243783a 100644 --- a/doc/plugins/PLUGIN_AUTHORING_GUIDE.md +++ b/doc/plugins/PLUGIN_AUTHORING_GUIDE.md @@ -10,6 +10,9 @@ It is intentionally narrower than [PLUGIN_SPEC.md](./PLUGIN_SPEC.md). The spec i - Plugin UI runs as same-origin JavaScript inside the main Paperclip app. - Worker-side host APIs are capability-gated. - Plugin UI is not sandboxed by manifest capabilities. +- Plugin database migrations are restricted to a host-derived plugin namespace. +- Plugin-owned JSON API routes must be declared in the manifest and are mounted + only under `/api/plugins/:pluginId/api/*`. - There is no host-provided shared React component kit for plugins yet. - `ctx.assets` is not supported in the current runtime. 
@@ -77,10 +80,12 @@ Worker: - secrets - activity - state +- database namespace via `ctx.db` +- scoped JSON API routes declared with `apiRoutes` - entities - projects and project workspaces - companies -- issues and comments +- issues, comments, namespaced `plugin:` origins, blocker relations, checkout assertions, assignment wakeups, and orchestration summaries - agents and agent sessions - goals - data/actions @@ -89,6 +94,55 @@ Worker: - metrics - logger +### Plugin database declarations + +First-party or otherwise trusted orchestration plugins can declare: + +```ts +database: { + migrationsDir: "migrations", + coreReadTables: ["issues"], +} +``` + +Required capabilities are `database.namespace.migrate` and +`database.namespace.read`; add `database.namespace.write` for runtime mutations. +The host derives `ctx.db.namespace`, runs SQL files in filename order before the +worker starts, records checksums in `plugin_migrations`, and rejects changed +already-applied migrations. + +Migration SQL may create or alter objects only inside `ctx.db.namespace`. It may +reference whitelisted `public` core tables for foreign keys or read-only views, +but may not mutate/alter/drop/truncate public tables, create extensions, +triggers, untrusted languages, or runtime multi-statement SQL. Runtime +`ctx.db.query()` is restricted to `SELECT`; runtime `ctx.db.execute()` is +restricted to namespace-local `INSERT`, `UPDATE`, and `DELETE`. 
+ +### Scoped plugin API routes + +Plugins can expose JSON-only routes under their own namespace: + +```ts +apiRoutes: [ + { + routeKey: "initialize", + method: "POST", + path: "/issues/:issueId/smoke", + auth: "board-or-agent", + capability: "api.routes.register", + checkoutPolicy: "required-for-agent-in-progress", + companyResolution: { from: "issue", param: "issueId" }, + }, +] +``` + +The host resolves the plugin, checks that it is ready, enforces +`api.routes.register`, matches the declared method/path, resolves company access, +and applies checkout policy before dispatching to the worker's `onApiRequest` +handler. The worker receives sanitized headers, route params, query, parsed JSON +body, actor context, and company id. Do not use plugin routes to claim core +paths; they always remain under `/api/plugins/:pluginId/api/*`. + UI: - `usePluginData` diff --git a/doc/plugins/PLUGIN_SPEC.md b/doc/plugins/PLUGIN_SPEC.md index f3ec64736b..ed8dea6b2c 100644 --- a/doc/plugins/PLUGIN_SPEC.md +++ b/doc/plugins/PLUGIN_SPEC.md @@ -28,6 +28,9 @@ Current limitations to keep in mind: - The repo example plugins under `packages/plugins/examples/` are development conveniences. They work from a source checkout and should not be assumed to exist in a generic published build unless they are explicitly shipped with that build. - Dynamic plugin install is not yet cloud-ready for horizontally scaled or ephemeral deployments. There is no shared artifact store, install coordination, or cross-node distribution layer yet. - The current runtime does not yet ship a real host-provided plugin UI component kit, and it does not support plugin asset uploads/reads. Treat those as future-scope ideas in this spec, not current implementation promises. +- Scoped plugin API routes are JSON-only and must be declared in `apiRoutes`. + They mount under `/api/plugins/:pluginId/api/*`; plugins cannot shadow core + API routes. 
In practice, that means the current implementation is a good fit for local development and self-hosted persistent deployments, but not yet for multi-instance cloud plugin distribution. @@ -624,7 +627,46 @@ Required SDK clients: Plugins that need filesystem, git, terminal, or process operations handle those directly using standard Node APIs or libraries. The host provides project workspace metadata through `ctx.projects` so plugins can resolve workspace paths, but the host does not proxy low-level OS operations. -## 14.1 Example SDK Shape +## 14.1 Issue Orchestration APIs + +Trusted orchestration plugins can create and update Paperclip issues through `ctx.issues` instead of importing server internals. The public issue contract includes parent/project/goal links, board or agent assignees, blocker IDs, labels, billing code, request depth, execution workspace inheritance, and plugin origin metadata. + +Origin rules: + +- Built-in core issues keep built-in origins such as `manual` and `routine_execution`. +- Plugin-managed issues use `plugin:` or a sub-kind such as `plugin::feature`. +- The host derives the default plugin origin from the installed plugin key and rejects attempts to set `plugin:` origins. +- `originId` is plugin-defined and should be stable for idempotent generated work. + +Relation and read helpers: + +- `ctx.issues.relations.get(issueId, companyId)` +- `ctx.issues.relations.setBlockedBy(issueId, blockerIssueIds, companyId)` +- `ctx.issues.relations.addBlockers(issueId, blockerIssueIds, companyId)` +- `ctx.issues.relations.removeBlockers(issueId, blockerIssueIds, companyId)` +- `ctx.issues.getSubtree(issueId, companyId, options)` +- `ctx.issues.summaries.getOrchestration({ issueId, companyId, includeSubtree, billingCode })` + +Governance helpers: + +- `ctx.issues.assertCheckoutOwner({ issueId, companyId, actorAgentId, actorRunId })` lets plugin actions preserve agent-run checkout ownership. 
+- `ctx.issues.requestWakeup(issueId, companyId, options)` requests assignment wakeups through host heartbeat semantics, including terminal-status, blocker, assignee, and budget hard-stop checks. +- `ctx.issues.requestWakeups(issueIds, companyId, options)` applies the same host-owned wakeup semantics to a batch and may use an idempotency key prefix for stable coordinator retries. + +Plugin-originated issue, relation, document, comment, and wakeup mutations must write activity entries with `actorType: "plugin"` and details fields for `sourcePluginId`, `sourcePluginKey`, `initiatingActorType`, `initiatingActorId`, and `initiatingRunId` when a user or agent run initiated the plugin work. + +Scoped API routes: + +- `apiRoutes[]` declares `routeKey`, `method`, plugin-local `path`, `auth`, + `capability`, optional checkout policy, and company resolution. +- The host enforces auth, company access, `api.routes.register`, route matching, + and checkout policy before worker dispatch. +- The worker implements `onApiRequest(input)` and returns a JSON response shape + `{ status?, headers?, body? }`. +- Only safe request headers are forwarded; auth/cookie headers are never passed + to the worker. 
+ +## 14.2 Example SDK Shape ```ts /** Top-level helper for defining a plugin with type checking */ @@ -696,16 +738,24 @@ The host enforces capabilities in the SDK layer and refuses calls outside the gr - `project.workspaces.read` - `issues.read` - `issue.comments.read` +- `issue.documents.read` +- `issue.relations.read` +- `issue.subtree.read` - `agents.read` - `goals.read` - `activity.read` - `costs.read` +- `issues.orchestration.read` ### Data Write - `issues.create` - `issues.update` - `issue.comments.create` +- `issue.documents.write` +- `issue.relations.write` +- `issues.checkout` +- `issues.wakeup` - `assets.write` - `assets.read` - `activity.log.write` @@ -772,6 +822,13 @@ Minimum event set: - `issue.created` - `issue.updated` - `issue.comment.created` +- `issue.document.created` +- `issue.document.updated` +- `issue.document.deleted` +- `issue.relations.updated` +- `issue.checked_out` +- `issue.released` +- `issue.assignment_wakeup_requested` - `agent.created` - `agent.updated` - `agent.status_changed` @@ -781,6 +838,8 @@ Minimum event set: - `agent.run.cancelled` - `approval.created` - `approval.decided` +- `budget.incident.opened` +- `budget.incident.resolved` - `cost_event.created` - `activity.logged` @@ -1238,6 +1297,8 @@ Plugin-originated mutations should write: - `actor_type = plugin` - `actor_id = ` +- details include `sourcePluginId` and `sourcePluginKey` +- details include `initiatingActorType`, `initiatingActorId`, and `initiatingRunId` when a user or agent run triggered the plugin work ## 21.5 Plugin Migrations diff --git a/packages/db/src/client.test.ts b/packages/db/src/client.test.ts index 81cc2acef9..552db3b0d7 100644 --- a/packages/db/src/client.test.ts +++ b/packages/db/src/client.test.ts @@ -467,4 +467,78 @@ describeEmbeddedPostgres("applyPendingMigrations", () => { }, 20_000, ); + + it( + "replays migration 0059 safely when plugin_database_namespaces already exists", + async () => { + const connectionString = await createTempDatabase(); 
+ + await applyPendingMigrations(connectionString); + + const sql = postgres(connectionString, { max: 1, onnotice: () => {} }); + try { + const pluginNamespacesHash = await migrationHash( + "0059_plugin_database_namespaces.sql", + ); + + await sql.unsafe( + `DELETE FROM "drizzle"."__drizzle_migrations" WHERE hash = '${pluginNamespacesHash}'`, + ); + + const tables = await sql.unsafe<{ table_name: string }[]>( + ` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name IN ('plugin_database_namespaces', 'plugin_migrations') + ORDER BY table_name + `, + ); + expect(tables.map((row) => row.table_name)).toEqual([ + "plugin_database_namespaces", + "plugin_migrations", + ]); + } finally { + await sql.end(); + } + + const pendingState = await inspectMigrations(connectionString); + expect(pendingState).toMatchObject({ + status: "needsMigrations", + pendingMigrations: ["0059_plugin_database_namespaces.sql"], + reason: "pending-migrations", + }); + + await applyPendingMigrations(connectionString); + + const finalState = await inspectMigrations(connectionString); + expect(finalState.status).toBe("upToDate"); + + const verifySql = postgres(connectionString, { max: 1, onnotice: () => {} }); + try { + const indexes = await verifySql.unsafe<{ indexname: string }[]>( + ` + SELECT indexname + FROM pg_indexes + WHERE schemaname = 'public' + AND tablename IN ('plugin_database_namespaces', 'plugin_migrations') + ORDER BY indexname + `, + ); + expect(indexes.map((row) => row.indexname)).toEqual( + expect.arrayContaining([ + "plugin_database_namespaces_namespace_idx", + "plugin_database_namespaces_plugin_idx", + "plugin_database_namespaces_status_idx", + "plugin_migrations_plugin_idx", + "plugin_migrations_plugin_key_idx", + "plugin_migrations_status_idx", + ]), + ); + } finally { + await verifySql.end(); + } + }, + 20_000, + ); }); diff --git a/packages/db/src/migrations/0059_plugin_database_namespaces.sql 
b/packages/db/src/migrations/0059_plugin_database_namespaces.sql new file mode 100644 index 0000000000..031713b826 --- /dev/null +++ b/packages/db/src/migrations/0059_plugin_database_namespaces.sql @@ -0,0 +1,41 @@ +CREATE TABLE IF NOT EXISTS "plugin_database_namespaces" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "plugin_id" uuid NOT NULL, + "plugin_key" text NOT NULL, + "namespace_name" text NOT NULL, + "namespace_mode" text DEFAULT 'schema' NOT NULL, + "status" text DEFAULT 'active' NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "updated_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "plugin_migrations" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "plugin_id" uuid NOT NULL, + "plugin_key" text NOT NULL, + "namespace_name" text NOT NULL, + "migration_key" text NOT NULL, + "checksum" text NOT NULL, + "plugin_version" text NOT NULL, + "status" text NOT NULL, + "started_at" timestamp with time zone DEFAULT now() NOT NULL, + "applied_at" timestamp with time zone, + "error_message" text +); +--> statement-breakpoint +DO $$ BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'plugin_database_namespaces_plugin_id_plugins_id_fk') THEN + ALTER TABLE "plugin_database_namespaces" ADD CONSTRAINT "plugin_database_namespaces_plugin_id_plugins_id_fk" FOREIGN KEY ("plugin_id") REFERENCES "public"."plugins"("id") ON DELETE cascade ON UPDATE no action; + END IF; +END $$;--> statement-breakpoint +DO $$ BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'plugin_migrations_plugin_id_plugins_id_fk') THEN + ALTER TABLE "plugin_migrations" ADD CONSTRAINT "plugin_migrations_plugin_id_plugins_id_fk" FOREIGN KEY ("plugin_id") REFERENCES "public"."plugins"("id") ON DELETE cascade ON UPDATE no action; + END IF; +END $$;--> statement-breakpoint +CREATE UNIQUE INDEX IF NOT EXISTS "plugin_database_namespaces_plugin_idx" ON 
"plugin_database_namespaces" USING btree ("plugin_id");--> statement-breakpoint +CREATE UNIQUE INDEX IF NOT EXISTS "plugin_database_namespaces_namespace_idx" ON "plugin_database_namespaces" USING btree ("namespace_name");--> statement-breakpoint +CREATE INDEX IF NOT EXISTS "plugin_database_namespaces_status_idx" ON "plugin_database_namespaces" USING btree ("status");--> statement-breakpoint +CREATE UNIQUE INDEX IF NOT EXISTS "plugin_migrations_plugin_key_idx" ON "plugin_migrations" USING btree ("plugin_id","migration_key");--> statement-breakpoint +CREATE INDEX IF NOT EXISTS "plugin_migrations_plugin_idx" ON "plugin_migrations" USING btree ("plugin_id");--> statement-breakpoint +CREATE INDEX IF NOT EXISTS "plugin_migrations_status_idx" ON "plugin_migrations" USING btree ("status"); diff --git a/packages/db/src/migrations/meta/_journal.json b/packages/db/src/migrations/meta/_journal.json index 760e7bc4b7..3a47133a82 100644 --- a/packages/db/src/migrations/meta/_journal.json +++ b/packages/db/src/migrations/meta/_journal.json @@ -414,6 +414,13 @@ "when": 1776542245004, "tag": "0058_wealthy_starbolt", "breakpoints": true + }, + { + "idx": 59, + "version": "7", + "when": 1776542246000, + "tag": "0059_plugin_database_namespaces", + "breakpoints": true } ] -} \ No newline at end of file +} diff --git a/packages/db/src/schema/index.ts b/packages/db/src/schema/index.ts index 4814303cce..54bb5119c7 100644 --- a/packages/db/src/schema/index.ts +++ b/packages/db/src/schema/index.ts @@ -60,6 +60,7 @@ export { pluginConfig } from "./plugin_config.js"; export { pluginCompanySettings } from "./plugin_company_settings.js"; export { pluginState } from "./plugin_state.js"; export { pluginEntities } from "./plugin_entities.js"; +export { pluginDatabaseNamespaces, pluginMigrations } from "./plugin_database.js"; export { pluginJobs, pluginJobRuns } from "./plugin_jobs.js"; export { pluginWebhookDeliveries } from "./plugin_webhooks.js"; export { pluginLogs } from "./plugin_logs.js"; 
diff --git a/packages/db/src/schema/plugin_database.ts b/packages/db/src/schema/plugin_database.ts new file mode 100644 index 0000000000..2a279d0771 --- /dev/null +++ b/packages/db/src/schema/plugin_database.ts @@ -0,0 +1,75 @@ +import { + pgTable, + uuid, + text, + timestamp, + index, + uniqueIndex, +} from "drizzle-orm/pg-core"; +import type { + PluginDatabaseMigrationStatus, + PluginDatabaseNamespaceMode, + PluginDatabaseNamespaceStatus, +} from "@paperclipai/shared"; +import { plugins } from "./plugins.js"; + +/** + * Database namespace allocated to an installed plugin. + * + * Namespaces are deterministic and owned by the host. Plugin SQL may create + * objects only inside its namespace, while selected public core tables remain + * read-only join targets through runtime checks. + */ +export const pluginDatabaseNamespaces = pgTable( + "plugin_database_namespaces", + { + id: uuid("id").primaryKey().defaultRandom(), + pluginId: uuid("plugin_id") + .notNull() + .references(() => plugins.id, { onDelete: "cascade" }), + pluginKey: text("plugin_key").notNull(), + namespaceName: text("namespace_name").notNull(), + namespaceMode: text("namespace_mode").$type().notNull().default("schema"), + status: text("status").$type().notNull().default("active"), + createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(), + updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(), + }, + (table) => ({ + pluginIdx: uniqueIndex("plugin_database_namespaces_plugin_idx").on(table.pluginId), + namespaceIdx: uniqueIndex("plugin_database_namespaces_namespace_idx").on(table.namespaceName), + statusIdx: index("plugin_database_namespaces_status_idx").on(table.status), + }), +); + +/** + * Per-plugin migration ledger. + * + * Every migration file is recorded with a checksum. A previously applied + * migration whose checksum changes is rejected during later activation. 
+ */ +export const pluginMigrations = pgTable( + "plugin_migrations", + { + id: uuid("id").primaryKey().defaultRandom(), + pluginId: uuid("plugin_id") + .notNull() + .references(() => plugins.id, { onDelete: "cascade" }), + pluginKey: text("plugin_key").notNull(), + namespaceName: text("namespace_name").notNull(), + migrationKey: text("migration_key").notNull(), + checksum: text("checksum").notNull(), + pluginVersion: text("plugin_version").notNull(), + status: text("status").$type().notNull(), + startedAt: timestamp("started_at", { withTimezone: true }).notNull().defaultNow(), + appliedAt: timestamp("applied_at", { withTimezone: true }), + errorMessage: text("error_message"), + }, + (table) => ({ + pluginMigrationIdx: uniqueIndex("plugin_migrations_plugin_key_idx").on( + table.pluginId, + table.migrationKey, + ), + pluginIdx: index("plugin_migrations_plugin_idx").on(table.pluginId), + statusIdx: index("plugin_migrations_status_idx").on(table.status), + }), +); diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/.gitignore b/packages/plugins/examples/plugin-orchestration-smoke-example/.gitignore new file mode 100644 index 0000000000..0430cc3560 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/.gitignore @@ -0,0 +1,3 @@ +dist +node_modules +.paperclip-sdk diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/README.md b/packages/plugins/examples/plugin-orchestration-smoke-example/README.md new file mode 100644 index 0000000000..cd4ea091ea --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/README.md @@ -0,0 +1,48 @@ +# Plugin Orchestration Smoke Example + +This first-party example validates the orchestration-grade plugin host surface. +It is intentionally small and exists as an acceptance fixture rather than a +product plugin. 
+ +## What it exercises + +- `apiRoutes` under `/api/plugins/:pluginId/api/*` +- restricted database migrations and runtime `ctx.db` +- plugin-owned rows joined to `public.issues` +- plugin-created child issues with namespaced origin metadata +- billing codes, workspace inheritance, blocker relations, documents, wakeups, + and orchestration summaries +- issue detail and settings UI slots that surface route, capability, namespace, + and smoke status + +## Development + +```bash +pnpm install +pnpm typecheck +pnpm test +pnpm build +``` + +## Install Into Paperclip + +Use an absolute local path during development: + +```bash +curl -X POST http://127.0.0.1:3100/api/plugins/install \ + -H "Content-Type: application/json" \ + -d '{"packageName":"/absolute/path/to/paperclip/packages/plugins/examples/plugin-orchestration-smoke-example","isLocalPath":true}' +``` + +## Scoped Route Smoke + +After the plugin is ready, run the scoped route against an existing issue: + +```bash +curl -X POST http://127.0.0.1:3100/api/plugins/paperclipai.plugin-orchestration-smoke-example/api/issues//smoke \ + -H "Content-Type: application/json" \ + -d '{"assigneeAgentId":""}' +``` + +The route returns the generated child issue, resolved blocker, billing code, +subtree ids, and wakeup result. 
diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/esbuild.config.mjs b/packages/plugins/examples/plugin-orchestration-smoke-example/esbuild.config.mjs new file mode 100644 index 0000000000..b5cfd36ed9 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/esbuild.config.mjs @@ -0,0 +1,17 @@ +import esbuild from "esbuild"; +import { createPluginBundlerPresets } from "@paperclipai/plugin-sdk/bundlers"; + +const presets = createPluginBundlerPresets({ uiEntry: "src/ui/index.tsx" }); +const watch = process.argv.includes("--watch"); + +const workerCtx = await esbuild.context(presets.esbuild.worker); +const manifestCtx = await esbuild.context(presets.esbuild.manifest); +const uiCtx = await esbuild.context(presets.esbuild.ui); + +if (watch) { + await Promise.all([workerCtx.watch(), manifestCtx.watch(), uiCtx.watch()]); + console.log("esbuild watch mode enabled for worker, manifest, and ui"); +} else { + await Promise.all([workerCtx.rebuild(), manifestCtx.rebuild(), uiCtx.rebuild()]); + await Promise.all([workerCtx.dispose(), manifestCtx.dispose(), uiCtx.dispose()]); +} diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/migrations/001_orchestration_smoke.sql b/packages/plugins/examples/plugin-orchestration-smoke-example/migrations/001_orchestration_smoke.sql new file mode 100644 index 0000000000..6fb6029185 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/migrations/001_orchestration_smoke.sql @@ -0,0 +1,10 @@ +CREATE TABLE plugin_orchestration_smoke_1e8c264c64.smoke_runs ( + id uuid PRIMARY KEY, + root_issue_id uuid NOT NULL REFERENCES public.issues(id) ON DELETE CASCADE, + child_issue_id uuid REFERENCES public.issues(id) ON DELETE SET NULL, + blocker_issue_id uuid REFERENCES public.issues(id) ON DELETE SET NULL, + billing_code text NOT NULL, + last_summary jsonb NOT NULL DEFAULT '{}'::jsonb, + created_at timestamptz NOT NULL DEFAULT now(), + updated_at timestamptz NOT 
NULL DEFAULT now() +); diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/package.json b/packages/plugins/examples/plugin-orchestration-smoke-example/package.json new file mode 100644 index 0000000000..fb9985ca3f --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/package.json @@ -0,0 +1,46 @@ +{ + "name": "@paperclipai/plugin-orchestration-smoke-example", + "version": "0.1.0", + "type": "module", + "private": true, + "description": "First-party smoke plugin for orchestration-grade Paperclip plugin APIs", + "scripts": { + "prebuild": "node ../../../../scripts/ensure-plugin-build-deps.mjs", + "build": "node ./esbuild.config.mjs", + "build:rollup": "rollup -c", + "dev": "node ./esbuild.config.mjs --watch", + "dev:ui": "paperclip-plugin-dev-server --root . --ui-dir dist/ui --port 4177", + "test": "vitest run --config ./vitest.config.ts", + "typecheck": "pnpm --filter @paperclipai/plugin-sdk build && tsc --noEmit" + }, + "paperclipPlugin": { + "manifest": "./dist/manifest.js", + "worker": "./dist/worker.js", + "ui": "./dist/ui/" + }, + "keywords": [ + "paperclip", + "plugin", + "connector" + ], + "author": "Paperclip", + "license": "MIT", + "dependencies": { + "@paperclipai/plugin-sdk": "workspace:*" + }, + "devDependencies": { + "@paperclipai/shared": "workspace:*", + "@rollup/plugin-node-resolve": "^16.0.1", + "@rollup/plugin-typescript": "^12.1.2", + "@types/node": "^24.6.0", + "@types/react": "^19.0.8", + "esbuild": "^0.27.3", + "rollup": "^4.38.0", + "tslib": "^2.8.1", + "typescript": "^5.7.3", + "vitest": "^3.0.5" + }, + "peerDependencies": { + "react": ">=18" + } +} diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/rollup.config.mjs b/packages/plugins/examples/plugin-orchestration-smoke-example/rollup.config.mjs new file mode 100644 index 0000000000..ccee40a744 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/rollup.config.mjs @@ -0,0 +1,28 @@ +import { 
nodeResolve } from "@rollup/plugin-node-resolve"; +import typescript from "@rollup/plugin-typescript"; +import { createPluginBundlerPresets } from "@paperclipai/plugin-sdk/bundlers"; + +const presets = createPluginBundlerPresets({ uiEntry: "src/ui/index.tsx" }); + +function withPlugins(config) { + if (!config) return null; + return { + ...config, + plugins: [ + nodeResolve({ + extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs"], + }), + typescript({ + tsconfig: "./tsconfig.json", + declaration: false, + declarationMap: false, + }), + ], + }; +} + +export default [ + withPlugins(presets.rollup.manifest), + withPlugins(presets.rollup.worker), + withPlugins(presets.rollup.ui), +].filter(Boolean); diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/src/manifest.ts b/packages/plugins/examples/plugin-orchestration-smoke-example/src/manifest.ts new file mode 100644 index 0000000000..12bf4c2560 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/src/manifest.ts @@ -0,0 +1,82 @@ +import type { PaperclipPluginManifestV1 } from "@paperclipai/plugin-sdk"; + +const manifest: PaperclipPluginManifestV1 = { + id: "paperclipai.plugin-orchestration-smoke-example", + apiVersion: 1, + version: "0.1.0", + displayName: "Plugin Orchestration Smoke Example", + description: "First-party smoke plugin that exercises Paperclip orchestration-grade plugin APIs.", + author: "Paperclip", + categories: ["automation", "ui"], + capabilities: [ + "api.routes.register", + "database.namespace.migrate", + "database.namespace.read", + "database.namespace.write", + "issues.read", + "issues.create", + "issues.wakeup", + "issue.relations.read", + "issue.relations.write", + "issue.documents.read", + "issue.documents.write", + "issue.subtree.read", + "issues.orchestration.read", + "ui.dashboardWidget.register", + "ui.detailTab.register", + "instance.settings.register" + ], + entrypoints: { + worker: "./dist/worker.js", + ui: "./dist/ui" + }, + database: { + 
namespaceSlug: "orchestration_smoke", + migrationsDir: "migrations", + coreReadTables: ["issues"] + }, + apiRoutes: [ + { + routeKey: "initialize", + method: "POST", + path: "/issues/:issueId/smoke", + auth: "board-or-agent", + capability: "api.routes.register", + checkoutPolicy: "required-for-agent-in-progress", + companyResolution: { from: "issue", param: "issueId" } + }, + { + routeKey: "summary", + method: "GET", + path: "/issues/:issueId/smoke", + auth: "board-or-agent", + capability: "api.routes.register", + companyResolution: { from: "issue", param: "issueId" } + } + ], + ui: { + slots: [ + { + type: "dashboardWidget", + id: "health-widget", + displayName: "Orchestration Smoke Health", + exportName: "DashboardWidget" + }, + { + type: "taskDetailView", + id: "issue-panel", + displayName: "Orchestration Smoke", + exportName: "IssuePanel", + entityTypes: ["issue"] + }, + { + type: "settingsPage", + id: "settings", + displayName: "Orchestration Smoke", + exportName: "SettingsPage" + } + ] + } +}; + +export default manifest; diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/src/ui/index.tsx b/packages/plugins/examples/plugin-orchestration-smoke-example/src/ui/index.tsx new file mode 100644 index 0000000000..14075b603a --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/src/ui/index.tsx @@ -0,0 +1,134 @@ +import { + usePluginAction, + usePluginData, + type PluginDetailTabProps, + type PluginSettingsPageProps, + type PluginWidgetProps, +} from "@paperclipai/plugin-sdk/ui"; +import type React from "react"; + +type SurfaceStatus = { + status: "ok" | "degraded" | "error"; + checkedAt: string; + databaseNamespace: string; + routeKeys: string[]; + capabilities: string[]; + summary: null | { + rootIssueId: string; + childIssueId: string | null; + blockerIssueId: string | null; + billingCode: string; + subtreeIssueIds: string[]; + wakeupQueued: boolean; + }; +}; + +const panelStyle = { + display: "grid", + gap: 10, + 
fontSize: 13, + lineHeight: 1.45, +} satisfies React.CSSProperties; + +const rowStyle = { + display: "flex", + justifyContent: "space-between", + gap: 12, +} satisfies React.CSSProperties; + +const buttonStyle = { + border: "1px solid #1f2937", + background: "#111827", + color: "#fff", + borderRadius: 6, + padding: "6px 10px", + font: "inherit", + cursor: "pointer", +} satisfies React.CSSProperties; + +function SurfaceRows({ data }: { data: SurfaceStatus }) { + return ( +
+
Status{data.status}
+
Namespace{data.databaseNamespace}
+
Routes{data.routeKeys.join(", ")}
+
Capabilities{data.capabilities.length}
+
+ ); +} + +export function DashboardWidget({ context }: PluginWidgetProps) { + const { data, loading, error } = usePluginData("surface-status", { + companyId: context.companyId, + }); + + if (loading) return
Loading orchestration smoke status...
; + if (error) return
Orchestration smoke error: {error.message}
; + if (!data) return null; + + return ( +
+ Orchestration Smoke + +
Checked {data.checkedAt}
+
+ ); +} + +export function IssuePanel({ context }: PluginDetailTabProps) { + const { data, loading, error, refresh } = usePluginData("surface-status", { + companyId: context.companyId, + issueId: context.entityId, + }); + const initialize = usePluginAction("initialize-smoke"); + + if (loading) return
Loading orchestration smoke...
; + if (error) return
Orchestration smoke error: {error.message}
; + if (!data) return null; + + return ( +
+
+ Orchestration Smoke + +
+ + {data.summary ? ( +
+
Child{data.summary.childIssueId ?? "none"}
+
Blocker{data.summary.blockerIssueId ?? "none"}
+
Billing{data.summary.billingCode}
+
Subtree{data.summary.subtreeIssueIds.length}
+
Wakeup{data.summary.wakeupQueued ? "queued" : "not queued"}
+
+ ) : ( +
No smoke run recorded for this issue.
+ )} +
+ ); +} + +export function SettingsPage({ context }: PluginSettingsPageProps) { + const { data, loading, error } = usePluginData("surface-status", { + companyId: context.companyId, + }); + + if (loading) return
Loading orchestration smoke settings...
; + if (error) return
Orchestration smoke settings error: {error.message}
; + if (!data) return null; + + return ( +
+ Orchestration Smoke Surface + +
+ ); +} diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/src/worker.ts b/packages/plugins/examples/plugin-orchestration-smoke-example/src/worker.ts new file mode 100644 index 0000000000..e9a567c3db --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/src/worker.ts @@ -0,0 +1,253 @@ +import { randomUUID } from "node:crypto"; +import { definePlugin, runWorker, type PluginApiRequestInput } from "@paperclipai/plugin-sdk"; + +type SmokeInput = { + companyId: string; + issueId: string; + assigneeAgentId?: string | null; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; +}; + +type SmokeSummary = { + rootIssueId: string; + childIssueId: string | null; + blockerIssueId: string | null; + billingCode: string; + joinedRows: unknown[]; + subtreeIssueIds: string[]; + wakeupQueued: boolean; +}; + +let readSmokeSummary: ((companyId: string, issueId: string) => Promise) | null = null; +let initializeSmoke: ((input: SmokeInput) => Promise) | null = null; + +function tableName(namespace: string) { + return `${namespace}.smoke_runs`; +} + +function stringField(value: unknown): string | null { + return typeof value === "string" && value.trim().length > 0 ? 
value : null; +} + +const plugin = definePlugin({ + async setup(ctx) { + readSmokeSummary = async function readSummary(companyId: string, issueId: string): Promise { + const rows = await ctx.db.query<{ + root_issue_id: string; + child_issue_id: string | null; + blocker_issue_id: string | null; + billing_code: string; + issue_title: string; + last_summary: unknown; + }>( + `SELECT s.root_issue_id, s.child_issue_id, s.blocker_issue_id, s.billing_code, i.title AS issue_title, s.last_summary + FROM ${tableName(ctx.db.namespace)} s + JOIN public.issues i ON i.id = s.root_issue_id + WHERE s.root_issue_id = $1`, + [issueId], + ); + const row = rows[0]; + if (!row) return null; + const orchestration = await ctx.issues.summaries.getOrchestration({ + issueId, + companyId, + includeSubtree: true, + billingCode: row.billing_code, + }); + return { + rootIssueId: row.root_issue_id, + childIssueId: row.child_issue_id, + blockerIssueId: row.blocker_issue_id, + billingCode: row.billing_code, + joinedRows: rows, + subtreeIssueIds: orchestration.subtreeIssueIds, + wakeupQueued: Boolean((row.last_summary as { wakeupQueued?: unknown } | null)?.wakeupQueued), + }; + }; + + initializeSmoke = async function runSmoke(input: SmokeInput): Promise { + const root = await ctx.issues.get(input.issueId, input.companyId); + if (!root) throw new Error(`Issue not found: ${input.issueId}`); + + const billingCode = `plugin-smoke:${input.issueId}`; + const actor = { + actorAgentId: input.actorAgentId ?? null, + actorUserId: input.actorUserId ?? null, + actorRunId: input.actorRunId ?? 
null, + }; + const blocker = await ctx.issues.create({ + companyId: input.companyId, + parentId: input.issueId, + inheritExecutionWorkspaceFromIssueId: input.issueId, + title: "Orchestration smoke blocker", + description: "Resolved blocker used to verify plugin relation writes without preventing the smoke wakeup.", + status: "done", + priority: "low", + billingCode, + originKind: `plugin:${ctx.manifest.id}:blocker`, + originId: `${input.issueId}:blocker`, + actor, + }); + + const child = await ctx.issues.create({ + companyId: input.companyId, + parentId: input.issueId, + inheritExecutionWorkspaceFromIssueId: input.issueId, + title: "Orchestration smoke child", + description: "Generated by the orchestration smoke plugin to verify issue, document, relation, wakeup, and summary APIs.", + status: "todo", + priority: "medium", + assigneeAgentId: input.assigneeAgentId ?? root.assigneeAgentId ?? undefined, + billingCode, + originKind: `plugin:${ctx.manifest.id}:child`, + originId: `${input.issueId}:child`, + blockedByIssueIds: [blocker.id], + actor, + }); + + await ctx.issues.relations.setBlockedBy(child.id, [blocker.id], input.companyId, actor); + await ctx.issues.documents.upsert({ + issueId: child.id, + companyId: input.companyId, + key: "orchestration-smoke", + title: "Orchestration Smoke", + format: "markdown", + body: [ + "# Orchestration Smoke", + "", + `- Root issue: ${input.issueId}`, + `- Child issue: ${child.id}`, + `- Billing code: ${billingCode}`, + ].join("\n"), + changeSummary: "Recorded orchestration smoke output", + }); + + const wakeup = await ctx.issues.requestWakeup(child.id, input.companyId, { + reason: "plugin:orchestration_smoke", + contextSource: "plugin-orchestration-smoke", + idempotencyKey: `${input.issueId}:child`, + ...actor, + }); + const orchestration = await ctx.issues.summaries.getOrchestration({ + issueId: input.issueId, + companyId: input.companyId, + includeSubtree: true, + billingCode, + }); + const summarySnapshot = { + childIssueId: 
child.id, + blockerIssueId: blocker.id, + wakeupQueued: wakeup.queued, + subtreeIssueIds: orchestration.subtreeIssueIds, + }; + + await ctx.db.execute( + `INSERT INTO ${tableName(ctx.db.namespace)} (id, root_issue_id, child_issue_id, blocker_issue_id, billing_code, last_summary) + VALUES ($1, $2, $3, $4, $5, $6::jsonb) + ON CONFLICT (id) DO UPDATE SET + child_issue_id = EXCLUDED.child_issue_id, + blocker_issue_id = EXCLUDED.blocker_issue_id, + billing_code = EXCLUDED.billing_code, + last_summary = EXCLUDED.last_summary, + updated_at = now()`, + [ + randomUUID(), + input.issueId, + child.id, + blocker.id, + billingCode, + JSON.stringify(summarySnapshot), + ], + ); + + return { + rootIssueId: input.issueId, + childIssueId: child.id, + blockerIssueId: blocker.id, + billingCode, + joinedRows: await ctx.db.query( + `SELECT s.id, s.billing_code, i.title AS root_title + FROM ${tableName(ctx.db.namespace)} s + JOIN public.issues i ON i.id = s.root_issue_id + WHERE s.root_issue_id = $1`, + [input.issueId], + ), + subtreeIssueIds: orchestration.subtreeIssueIds, + wakeupQueued: wakeup.queued, + }; + }; + + ctx.data.register("surface-status", async (params) => { + const companyId = stringField(params.companyId); + const issueId = stringField(params.issueId); + return { + status: "ok", + checkedAt: new Date().toISOString(), + databaseNamespace: ctx.db.namespace, + routeKeys: (ctx.manifest.apiRoutes ?? []).map((route) => route.routeKey), + capabilities: ctx.manifest.capabilities, + summary: companyId && issueId ? await readSmokeSummary?.(companyId, issueId) ?? 
null : null, + }; + }); + + ctx.actions.register("initialize-smoke", async (params) => { + const companyId = stringField(params.companyId); + const issueId = stringField(params.issueId); + if (!companyId || !issueId) throw new Error("companyId and issueId are required"); + if (!initializeSmoke) throw new Error("Smoke initializer is not ready"); + return initializeSmoke({ + companyId, + issueId, + assigneeAgentId: stringField(params.assigneeAgentId), + actorAgentId: stringField(params.actorAgentId), + actorUserId: stringField(params.actorUserId), + actorRunId: stringField(params.actorRunId), + }); + }); + }, + + async onApiRequest(input: PluginApiRequestInput) { + if (input.routeKey === "summary") { + const issueId = input.params.issueId; + return { + body: await readSmokeSummary?.(input.companyId, issueId) ?? null, + }; + } + + if (input.routeKey === "initialize") { + if (!initializeSmoke) throw new Error("Smoke initializer is not ready"); + const body = input.body as Record | null; + return { + status: 201, + body: await initializeSmoke({ + companyId: input.companyId, + issueId: input.params.issueId, + assigneeAgentId: stringField(body?.assigneeAgentId), + actorAgentId: input.actor.agentId ?? null, + actorUserId: input.actor.userId ?? null, + actorRunId: input.actor.runId ?? 
null, + }), + }; + } + + return { + status: 404, + body: { error: `Unknown orchestration smoke route: ${input.routeKey}` }, + }; + }, + + async onHealth() { + return { + status: "ok", + message: "Orchestration smoke plugin worker is running", + details: { + surfaces: ["database", "scoped-api-route", "issue-panel", "orchestration-apis"], + }, + }; + } +}); + +export default plugin; +runWorker(plugin, import.meta.url); diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/tests/plugin.spec.ts b/packages/plugins/examples/plugin-orchestration-smoke-example/tests/plugin.spec.ts new file mode 100644 index 0000000000..7ac7b8e519 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/tests/plugin.spec.ts @@ -0,0 +1,162 @@ +import { randomUUID } from "node:crypto"; +import { describe, expect, it } from "vitest"; +import { pluginManifestV1Schema, type Issue } from "@paperclipai/shared"; +import { createTestHarness } from "@paperclipai/plugin-sdk/testing"; +import manifest from "../src/manifest.js"; +import plugin from "../src/worker.js"; + +function issue(input: Partial & Pick): Issue { + const now = new Date(); + const { id, companyId, title, ...rest } = input; + return { + id, + companyId, + projectId: null, + projectWorkspaceId: null, + goalId: null, + parentId: null, + title, + description: null, + status: "todo", + priority: "medium", + assigneeAgentId: null, + assigneeUserId: null, + checkoutRunId: null, + executionRunId: null, + executionAgentNameKey: null, + executionLockedAt: null, + createdByAgentId: null, + createdByUserId: null, + issueNumber: null, + identifier: null, + originKind: "manual", + originId: null, + originRunId: null, + requestDepth: 0, + billingCode: null, + assigneeAdapterOverrides: null, + executionWorkspaceId: null, + executionWorkspacePreference: null, + executionWorkspaceSettings: null, + startedAt: null, + completedAt: null, + cancelledAt: null, + hiddenAt: null, + createdAt: now, + updatedAt: now, 
+ ...rest, + }; +} + +describe("orchestration smoke plugin", () => { + it("declares the Phase 1 orchestration surfaces", () => { + expect(pluginManifestV1Schema.parse(manifest)).toMatchObject({ + id: "paperclipai.plugin-orchestration-smoke-example", + database: { + migrationsDir: "migrations", + coreReadTables: ["issues"], + }, + apiRoutes: [ + expect.objectContaining({ routeKey: "initialize" }), + expect.objectContaining({ routeKey: "summary" }), + ], + }); + }); + + it("creates plugin-owned orchestration rows, issue tree, document, wakeup, and summary reads", async () => { + const companyId = randomUUID(); + const rootIssueId = randomUUID(); + const agentId = randomUUID(); + const harness = createTestHarness({ manifest }); + harness.seed({ + issues: [ + issue({ + id: rootIssueId, + companyId, + title: "Root orchestration issue", + assigneeAgentId: agentId, + }), + ], + }); + await plugin.definition.setup(harness.ctx); + + const result = await harness.performAction<{ + rootIssueId: string; + childIssueId: string; + blockerIssueId: string; + billingCode: string; + subtreeIssueIds: string[]; + wakeupQueued: boolean; + }>("initialize-smoke", { + companyId, + issueId: rootIssueId, + assigneeAgentId: agentId, + }); + + expect(result.rootIssueId).toBe(rootIssueId); + expect(result.childIssueId).toEqual(expect.any(String)); + expect(result.blockerIssueId).toEqual(expect.any(String)); + expect(result.billingCode).toBe(`plugin-smoke:${rootIssueId}`); + expect(result.wakeupQueued).toBe(true); + expect(result.subtreeIssueIds).toEqual(expect.arrayContaining([rootIssueId, result.childIssueId])); + expect(harness.dbExecutes[0]?.sql).toContain(".smoke_runs"); + expect(harness.dbQueries.some((entry) => entry.sql.includes("JOIN public.issues"))).toBe(true); + + const relations = await harness.ctx.issues.relations.get(result.childIssueId, companyId); + expect(relations.blockedBy).toEqual([ + expect.objectContaining({ + id: result.blockerIssueId, + status: "done", + }), + ]); + 
const docs = await harness.ctx.issues.documents.list(result.childIssueId, companyId); + expect(docs).toEqual([ + expect.objectContaining({ + key: "orchestration-smoke", + title: "Orchestration Smoke", + }), + ]); + }); + + it("dispatches the scoped API route through the same smoke path", async () => { + const companyId = randomUUID(); + const rootIssueId = randomUUID(); + const agentId = randomUUID(); + const harness = createTestHarness({ manifest }); + harness.seed({ + issues: [ + issue({ + id: rootIssueId, + companyId, + title: "Scoped API root", + assigneeAgentId: agentId, + }), + ], + }); + await plugin.definition.setup(harness.ctx); + + await expect(plugin.definition.onApiRequest?.({ + routeKey: "initialize", + method: "POST", + path: `/issues/${rootIssueId}/smoke`, + params: { issueId: rootIssueId }, + query: {}, + body: { assigneeAgentId: agentId }, + actor: { + actorType: "user", + actorId: "board", + userId: "board", + agentId: null, + runId: null, + }, + companyId, + headers: {}, + })).resolves.toMatchObject({ + status: 201, + body: expect.objectContaining({ + rootIssueId, + wakeupQueued: true, + }), + }); + }); +}); diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/tsconfig.json b/packages/plugins/examples/plugin-orchestration-smoke-example/tsconfig.json new file mode 100644 index 0000000000..a697519ed9 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "lib": [ + "ES2022", + "DOM" + ], + "jsx": "react-jsx", + "strict": true, + "skipLibCheck": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "outDir": "dist", + "rootDir": "." 
+ }, + "include": [ + "src", + "tests" + ], + "exclude": [ + "dist", + "node_modules" + ] +} diff --git a/packages/plugins/examples/plugin-orchestration-smoke-example/vitest.config.ts b/packages/plugins/examples/plugin-orchestration-smoke-example/vitest.config.ts new file mode 100644 index 0000000000..649a293ef9 --- /dev/null +++ b/packages/plugins/examples/plugin-orchestration-smoke-example/vitest.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + include: ["tests/**/*.spec.ts"], + environment: "node", + }, +}); diff --git a/packages/plugins/sdk/README.md b/packages/plugins/sdk/README.md index d6424921fa..e785b3eee7 100644 --- a/packages/plugins/sdk/README.md +++ b/packages/plugins/sdk/README.md @@ -118,10 +118,13 @@ Subscribe in `setup` with `ctx.events.on(name, handler)` or `ctx.events.on(name, | `project.created`, `project.updated` | project | | `project.workspace_created`, `project.workspace_updated`, `project.workspace_deleted` | project_workspace | | `issue.created`, `issue.updated`, `issue.comment.created` | issue | +| `issue.document.created`, `issue.document.updated`, `issue.document.deleted` | issue | +| `issue.relations.updated`, `issue.checked_out`, `issue.released`, `issue.assignment_wakeup_requested` | issue | | `agent.created`, `agent.updated`, `agent.status_changed` | agent | | `agent.run.started`, `agent.run.finished`, `agent.run.failed`, `agent.run.cancelled` | run | | `goal.created`, `goal.updated` | goal | | `approval.created`, `approval.decided` | approval | +| `budget.incident.opened`, `budget.incident.resolved` | budget_incident | | `cost_event.created` | cost | | `activity.logged` | activity | @@ -301,18 +304,29 @@ Declare in `manifest.capabilities`. 
Grouped by scope: | | `project.workspaces.read` | | | `issues.read` | | | `issue.comments.read` | +| | `issue.documents.read` | +| | `issue.relations.read` | +| | `issue.subtree.read` | | | `agents.read` | | | `goals.read` | | | `goals.create` | | | `goals.update` | | | `activity.read` | | | `costs.read` | +| | `issues.orchestration.read` | +| | `database.namespace.read` | | | `issues.create` | | | `issues.update` | +| | `issues.checkout` | +| | `issues.wakeup` | | | `issue.comments.create` | +| | `issue.documents.write` | +| | `issue.relations.write` | | | `activity.log.write` | | | `metrics.write` | | | `telemetry.track` | +| | `database.namespace.migrate` | +| | `database.namespace.write` | | **Instance** | `instance.settings.register` | | | `plugin.state.read` | | | `plugin.state.write` | @@ -320,6 +334,7 @@ Declare in `manifest.capabilities`. Grouped by scope: | | `events.emit` | | | `jobs.schedule` | | | `webhooks.receive` | +| | `api.routes.register` | | | `http.outbound` | | | `secrets.read-ref` | | **Agent** | `agent.tools.register` | @@ -337,6 +352,144 @@ Declare in `manifest.capabilities`. Grouped by scope: Full list in code: import `PLUGIN_CAPABILITIES` from `@paperclipai/plugin-sdk`. +### Restricted Database Namespace + +Trusted orchestration plugins can declare a host-owned PostgreSQL namespace: + +```ts +database: { + migrationsDir: "migrations", + coreReadTables: ["issues"], +} +``` + +Declare `database.namespace.migrate` and `database.namespace.read`; add +`database.namespace.write` when the worker needs runtime writes. Migrations run +before worker startup, are checksum-recorded, and may create or alter objects +only inside the plugin namespace. Runtime `ctx.db.query()` allows `SELECT` from +`ctx.db.namespace` plus manifest-whitelisted `public` core tables. Runtime +`ctx.db.execute()` allows `INSERT`, `UPDATE`, and `DELETE` only against the +plugin namespace. 
+ +### Scoped API Routes + +Manifest-declared `apiRoutes` expose JSON routes under +`/api/plugins/:pluginId/api/*` without letting a plugin claim core paths: + +```ts +apiRoutes: [ + { + routeKey: "initialize", + method: "POST", + path: "/issues/:issueId/smoke", + auth: "board-or-agent", + capability: "api.routes.register", + checkoutPolicy: "required-for-agent-in-progress", + companyResolution: { from: "issue", param: "issueId" }, + }, +] +``` + +Implement `onApiRequest(input)` in the worker to handle the route. The host +performs auth, company access, capability, route matching, and checkout policy +before dispatch. The worker receives route params, query, parsed JSON body, +sanitized headers, actor context, and `companyId`; responses are JSON `{ status?, +headers?, body? }`. + +## Issue Orchestration APIs + +Workflow plugins can use `ctx.issues` for orchestration-grade issue operations without importing host server internals. + +Expanded create/update fields include blockers, billing code, board or agent assignees, labels, namespaced plugin origins, request depth, and safe execution workspace fields: + +```ts +const child = await ctx.issues.create({ + companyId, + parentId: missionIssueId, + inheritExecutionWorkspaceFromIssueId: missionIssueId, + title: "Implement feature slice", + status: "todo", + assigneeAgentId: workerAgentId, + billingCode: "mission:alpha", + originKind: "plugin:paperclip.missions:feature", + originId: "mission-alpha:feature-1", + blockedByIssueIds: [planningIssueId], +}); +``` + +If `originKind` is omitted, the host stores `plugin:`. Plugins may use sub-kinds such as `plugin::feature`, but the host rejects attempts to set another plugin's namespace. 
+ +Blocker relationships are also exposed as first-class helpers: + +```ts +const relations = await ctx.issues.relations.get(child.id, companyId); +await ctx.issues.relations.setBlockedBy(child.id, [planningIssueId], companyId); +await ctx.issues.relations.addBlockers(child.id, [validationIssueId], companyId); +await ctx.issues.relations.removeBlockers(child.id, [planningIssueId], companyId); +``` + +Subtree reads can include just the issue tree, or compact related data for orchestration dashboards: + +```ts +const subtree = await ctx.issues.getSubtree(missionIssueId, companyId, { + includeRoot: true, + includeRelations: true, + includeDocuments: true, + includeActiveRuns: true, + includeAssignees: true, +}); +``` + +Agent-run actions can assert checkout ownership before mutating in-progress work: + +```ts +await ctx.issues.assertCheckoutOwner({ + issueId, + companyId, + actorAgentId: runCtx.agentId, + actorRunId: runCtx.runId, +}); +``` + +Plugins can request assignment wakeups through the host so budget stops, execution locks, blocker checks, and heartbeat policy still apply: + +```ts +await ctx.issues.requestWakeup(child.id, companyId, { + reason: "mission_advance", + contextSource: "missions.advance", +}); + +await ctx.issues.requestWakeups([featureIssueId, validationIssueId], companyId, { + reason: "mission_advance", + contextSource: "missions.advance", + idempotencyKeyPrefix: `mission:${missionIssueId}:advance`, +}); +``` + +Use `ctx.issues.summaries.getOrchestration()` when a workflow needs compact reads across a root issue or subtree: + +```ts +const summary = await ctx.issues.summaries.getOrchestration({ + issueId: missionIssueId, + companyId, + includeSubtree: true, + billingCode: "mission:alpha", +}); +``` + +Required capabilities: + +| API | Capability | +|-----|------------| +| `ctx.issues.relations.get` | `issue.relations.read` | +| `ctx.issues.relations.setBlockedBy` / `addBlockers` / `removeBlockers` | `issue.relations.write` | +| 
`ctx.issues.getSubtree` | `issue.subtree.read` | +| `ctx.issues.assertCheckoutOwner` | `issues.checkout` | +| `ctx.issues.requestWakeup` / `requestWakeups` | `issues.wakeup` | +| `ctx.issues.summaries.getOrchestration` | `issues.orchestration.read` | + +Plugin-originated mutations are logged with `actorType: "plugin"` and details fields `sourcePluginId`, `sourcePluginKey`, `initiatingActorType`, `initiatingActorId`, and `initiatingRunId` when a user or agent run initiated the plugin work. + ## UI quick start ```tsx diff --git a/packages/plugins/sdk/src/define-plugin.ts b/packages/plugins/sdk/src/define-plugin.ts index 43fefdd281..fc0dd40874 100644 --- a/packages/plugins/sdk/src/define-plugin.ts +++ b/packages/plugins/sdk/src/define-plugin.ts @@ -107,6 +107,30 @@ export interface PluginWebhookInput { requestId: string; } +export interface PluginApiRequestInput { + routeKey: string; + method: string; + path: string; + params: Record; + query: Record; + body: unknown; + actor: { + actorType: "user" | "agent"; + actorId: string; + agentId?: string | null; + userId?: string | null; + runId?: string | null; + }; + companyId: string; + headers: Record; +} + +export interface PluginApiResponse { + status?: number; + headers?: Record; + body?: unknown; +} + // --------------------------------------------------------------------------- // Plugin definition // --------------------------------------------------------------------------- @@ -197,6 +221,13 @@ export interface PluginDefinition { * @see PLUGIN_SPEC.md §13.7 — `handleWebhook` */ onWebhook?(input: PluginWebhookInput): Promise; + + /** + * Called for manifest-declared scoped JSON API routes under + * `/api/plugins/:pluginId/api/*` after the host has enforced auth, company + * access, capabilities, and checkout policy. 
+ */ + onApiRequest?(input: PluginApiRequestInput): Promise; } // --------------------------------------------------------------------------- diff --git a/packages/plugins/sdk/src/host-client-factory.ts b/packages/plugins/sdk/src/host-client-factory.ts index 8b98cc87a3..4041745921 100644 --- a/packages/plugins/sdk/src/host-client-factory.ts +++ b/packages/plugins/sdk/src/host-client-factory.ts @@ -97,6 +97,13 @@ export interface HostServices { delete(params: WorkerToHostMethods["state.delete"][0]): Promise; }; + /** Provides restricted plugin database namespace methods. */ + db: { + namespace(params: WorkerToHostMethods["db.namespace"][0]): Promise; + query(params: WorkerToHostMethods["db.query"][0]): Promise; + execute(params: WorkerToHostMethods["db.execute"][0]): Promise; + }; + /** Provides `entities.upsert`, `entities.list`. */ entities: { upsert(params: WorkerToHostMethods["entities.upsert"][0]): Promise; @@ -160,12 +167,21 @@ export interface HostServices { getWorkspaceForIssue(params: WorkerToHostMethods["projects.getWorkspaceForIssue"][0]): Promise; }; - /** Provides `issues.list`, `issues.get`, `issues.create`, `issues.update`, `issues.listComments`, `issues.createComment`. */ + /** Provides issue read/write, relation, checkout, wakeup, summary, comment methods. 
*/ issues: { list(params: WorkerToHostMethods["issues.list"][0]): Promise; get(params: WorkerToHostMethods["issues.get"][0]): Promise; create(params: WorkerToHostMethods["issues.create"][0]): Promise; update(params: WorkerToHostMethods["issues.update"][0]): Promise; + getRelations(params: WorkerToHostMethods["issues.relations.get"][0]): Promise; + setBlockedBy(params: WorkerToHostMethods["issues.relations.setBlockedBy"][0]): Promise; + addBlockers(params: WorkerToHostMethods["issues.relations.addBlockers"][0]): Promise; + removeBlockers(params: WorkerToHostMethods["issues.relations.removeBlockers"][0]): Promise; + assertCheckoutOwner(params: WorkerToHostMethods["issues.assertCheckoutOwner"][0]): Promise; + getSubtree(params: WorkerToHostMethods["issues.getSubtree"][0]): Promise; + requestWakeup(params: WorkerToHostMethods["issues.requestWakeup"][0]): Promise; + requestWakeups(params: WorkerToHostMethods["issues.requestWakeups"][0]): Promise; + getOrchestrationSummary(params: WorkerToHostMethods["issues.summaries.getOrchestration"][0]): Promise; listComments(params: WorkerToHostMethods["issues.listComments"][0]): Promise; createComment(params: WorkerToHostMethods["issues.createComment"][0]): Promise; }; @@ -269,6 +285,10 @@ const METHOD_CAPABILITY_MAP: Record { + return services.db.namespace(params); + }), + "db.query": gated("db.query", async (params) => { + return services.db.query(params); + }), + "db.execute": gated("db.execute", async (params) => { + return services.db.execute(params); + }), + // Entities "entities.upsert": gated("entities.upsert", async (params) => { return services.entities.upsert(params); @@ -503,6 +542,33 @@ export function createHostClientHandlers( "issues.update": gated("issues.update", async (params) => { return services.issues.update(params); }), + "issues.relations.get": gated("issues.relations.get", async (params) => { + return services.issues.getRelations(params); + }), + "issues.relations.setBlockedBy": 
gated("issues.relations.setBlockedBy", async (params) => { + return services.issues.setBlockedBy(params); + }), + "issues.relations.addBlockers": gated("issues.relations.addBlockers", async (params) => { + return services.issues.addBlockers(params); + }), + "issues.relations.removeBlockers": gated("issues.relations.removeBlockers", async (params) => { + return services.issues.removeBlockers(params); + }), + "issues.assertCheckoutOwner": gated("issues.assertCheckoutOwner", async (params) => { + return services.issues.assertCheckoutOwner(params); + }), + "issues.getSubtree": gated("issues.getSubtree", async (params) => { + return services.issues.getSubtree(params); + }), + "issues.requestWakeup": gated("issues.requestWakeup", async (params) => { + return services.issues.requestWakeup(params); + }), + "issues.requestWakeups": gated("issues.requestWakeups", async (params) => { + return services.issues.requestWakeups(params); + }), + "issues.summaries.getOrchestration": gated("issues.summaries.getOrchestration", async (params) => { + return services.issues.getOrchestrationSummary(params); + }), "issues.listComments": gated("issues.listComments", async (params) => { return services.issues.listComments(params); }), diff --git a/packages/plugins/sdk/src/index.ts b/packages/plugins/sdk/src/index.ts index 69e22c4f11..8232621c4d 100644 --- a/packages/plugins/sdk/src/index.ts +++ b/packages/plugins/sdk/src/index.ts @@ -95,6 +95,8 @@ export type { PluginHealthDiagnostics, PluginConfigValidationResult, PluginWebhookInput, + PluginApiRequestInput, + PluginApiResponse, } from "./define-plugin.js"; export type { TestHarness, @@ -171,6 +173,22 @@ export type { PluginProjectsClient, PluginCompaniesClient, PluginIssuesClient, + PluginIssueMutationActor, + PluginIssueRelationsClient, + PluginIssueRelationSummary, + PluginIssueCheckoutOwnership, + PluginIssueWakeupResult, + PluginIssueWakeupBatchResult, + PluginIssueRunSummary, + PluginIssueApprovalSummary, + PluginIssueCostSummary, + 
PluginBudgetIncidentSummary, + PluginIssueInvocationBlockSummary, + PluginIssueOrchestrationSummary, + PluginIssueSubtreeOptions, + PluginIssueAssigneeSummary, + PluginIssueSubtree, + PluginIssueSummariesClient, PluginAgentsClient, PluginAgentSessionsClient, AgentSession, @@ -203,8 +221,10 @@ export type { Project, Issue, IssueComment, + IssueDocumentSummary, Agent, Goal, + PluginDatabaseClient, } from "./types.js"; // Manifest and constant types re-exported from @paperclipai/shared @@ -221,7 +241,12 @@ export type { PluginLauncherRenderDeclaration, PluginLauncherDeclaration, PluginMinimumHostVersion, + PluginDatabaseDeclaration, + PluginApiRouteCompanyResolution, + PluginApiRouteDeclaration, PluginRecord, + PluginDatabaseNamespaceRecord, + PluginMigrationRecord, PluginConfig, JsonSchema, PluginStatus, @@ -238,6 +263,13 @@ export type { PluginJobRunStatus, PluginJobRunTrigger, PluginWebhookDeliveryStatus, + PluginDatabaseCoreReadTable, + PluginDatabaseMigrationStatus, + PluginDatabaseNamespaceMode, + PluginDatabaseNamespaceStatus, + PluginApiRouteAuthMode, + PluginApiRouteCheckoutPolicy, + PluginApiRouteMethod, PluginEventType, PluginBridgeErrorCode, } from "./types.js"; diff --git a/packages/plugins/sdk/src/protocol.ts b/packages/plugins/sdk/src/protocol.ts index 9d81eb8af7..f96db65bbb 100644 --- a/packages/plugins/sdk/src/protocol.ts +++ b/packages/plugins/sdk/src/protocol.ts @@ -34,6 +34,12 @@ export type { PluginLauncherRenderContextSnapshot } from "@paperclipai/shared"; import type { PluginEvent, + PluginIssueCheckoutOwnership, + PluginIssueOrchestrationSummary, + PluginIssueRelationSummary, + PluginIssueSubtree, + PluginIssueWakeupBatchResult, + PluginIssueWakeupResult, PluginJobContext, PluginWorkspace, ToolRunContext, @@ -41,6 +47,8 @@ import type { } from "./types.js"; import type { PluginHealthDiagnostics, + PluginApiRequestInput, + PluginApiResponse, PluginConfigValidationResult, PluginWebhookInput, } from "./define-plugin.js"; @@ -219,6 +227,8 @@ export 
interface InitializeParams { }; /** Host API version. */ apiVersion: number; + /** Host-derived plugin database namespace, when the manifest declares database access. */ + databaseNamespace?: string | null; } /** @@ -374,6 +384,8 @@ export interface HostToWorkerMethods { runJob: [params: RunJobParams, result: void]; /** @see PLUGIN_SPEC.md §13.7 */ handleWebhook: [params: PluginWebhookInput, result: void]; + /** Scoped plugin API route dispatch. */ + handleApiRequest: [params: PluginApiRequestInput, result: PluginApiResponse]; /** @see PLUGIN_SPEC.md §13.8 */ getData: [params: GetDataParams, result: unknown]; /** @see PLUGIN_SPEC.md §13.9 */ @@ -399,6 +411,7 @@ export const HOST_TO_WORKER_OPTIONAL_METHODS: readonly HostToWorkerMethodName[] "onEvent", "runJob", "handleWebhook", + "handleApiRequest", "getData", "performAction", "executeTool", @@ -432,6 +445,20 @@ export interface WorkerToHostMethods { result: void, ]; + // Restricted plugin database namespace + "db.namespace": [ + params: Record, + result: string, + ]; + "db.query": [ + params: { sql: string; params?: unknown[] }, + result: unknown[], + ]; + "db.execute": [ + params: { sql: string; params?: unknown[] }, + result: { rowCount: number }, + ]; + // Entities "entities.upsert": [ params: { @@ -569,6 +596,8 @@ export interface WorkerToHostMethods { companyId: string; projectId?: string; assigneeAgentId?: string; + originKind?: string; + originId?: string; status?: string; limit?: number; offset?: number; @@ -588,8 +617,23 @@ export interface WorkerToHostMethods { inheritExecutionWorkspaceFromIssueId?: string; title: string; description?: string; + status?: string; priority?: string; assigneeAgentId?: string; + assigneeUserId?: string | null; + requestDepth?: number; + billingCode?: string | null; + originKind?: string | null; + originId?: string | null; + originRunId?: string | null; + blockedByIssueIds?: string[]; + labelIds?: string[]; + executionWorkspaceId?: string | null; + 
executionWorkspacePreference?: string | null; + executionWorkspaceSettings?: Record | null; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; }, result: Issue, ]; @@ -601,6 +645,99 @@ export interface WorkerToHostMethods { }, result: Issue, ]; + "issues.relations.get": [ + params: { issueId: string; companyId: string }, + result: PluginIssueRelationSummary, + ]; + "issues.relations.setBlockedBy": [ + params: { + issueId: string; + companyId: string; + blockedByIssueIds: string[]; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; + }, + result: PluginIssueRelationSummary, + ]; + "issues.relations.addBlockers": [ + params: { + issueId: string; + companyId: string; + blockerIssueIds: string[]; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; + }, + result: PluginIssueRelationSummary, + ]; + "issues.relations.removeBlockers": [ + params: { + issueId: string; + companyId: string; + blockerIssueIds: string[]; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; + }, + result: PluginIssueRelationSummary, + ]; + "issues.assertCheckoutOwner": [ + params: { + issueId: string; + companyId: string; + actorAgentId: string; + actorRunId: string; + }, + result: PluginIssueCheckoutOwnership, + ]; + "issues.getSubtree": [ + params: { + issueId: string; + companyId: string; + includeRoot?: boolean; + includeRelations?: boolean; + includeDocuments?: boolean; + includeActiveRuns?: boolean; + includeAssignees?: boolean; + }, + result: PluginIssueSubtree, + ]; + "issues.requestWakeup": [ + params: { + issueId: string; + companyId: string; + reason?: string; + contextSource?: string; + idempotencyKey?: string | null; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; + }, + result: PluginIssueWakeupResult, + ]; + "issues.requestWakeups": [ + params: { + issueIds: string[]; + 
companyId: string; + reason?: string; + contextSource?: string; + idempotencyKeyPrefix?: string | null; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; + }, + result: PluginIssueWakeupBatchResult[], + ]; + "issues.summaries.getOrchestration": [ + params: { + issueId: string; + companyId: string; + includeSubtree?: boolean; + billingCode?: string | null; + }, + result: PluginIssueOrchestrationSummary, + ]; "issues.listComments": [ params: { issueId: string; companyId: string }, result: IssueComment[], diff --git a/packages/plugins/sdk/src/testing.ts b/packages/plugins/sdk/src/testing.ts index 41e91d542f..0a04164309 100644 --- a/packages/plugins/sdk/src/testing.ts +++ b/packages/plugins/sdk/src/testing.ts @@ -3,10 +3,12 @@ import type { PaperclipPluginManifestV1, PluginCapability, PluginEventType, + PluginIssueOriginKind, Company, Project, Issue, IssueComment, + IssueDocument, Agent, Goal, } from "@paperclipai/shared"; @@ -72,6 +74,8 @@ export interface TestHarness { activity: Array<{ message: string; entityType?: string; entityId?: string; metadata?: Record }>; metrics: Array<{ name: string; value: number; tags?: Record }>; telemetry: Array<{ eventName: string; dimensions?: Record }>; + dbQueries: Array<{ sql: string; params?: unknown[] }>; + dbExecutes: Array<{ sql: string; params?: unknown[] }>; } type EventRegistration = { @@ -134,6 +138,8 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { const activity: TestHarness["activity"] = []; const metrics: TestHarness["metrics"] = []; const telemetry: TestHarness["telemetry"] = []; + const dbQueries: TestHarness["dbQueries"] = []; + const dbExecutes: TestHarness["dbExecutes"] = []; const state = new Map(); const entities = new Map(); @@ -141,7 +147,9 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { const companies = new Map(); const projects = new Map(); const issues = new Map(); + const blockedByIssueIds = new 
Map(); const issueComments = new Map(); + const issueDocuments = new Map(); const agents = new Map(); const goals = new Map(); const projectWorkspaces = new Map(); @@ -156,6 +164,42 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { const actionHandlers = new Map) => Promise>(); const toolHandlers = new Map Promise>(); + function issueRelationSummary(issueId: string) { + const issue = issues.get(issueId); + if (!issue) throw new Error(`Issue not found: ${issueId}`); + const summarize = (candidateId: string) => { + const related = issues.get(candidateId); + if (!related || related.companyId !== issue.companyId) return null; + return { + id: related.id, + identifier: related.identifier, + title: related.title, + status: related.status, + priority: related.priority, + assigneeAgentId: related.assigneeAgentId, + assigneeUserId: related.assigneeUserId, + }; + }; + const blockedBy = (blockedByIssueIds.get(issueId) ?? []) + .map(summarize) + .filter((value): value is NonNullable => value !== null); + const blocks = [...blockedByIssueIds.entries()] + .filter(([, blockers]) => blockers.includes(issueId)) + .map(([blockedIssueId]) => summarize(blockedIssueId)) + .filter((value): value is NonNullable => value !== null); + return { blockedBy, blocks }; + } + + const defaultPluginOriginKind: PluginIssueOriginKind = `plugin:${manifest.id}`; + function normalizePluginOriginKind(originKind: unknown = defaultPluginOriginKind): PluginIssueOriginKind { + if (originKind == null || originKind === "") return defaultPluginOriginKind; + if (typeof originKind !== "string") throw new Error("Plugin issue originKind must be a string"); + if (originKind === defaultPluginOriginKind || originKind.startsWith(`${defaultPluginOriginKind}:`)) { + return originKind as PluginIssueOriginKind; + } + throw new Error(`Plugin may only use originKind values under ${defaultPluginOriginKind}`); + } + const ctx: PluginContext = { manifest, config: { @@ -195,6 +239,19 @@ export 
function createTestHarness(options: TestHarnessOptions): TestHarness { launchers.set(launcher.id, launcher); }, }, + db: { + namespace: manifest.database ? `test_${manifest.id.replace(/[^a-z0-9_]+/g, "_")}` : "", + async query(sql, params) { + requireCapability(manifest, capabilitySet, "database.namespace.read"); + dbQueries.push({ sql, params }); + return []; + }, + async execute(sql, params) { + requireCapability(manifest, capabilitySet, "database.namespace.write"); + dbExecutes.push({ sql, params }); + return { rowCount: 0 }; + }, + }, http: { async fetch(url, init) { requireCapability(manifest, capabilitySet, "http.outbound"); @@ -338,6 +395,11 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { out = out.filter((issue) => issue.companyId === companyId); if (input?.projectId) out = out.filter((issue) => issue.projectId === input.projectId); if (input?.assigneeAgentId) out = out.filter((issue) => issue.assigneeAgentId === input.assigneeAgentId); + if (input?.originKind) { + if (input.originKind.startsWith("plugin:")) normalizePluginOriginKind(input.originKind); + out = out.filter((issue) => issue.originKind === input.originKind); + } + if (input?.originId) out = out.filter((issue) => issue.originId === input.originId); if (input?.status) out = out.filter((issue) => issue.status === input.status); if (input?.offset) out = out.slice(input.offset); if (input?.limit) out = out.slice(0, input.limit); @@ -360,10 +422,10 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { parentId: input.parentId ?? null, title: input.title, description: input.description ?? null, - status: "todo", + status: input.status ?? "todo", priority: input.priority ?? "medium", assigneeAgentId: input.assigneeAgentId ?? null, - assigneeUserId: null, + assigneeUserId: input.assigneeUserId ?? 
null, checkoutRunId: null, executionRunId: null, executionAgentNameKey: null, @@ -372,12 +434,15 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { createdByUserId: null, issueNumber: null, identifier: null, - requestDepth: 0, - billingCode: null, + originKind: normalizePluginOriginKind(input.originKind), + originId: input.originId ?? null, + originRunId: input.originRunId ?? null, + requestDepth: input.requestDepth ?? 0, + billingCode: input.billingCode ?? null, assigneeAdapterOverrides: null, - executionWorkspaceId: null, - executionWorkspacePreference: null, - executionWorkspaceSettings: null, + executionWorkspaceId: input.executionWorkspaceId ?? null, + executionWorkspacePreference: input.executionWorkspacePreference ?? null, + executionWorkspaceSettings: input.executionWorkspaceSettings ?? null, startedAt: null, completedAt: null, cancelledAt: null, @@ -386,20 +451,75 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { updatedAt: now, }; issues.set(record.id, record); + if (input.blockedByIssueIds) blockedByIssueIds.set(record.id, [...new Set(input.blockedByIssueIds)]); return record; }, async update(issueId, patch, companyId) { requireCapability(manifest, capabilitySet, "issues.update"); const record = issues.get(issueId); if (!isInCompany(record, companyId)) throw new Error(`Issue not found: ${issueId}`); + const { blockedByIssueIds: nextBlockedByIssueIds, ...issuePatch } = patch; + if (issuePatch.originKind !== undefined) { + issuePatch.originKind = normalizePluginOriginKind(issuePatch.originKind); + } const updated: Issue = { ...record, - ...patch, + ...issuePatch, updatedAt: new Date(), }; issues.set(issueId, updated); + if (nextBlockedByIssueIds !== undefined) { + blockedByIssueIds.set(issueId, [...new Set(nextBlockedByIssueIds)]); + } return updated; }, + async assertCheckoutOwner(input) { + requireCapability(manifest, capabilitySet, "issues.checkout"); + const record = 
issues.get(input.issueId); + if (!isInCompany(record, input.companyId)) throw new Error(`Issue not found: ${input.issueId}`); + if ( + record.status !== "in_progress" || + record.assigneeAgentId !== input.actorAgentId || + (record.checkoutRunId !== null && record.checkoutRunId !== input.actorRunId) + ) { + throw new Error("Issue run ownership conflict"); + } + return { + issueId: record.id, + status: record.status, + assigneeAgentId: record.assigneeAgentId, + checkoutRunId: record.checkoutRunId, + adoptedFromRunId: null, + }; + }, + async requestWakeup(issueId, companyId) { + requireCapability(manifest, capabilitySet, "issues.wakeup"); + const record = issues.get(issueId); + if (!isInCompany(record, companyId)) throw new Error(`Issue not found: ${issueId}`); + if (!record.assigneeAgentId) throw new Error("Issue has no assigned agent to wake"); + if (["backlog", "done", "cancelled"].includes(record.status)) { + throw new Error(`Issue is not wakeable in status: ${record.status}`); + } + const unresolved = issueRelationSummary(issueId).blockedBy.filter((blocker) => blocker.status !== "done"); + if (unresolved.length > 0) throw new Error("Issue is blocked by unresolved blockers"); + return { queued: true, runId: randomUUID() }; + }, + async requestWakeups(issueIds, companyId) { + requireCapability(manifest, capabilitySet, "issues.wakeup"); + const results = []; + for (const issueId of issueIds) { + const record = issues.get(issueId); + if (!isInCompany(record, companyId)) throw new Error(`Issue not found: ${issueId}`); + if (!record.assigneeAgentId) throw new Error("Issue has no assigned agent to wake"); + if (["backlog", "done", "cancelled"].includes(record.status)) { + throw new Error(`Issue is not wakeable in status: ${record.status}`); + } + const unresolved = issueRelationSummary(issueId).blockedBy.filter((blocker) => blocker.status !== "done"); + if (unresolved.length > 0) throw new Error("Issue is blocked by unresolved blockers"); + results.push({ issueId, 
queued: true, runId: randomUUID() }); + } + return results; + }, async listComments(issueId, companyId) { requireCapability(manifest, capabilitySet, "issue.comments.read"); if (!isInCompany(issues.get(issueId), companyId)) return []; @@ -431,12 +551,14 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { async list(issueId, companyId) { requireCapability(manifest, capabilitySet, "issue.documents.read"); if (!isInCompany(issues.get(issueId), companyId)) return []; - return []; + return [...issueDocuments.values()] + .filter((document) => document.issueId === issueId && document.companyId === companyId) + .map(({ body: _body, ...summary }) => summary); }, - async get(issueId, _key, companyId) { + async get(issueId, key, companyId) { requireCapability(manifest, capabilitySet, "issue.documents.read"); if (!isInCompany(issues.get(issueId), companyId)) return null; - return null; + return issueDocuments.get(`${issueId}|${key}`) ?? null; }, async upsert(input) { requireCapability(manifest, capabilitySet, "issue.documents.write"); @@ -444,7 +566,27 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { if (!isInCompany(parentIssue, input.companyId)) { throw new Error(`Issue not found: ${input.issueId}`); } - throw new Error("documents.upsert is not implemented in test context"); + const now = new Date(); + const existing = issueDocuments.get(`${input.issueId}|${input.key}`); + const document: IssueDocument = { + id: existing?.id ?? randomUUID(), + companyId: input.companyId, + issueId: input.issueId, + key: input.key, + title: input.title ?? existing?.title ?? null, + format: "markdown", + latestRevisionId: randomUUID(), + latestRevisionNumber: (existing?.latestRevisionNumber ?? 0) + 1, + createdByAgentId: existing?.createdByAgentId ?? null, + createdByUserId: existing?.createdByUserId ?? null, + updatedByAgentId: null, + updatedByUserId: null, + createdAt: existing?.createdAt ?? 
now, + updatedAt: now, + body: input.body, + }; + issueDocuments.set(`${input.issueId}|${input.key}`, document); + return document; }, async delete(issueId, _key, companyId) { requireCapability(manifest, capabilitySet, "issue.documents.write"); @@ -452,6 +594,104 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { if (!isInCompany(parentIssue, companyId)) { throw new Error(`Issue not found: ${issueId}`); } + issueDocuments.delete(`${issueId}|${_key}`); + }, + }, + relations: { + async get(issueId, companyId) { + requireCapability(manifest, capabilitySet, "issue.relations.read"); + if (!isInCompany(issues.get(issueId), companyId)) throw new Error(`Issue not found: ${issueId}`); + return issueRelationSummary(issueId); + }, + async setBlockedBy(issueId, nextBlockedByIssueIds, companyId) { + requireCapability(manifest, capabilitySet, "issue.relations.write"); + if (!isInCompany(issues.get(issueId), companyId)) throw new Error(`Issue not found: ${issueId}`); + blockedByIssueIds.set(issueId, [...new Set(nextBlockedByIssueIds)]); + return issueRelationSummary(issueId); + }, + async addBlockers(issueId, blockerIssueIds, companyId) { + requireCapability(manifest, capabilitySet, "issue.relations.write"); + if (!isInCompany(issues.get(issueId), companyId)) throw new Error(`Issue not found: ${issueId}`); + const next = new Set(blockedByIssueIds.get(issueId) ?? []); + for (const blockerIssueId of blockerIssueIds) next.add(blockerIssueId); + blockedByIssueIds.set(issueId, [...next]); + return issueRelationSummary(issueId); + }, + async removeBlockers(issueId, blockerIssueIds, companyId) { + requireCapability(manifest, capabilitySet, "issue.relations.write"); + if (!isInCompany(issues.get(issueId), companyId)) throw new Error(`Issue not found: ${issueId}`); + const removals = new Set(blockerIssueIds); + blockedByIssueIds.set( + issueId, + (blockedByIssueIds.get(issueId) ?? 
[]).filter((blockerIssueId) => !removals.has(blockerIssueId)), + ); + return issueRelationSummary(issueId); + }, + }, + async getSubtree(issueId, companyId, options) { + requireCapability(manifest, capabilitySet, "issue.subtree.read"); + const root = issues.get(issueId); + if (!isInCompany(root, companyId)) throw new Error(`Issue not found: ${issueId}`); + const includeRoot = options?.includeRoot !== false; + const allIds = [root.id]; + let frontier = [root.id]; + while (frontier.length > 0) { + const children = [...issues.values()] + .filter((issue) => issue.companyId === companyId && frontier.includes(issue.parentId ?? "")) + .map((issue) => issue.id) + .filter((id) => !allIds.includes(id)); + allIds.push(...children); + frontier = children; + } + const issueIds = includeRoot ? allIds : allIds.filter((id) => id !== root.id); + const subtreeIssues = issueIds.map((id) => issues.get(id)).filter((candidate): candidate is Issue => Boolean(candidate)); + return { + rootIssueId: root.id, + companyId, + issueIds, + issues: subtreeIssues, + ...(options?.includeRelations + ? { relations: Object.fromEntries(issueIds.map((id) => [id, issueRelationSummary(id)])) } + : {}), + ...(options?.includeDocuments ? { documents: Object.fromEntries(issueIds.map((id) => [id, []])) } : {}), + ...(options?.includeActiveRuns ? { activeRuns: Object.fromEntries(issueIds.map((id) => [id, []])) } : {}), + ...(options?.includeAssignees ? { assignees: {} } : {}), + }; + }, + summaries: { + async getOrchestration(input) { + requireCapability(manifest, capabilitySet, "issues.orchestration.read"); + const root = issues.get(input.issueId); + if (!isInCompany(root, input.companyId)) throw new Error(`Issue not found: ${input.issueId}`); + const subtreeIssueIds = [root.id]; + if (input.includeSubtree) { + let frontier = [root.id]; + while (frontier.length > 0) { + const children = [...issues.values()] + .filter((issue) => issue.companyId === input.companyId && frontier.includes(issue.parentId ?? 
"")) + .map((issue) => issue.id) + .filter((id) => !subtreeIssueIds.includes(id)); + subtreeIssueIds.push(...children); + frontier = children; + } + } + return { + issueId: root.id, + companyId: input.companyId, + subtreeIssueIds, + relations: Object.fromEntries(subtreeIssueIds.map((id) => [id, issueRelationSummary(id)])), + approvals: [], + runs: [], + costs: { + costCents: 0, + inputTokens: 0, + cachedInputTokens: 0, + outputTokens: 0, + billingCode: input.billingCode ?? null, + }, + openBudgetIncidents: [], + invocationBlocks: [], + }; }, }, }, @@ -660,7 +900,12 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { seed(input) { for (const row of input.companies ?? []) companies.set(row.id, row); for (const row of input.projects ?? []) projects.set(row.id, row); - for (const row of input.issues ?? []) issues.set(row.id, row); + for (const row of input.issues ?? []) { + issues.set(row.id, row); + if (row.blockedBy) { + blockedByIssueIds.set(row.id, row.blockedBy.map((blocker) => blocker.id)); + } + } for (const row of input.issueComments ?? []) { const list = issueComments.get(row.issueId) ?? 
[]; list.push(row); @@ -738,6 +983,8 @@ export function createTestHarness(options: TestHarnessOptions): TestHarness { activity, metrics, telemetry, + dbQueries, + dbExecutes, }; return harness; diff --git a/packages/plugins/sdk/src/types.ts b/packages/plugins/sdk/src/types.ts index f8a6ca4f9f..4639b73fea 100644 --- a/packages/plugins/sdk/src/types.ts +++ b/packages/plugins/sdk/src/types.ts @@ -21,6 +21,8 @@ import type { IssueComment, IssueDocument, IssueDocumentSummary, + IssueRelationIssueSummary, + PluginIssueOriginKind, Agent, Goal, } from "@paperclipai/shared"; @@ -40,7 +42,12 @@ export type { PluginLauncherRenderDeclaration, PluginLauncherDeclaration, PluginMinimumHostVersion, + PluginDatabaseDeclaration, + PluginApiRouteDeclaration, + PluginApiRouteCompanyResolution, PluginRecord, + PluginDatabaseNamespaceRecord, + PluginMigrationRecord, PluginConfig, JsonSchema, PluginStatus, @@ -57,6 +64,13 @@ export type { PluginJobRunStatus, PluginJobRunTrigger, PluginWebhookDeliveryStatus, + PluginDatabaseCoreReadTable, + PluginDatabaseMigrationStatus, + PluginDatabaseNamespaceMode, + PluginDatabaseNamespaceStatus, + PluginApiRouteAuthMode, + PluginApiRouteCheckoutPolicy, + PluginApiRouteMethod, PluginEventType, PluginBridgeErrorCode, Company, @@ -65,6 +79,8 @@ export type { IssueComment, IssueDocument, IssueDocumentSummary, + IssueRelationIssueSummary, + PluginIssueOriginKind, Agent, Goal, } from "@paperclipai/shared"; @@ -407,6 +423,17 @@ export interface PluginLaunchersClient { register(launcher: PluginLauncherRegistration): void; } +export interface PluginDatabaseClient { + /** Host-derived PostgreSQL schema name for this plugin's namespace. */ + namespace: string; + + /** Run a restricted SELECT against the plugin namespace and whitelisted core tables. */ + query>(sql: string, params?: unknown[]): Promise; + + /** Run a restricted INSERT, UPDATE, or DELETE against the plugin namespace. 
*/ + execute(sql: string, params?: unknown[]): Promise<{ rowCount: number }>; +} + /** * `ctx.http` — make outbound HTTP requests. * @@ -867,6 +894,178 @@ export interface PluginIssueDocumentsClient { delete(issueId: string, key: string, companyId: string): Promise; } +export interface PluginIssueMutationActor { + /** Agent that initiated the plugin operation, when the plugin is acting from an agent run. */ + actorAgentId?: string | null; + /** Board/user that initiated the plugin operation, when known. */ + actorUserId?: string | null; + /** Heartbeat run that initiated the operation. Required for checkout-aware agent actions. */ + actorRunId?: string | null; +} + +export interface PluginIssueRelationSummary { + blockedBy: IssueRelationIssueSummary[]; + blocks: IssueRelationIssueSummary[]; +} + +export interface PluginIssueRelationsClient { + /** Read blocker relationships for an issue. Requires `issue.relations.read`. */ + get(issueId: string, companyId: string): Promise; + /** Replace the issue's blocked-by relation set. Requires `issue.relations.write`. */ + setBlockedBy( + issueId: string, + blockedByIssueIds: string[], + companyId: string, + actor?: PluginIssueMutationActor, + ): Promise; + /** Add one or more blockers while preserving existing blockers. Requires `issue.relations.write`. */ + addBlockers( + issueId: string, + blockerIssueIds: string[], + companyId: string, + actor?: PluginIssueMutationActor, + ): Promise; + /** Remove one or more blockers while preserving all other blockers. Requires `issue.relations.write`. 
*/ + removeBlockers( + issueId: string, + blockerIssueIds: string[], + companyId: string, + actor?: PluginIssueMutationActor, + ): Promise; +} + +export interface PluginIssueCheckoutOwnership { + issueId: string; + status: Issue["status"]; + assigneeAgentId: string | null; + checkoutRunId: string | null; + adoptedFromRunId: string | null; +} + +export interface PluginIssueWakeupResult { + queued: boolean; + runId: string | null; +} + +export interface PluginIssueWakeupBatchResult { + issueId: string; + queued: boolean; + runId: string | null; +} + +export interface PluginIssueRunSummary { + id: string; + issueId: string | null; + agentId: string; + status: string; + invocationSource: string; + triggerDetail: string | null; + startedAt: string | null; + finishedAt: string | null; + error: string | null; + createdAt: string; +} + +export interface PluginIssueApprovalSummary { + issueId: string; + id: string; + type: string; + status: string; + requestedByAgentId: string | null; + requestedByUserId: string | null; + decidedByUserId: string | null; + decidedAt: string | null; + createdAt: string; +} + +export interface PluginIssueCostSummary { + costCents: number; + inputTokens: number; + cachedInputTokens: number; + outputTokens: number; + billingCode: string | null; +} + +export interface PluginBudgetIncidentSummary { + id: string; + scopeType: string; + scopeId: string; + metric: string; + windowKind: string; + thresholdType: string; + amountLimit: number; + amountObserved: number; + status: string; + approvalId: string | null; + createdAt: string; +} + +export interface PluginIssueInvocationBlockSummary { + issueId: string; + agentId: string; + scopeType: "company" | "agent" | "project"; + scopeId: string; + scopeName: string; + reason: string; +} + +export interface PluginIssueOrchestrationSummary { + issueId: string; + companyId: string; + subtreeIssueIds: string[]; + relations: Record; + approvals: PluginIssueApprovalSummary[]; + runs: PluginIssueRunSummary[]; + 
costs: PluginIssueCostSummary; + openBudgetIncidents: PluginBudgetIncidentSummary[]; + invocationBlocks: PluginIssueInvocationBlockSummary[]; +} + +export interface PluginIssueSubtreeOptions { + /** Include the root issue in the result. Defaults to true. */ + includeRoot?: boolean; + /** Include blocker relationship summaries keyed by issue ID. */ + includeRelations?: boolean; + /** Include issue document summaries keyed by issue ID. */ + includeDocuments?: boolean; + /** Include queued/running heartbeat runs keyed by issue ID. */ + includeActiveRuns?: boolean; + /** Include assignee summaries keyed by agent ID. */ + includeAssignees?: boolean; +} + +export interface PluginIssueAssigneeSummary { + id: string; + name: string; + role: string; + title: string | null; + status: Agent["status"]; +} + +export interface PluginIssueSubtree { + rootIssueId: string; + companyId: string; + issueIds: string[]; + issues: Issue[]; + relations?: Record; + documents?: Record; + activeRuns?: Record; + assignees?: Record; +} + +export interface PluginIssueSummariesClient { + /** + * Read the compact orchestration inputs a workflow plugin needs for an + * issue or issue subtree. Requires `issues.orchestration.read`. + */ + getOrchestration(input: { + issueId: string; + companyId: string; + includeSubtree?: boolean; + billingCode?: string | null; + }): Promise; +} + /** * `ctx.issues` — read and mutate issues plus comments. 
* @@ -874,6 +1073,9 @@ export interface PluginIssueDocumentsClient { * - `issues.read` for read operations * - `issues.create` for create * - `issues.update` for update + * - `issues.checkout` for checkout ownership assertions + * - `issues.wakeup` for assignment wakeup requests + * - `issues.orchestration.read` for orchestration summaries * - `issue.comments.read` for `listComments` * - `issue.comments.create` for `createComment` * - `issue.documents.read` for `documents.list` and `documents.get` @@ -884,6 +1086,8 @@ export interface PluginIssuesClient { companyId: string; projectId?: string; assigneeAgentId?: string; + originKind?: PluginIssueOriginKind; + originId?: string; status?: Issue["status"]; limit?: number; offset?: number; @@ -897,17 +1101,80 @@ export interface PluginIssuesClient { inheritExecutionWorkspaceFromIssueId?: string; title: string; description?: string; + status?: Issue["status"]; priority?: Issue["priority"]; assigneeAgentId?: string; + assigneeUserId?: string | null; + requestDepth?: number; + billingCode?: string | null; + originKind?: PluginIssueOriginKind; + originId?: string | null; + originRunId?: string | null; + blockedByIssueIds?: string[]; + labelIds?: string[]; + executionWorkspaceId?: string | null; + executionWorkspacePreference?: string | null; + executionWorkspaceSettings?: Record | null; + actor?: PluginIssueMutationActor; }): Promise; update( issueId: string, patch: Partial>, + | "title" + | "description" + | "status" + | "priority" + | "assigneeAgentId" + | "assigneeUserId" + | "billingCode" + | "originKind" + | "originId" + | "originRunId" + | "requestDepth" + | "executionWorkspaceId" + | "executionWorkspacePreference" + >> & { + blockedByIssueIds?: string[]; + labelIds?: string[]; + executionWorkspaceSettings?: Record | null; + }, companyId: string, + actor?: PluginIssueMutationActor, ): Promise; + assertCheckoutOwner(input: { + issueId: string; + companyId: string; + actorAgentId: string; + actorRunId: string; + }): 
Promise; + /** + * Read a root issue's descendants with optional relation/document/run/assignee + * summaries. Requires `issue.subtree.read`. + */ + getSubtree( + issueId: string, + companyId: string, + options?: PluginIssueSubtreeOptions, + ): Promise; + requestWakeup( + issueId: string, + companyId: string, + options?: { + reason?: string; + contextSource?: string; + idempotencyKey?: string | null; + } & PluginIssueMutationActor, + ): Promise; + requestWakeups( + issueIds: string[], + companyId: string, + options?: { + reason?: string; + contextSource?: string; + idempotencyKeyPrefix?: string | null; + } & PluginIssueMutationActor, + ): Promise; listComments(issueId: string, companyId: string): Promise; createComment( issueId: string, @@ -917,6 +1184,10 @@ export interface PluginIssuesClient { ): Promise; /** Read and write issue documents. Requires `issue.documents.read` / `issue.documents.write`. */ documents: PluginIssueDocumentsClient; + /** Read and write blocker relationships. */ + relations: PluginIssueRelationsClient; + /** Read compact orchestration summaries. */ + summaries: PluginIssueSummariesClient; } /** @@ -1138,6 +1409,9 @@ export interface PluginContext { /** Register launcher metadata that the host can surface in plugin UI entry points. */ launchers: PluginLaunchersClient; + /** Restricted plugin-owned database namespace. Requires database namespace capabilities. */ + db: PluginDatabaseClient; + /** Make outbound HTTP requests. Requires `http.outbound`. 
*/ http: PluginHttpClient; diff --git a/packages/plugins/sdk/src/worker-rpc-host.ts b/packages/plugins/sdk/src/worker-rpc-host.ts index 483dbc7082..db3fadb2b9 100644 --- a/packages/plugins/sdk/src/worker-rpc-host.ts +++ b/packages/plugins/sdk/src/worker-rpc-host.ts @@ -42,6 +42,7 @@ import type { PaperclipPluginManifestV1 } from "@paperclipai/shared"; import type { PaperclipPlugin } from "./define-plugin.js"; import type { + PluginApiRequestInput, PluginHealthDiagnostics, PluginConfigValidationResult, PluginWebhookInput, @@ -250,6 +251,7 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost let initialized = false; let manifest: PaperclipPluginManifestV1 | null = null; let currentConfig: Record = {}; + let databaseNamespace: string | null = null; // Plugin handler registrations (populated during setup()) const eventHandlers: EventRegistration[] = []; @@ -416,6 +418,18 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost }, }, + db: { + get namespace() { + return databaseNamespace ?? 
""; + }, + async query>(sql: string, params?: unknown[]): Promise { + return callHost("db.query", { sql, params }) as Promise; + }, + async execute(sql: string, params?: unknown[]) { + return callHost("db.execute", { sql, params }); + }, + }, + http: { async fetch(url: string, init?: RequestInit): Promise { const serializedInit: Record = {}; @@ -574,6 +588,8 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost companyId: input.companyId, projectId: input.projectId, assigneeAgentId: input.assigneeAgentId, + originKind: input.originKind, + originId: input.originId, status: input.status, limit: input.limit, offset: input.offset, @@ -593,19 +609,81 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost inheritExecutionWorkspaceFromIssueId: input.inheritExecutionWorkspaceFromIssueId, title: input.title, description: input.description, + status: input.status, priority: input.priority, assigneeAgentId: input.assigneeAgentId, + assigneeUserId: input.assigneeUserId, + requestDepth: input.requestDepth, + billingCode: input.billingCode, + originKind: input.originKind, + originId: input.originId, + originRunId: input.originRunId, + blockedByIssueIds: input.blockedByIssueIds, + labelIds: input.labelIds, + executionWorkspaceId: input.executionWorkspaceId, + executionWorkspacePreference: input.executionWorkspacePreference, + executionWorkspaceSettings: input.executionWorkspaceSettings, + actorAgentId: input.actor?.actorAgentId, + actorUserId: input.actor?.actorUserId, + actorRunId: input.actor?.actorRunId, }); }, - async update(issueId: string, patch, companyId: string) { + async update(issueId: string, patch, companyId: string, actor) { return callHost("issues.update", { issueId, - patch: patch as Record, + patch: { + ...(patch as Record), + actorAgentId: actor?.actorAgentId, + actorUserId: actor?.actorUserId, + actorRunId: actor?.actorRunId, + }, companyId, }); }, + async assertCheckoutOwner(input) { + return 
callHost("issues.assertCheckoutOwner", input); + }, + + async getSubtree(issueId: string, companyId: string, options) { + return callHost("issues.getSubtree", { + issueId, + companyId, + includeRoot: options?.includeRoot, + includeRelations: options?.includeRelations, + includeDocuments: options?.includeDocuments, + includeActiveRuns: options?.includeActiveRuns, + includeAssignees: options?.includeAssignees, + }); + }, + + async requestWakeup(issueId: string, companyId: string, options) { + return callHost("issues.requestWakeup", { + issueId, + companyId, + reason: options?.reason, + contextSource: options?.contextSource, + idempotencyKey: options?.idempotencyKey, + actorAgentId: options?.actorAgentId, + actorUserId: options?.actorUserId, + actorRunId: options?.actorRunId, + }); + }, + + async requestWakeups(issueIds: string[], companyId: string, options) { + return callHost("issues.requestWakeups", { + issueIds, + companyId, + reason: options?.reason, + contextSource: options?.contextSource, + idempotencyKeyPrefix: options?.idempotencyKeyPrefix, + actorAgentId: options?.actorAgentId, + actorUserId: options?.actorUserId, + actorRunId: options?.actorRunId, + }); + }, + async listComments(issueId: string, companyId: string) { return callHost("issues.listComments", { issueId, companyId }); }, @@ -639,6 +717,51 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost return callHost("issues.documents.delete", { issueId, key, companyId }); }, }, + + relations: { + async get(issueId: string, companyId: string) { + return callHost("issues.relations.get", { issueId, companyId }); + }, + + async setBlockedBy(issueId: string, blockedByIssueIds: string[], companyId: string, actor) { + return callHost("issues.relations.setBlockedBy", { + issueId, + companyId, + blockedByIssueIds, + actorAgentId: actor?.actorAgentId, + actorUserId: actor?.actorUserId, + actorRunId: actor?.actorRunId, + }); + }, + + async addBlockers(issueId: string, blockerIssueIds: 
string[], companyId: string, actor) { + return callHost("issues.relations.addBlockers", { + issueId, + companyId, + blockerIssueIds, + actorAgentId: actor?.actorAgentId, + actorUserId: actor?.actorUserId, + actorRunId: actor?.actorRunId, + }); + }, + + async removeBlockers(issueId: string, blockerIssueIds: string[], companyId: string, actor) { + return callHost("issues.relations.removeBlockers", { + issueId, + companyId, + blockerIssueIds, + actorAgentId: actor?.actorAgentId, + actorUserId: actor?.actorUserId, + actorRunId: actor?.actorRunId, + }); + }, + }, + + summaries: { + async getOrchestration(input) { + return callHost("issues.summaries.getOrchestration", input); + }, + }, }, agents: { @@ -879,6 +1002,9 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost case "handleWebhook": return handleWebhook(params as PluginWebhookInput); + case "handleApiRequest": + return handleApiRequest(params as PluginApiRequestInput); + case "getData": return handleGetData(params as GetDataParams); @@ -907,6 +1033,7 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost manifest = params.manifest; currentConfig = params.config; + databaseNamespace = params.databaseNamespace ?? 
null; // Call the plugin's setup function await plugin.definition.setup(ctx); @@ -919,6 +1046,7 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost if (plugin.definition.onConfigChanged) supportedMethods.push("configChanged"); if (plugin.definition.onHealth) supportedMethods.push("health"); if (plugin.definition.onShutdown) supportedMethods.push("shutdown"); + if (plugin.definition.onApiRequest) supportedMethods.push("handleApiRequest"); return { ok: true, supportedMethods }; } @@ -1020,6 +1148,16 @@ export function startWorkerRpcHost(options: WorkerRpcHostOptions): WorkerRpcHost await plugin.definition.onWebhook(params); } + async function handleApiRequest(params: PluginApiRequestInput): Promise { + if (!plugin.definition.onApiRequest) { + throw Object.assign( + new Error("handleApiRequest is not implemented by this plugin"), + { code: PLUGIN_RPC_ERROR_CODES.METHOD_NOT_IMPLEMENTED }, + ); + } + return plugin.definition.onApiRequest(params); + } + async function handleGetData(params: GetDataParams): Promise { const handler = dataHandlers.get(params.key); if (!handler) { diff --git a/packages/shared/src/constants.ts b/packages/shared/src/constants.ts index 8b74632fdd..035e7bb7ed 100644 --- a/packages/shared/src/constants.ts +++ b/packages/shared/src/constants.ts @@ -138,7 +138,9 @@ export const ISSUE_PRIORITIES = ["critical", "high", "medium", "low"] as const; export type IssuePriority = (typeof ISSUE_PRIORITIES)[number]; export const ISSUE_ORIGIN_KINDS = ["manual", "routine_execution"] as const; -export type IssueOriginKind = (typeof ISSUE_ORIGIN_KINDS)[number]; +export type BuiltInIssueOriginKind = (typeof ISSUE_ORIGIN_KINDS)[number]; +export type PluginIssueOriginKind = `plugin:${string}`; +export type IssueOriginKind = BuiltInIssueOriginKind | PluginIssueOriginKind; export const ISSUE_RELATION_TYPES = ["blocks"] as const; export type IssueRelationType = (typeof ISSUE_RELATION_TYPES)[number]; @@ -498,6 +500,8 @@ export const 
PLUGIN_CAPABILITIES = [ "projects.read", "project.workspaces.read", "issues.read", + "issue.relations.read", + "issue.subtree.read", "issue.comments.read", "issue.documents.read", "agents.read", @@ -506,9 +510,14 @@ export const PLUGIN_CAPABILITIES = [ "goals.update", "activity.read", "costs.read", + "issues.orchestration.read", + "database.namespace.read", // Data Write "issues.create", "issues.update", + "issue.relations.write", + "issues.checkout", + "issues.wakeup", "issue.comments.create", "issue.documents.write", "agents.pause", @@ -521,6 +530,8 @@ export const PLUGIN_CAPABILITIES = [ "activity.log.write", "metrics.write", "telemetry.track", + "database.namespace.migrate", + "database.namespace.write", // Plugin State "plugin.state.read", "plugin.state.write", @@ -529,6 +540,7 @@ export const PLUGIN_CAPABILITIES = [ "events.emit", "jobs.schedule", "webhooks.receive", + "api.routes.register", "http.outbound", "secrets.read-ref", // Agent Tools @@ -544,6 +556,51 @@ export const PLUGIN_CAPABILITIES = [ ] as const; export type PluginCapability = (typeof PLUGIN_CAPABILITIES)[number]; +export const PLUGIN_DATABASE_NAMESPACE_MODES = ["schema"] as const; +export type PluginDatabaseNamespaceMode = (typeof PLUGIN_DATABASE_NAMESPACE_MODES)[number]; + +export const PLUGIN_DATABASE_NAMESPACE_STATUSES = [ + "active", + "migration_failed", +] as const; +export type PluginDatabaseNamespaceStatus = (typeof PLUGIN_DATABASE_NAMESPACE_STATUSES)[number]; + +export const PLUGIN_DATABASE_MIGRATION_STATUSES = [ + "applied", + "failed", +] as const; +export type PluginDatabaseMigrationStatus = (typeof PLUGIN_DATABASE_MIGRATION_STATUSES)[number]; + +export const PLUGIN_DATABASE_CORE_READ_TABLES = [ + "companies", + "projects", + "goals", + "agents", + "issues", + "issue_documents", + "issue_relations", + "issue_comments", + "heartbeat_runs", + "cost_events", + "approvals", + "issue_approvals", + "budget_incidents", +] as const; +export type PluginDatabaseCoreReadTable = (typeof 
PLUGIN_DATABASE_CORE_READ_TABLES)[number]; + +export const PLUGIN_API_ROUTE_METHODS = ["GET", "POST", "PATCH", "DELETE"] as const; +export type PluginApiRouteMethod = (typeof PLUGIN_API_ROUTE_METHODS)[number]; + +export const PLUGIN_API_ROUTE_AUTH_MODES = ["board", "agent", "board-or-agent", "webhook"] as const; +export type PluginApiRouteAuthMode = (typeof PLUGIN_API_ROUTE_AUTH_MODES)[number]; + +export const PLUGIN_API_ROUTE_CHECKOUT_POLICIES = [ + "none", + "required-for-agent-in-progress", + "always-for-agent", +] as const; +export type PluginApiRouteCheckoutPolicy = (typeof PLUGIN_API_ROUTE_CHECKOUT_POLICIES)[number]; + /** * UI extension slot types. Each slot type corresponds to a mount point in the * Paperclip UI where plugin components can be rendered. @@ -742,6 +799,13 @@ export const PLUGIN_EVENT_TYPES = [ "issue.created", "issue.updated", "issue.comment.created", + "issue.document.created", + "issue.document.updated", + "issue.document.deleted", + "issue.relations.updated", + "issue.checked_out", + "issue.released", + "issue.assignment_wakeup_requested", "agent.created", "agent.updated", "agent.status_changed", @@ -753,6 +817,8 @@ export const PLUGIN_EVENT_TYPES = [ "goal.updated", "approval.created", "approval.decided", + "budget.incident.opened", + "budget.incident.resolved", "cost_event.created", "activity.logged", ] as const; diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index dc8562926d..bddbb76949 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -83,6 +83,13 @@ export { PLUGIN_JOB_RUN_STATUSES, PLUGIN_JOB_RUN_TRIGGERS, PLUGIN_WEBHOOK_DELIVERY_STATUSES, + PLUGIN_DATABASE_NAMESPACE_MODES, + PLUGIN_DATABASE_NAMESPACE_STATUSES, + PLUGIN_DATABASE_MIGRATION_STATUSES, + PLUGIN_DATABASE_CORE_READ_TABLES, + PLUGIN_API_ROUTE_METHODS, + PLUGIN_API_ROUTE_AUTH_MODES, + PLUGIN_API_ROUTE_CHECKOUT_POLICIES, PLUGIN_EVENT_TYPES, PLUGIN_BRIDGE_ERROR_CODES, type CompanyStatus, @@ -96,6 +103,8 @@ export { type 
AgentIconName, type IssueStatus, type IssuePriority, + type BuiltInIssueOriginKind, + type PluginIssueOriginKind, type IssueOriginKind, type IssueRelationType, type SystemIssueDocumentKey, @@ -159,6 +168,13 @@ export { type PluginJobRunStatus, type PluginJobRunTrigger, type PluginWebhookDeliveryStatus, + type PluginDatabaseNamespaceMode, + type PluginDatabaseNamespaceStatus, + type PluginDatabaseMigrationStatus, + type PluginDatabaseCoreReadTable, + type PluginApiRouteMethod, + type PluginApiRouteAuthMode, + type PluginApiRouteCheckoutPolicy, type PluginEventType, type PluginBridgeErrorCode, } from "./constants.js"; @@ -397,8 +413,13 @@ export type { PluginLauncherDeclaration, PluginMinimumHostVersion, PluginUiDeclaration, + PluginDatabaseDeclaration, + PluginApiRouteCompanyResolution, + PluginApiRouteDeclaration, PaperclipPluginManifestV1, PluginRecord, + PluginDatabaseNamespaceRecord, + PluginMigrationRecord, PluginStateRecord, PluginConfig, PluginEntityRecord, @@ -677,6 +698,8 @@ export { pluginLauncherActionDeclarationSchema, pluginLauncherRenderDeclarationSchema, pluginLauncherDeclarationSchema, + pluginDatabaseDeclarationSchema, + pluginApiRouteDeclarationSchema, pluginManifestV1Schema, installPluginSchema, upsertPluginConfigSchema, @@ -693,6 +716,8 @@ export { type PluginLauncherActionDeclarationInput, type PluginLauncherRenderDeclarationInput, type PluginLauncherDeclarationInput, + type PluginDatabaseDeclarationInput, + type PluginApiRouteDeclarationInput, type PluginManifestV1Input, type InstallPlugin, type UpsertPluginConfig, diff --git a/packages/shared/src/types/activity.ts b/packages/shared/src/types/activity.ts index d0232e0090..3bc098b663 100644 --- a/packages/shared/src/types/activity.ts +++ b/packages/shared/src/types/activity.ts @@ -1,7 +1,7 @@ export interface ActivityEvent { id: string; companyId: string; - actorType: "agent" | "user" | "system"; + actorType: "agent" | "user" | "system" | "plugin"; actorId: string; action: string; entityType: 
string; diff --git a/packages/shared/src/types/index.ts b/packages/shared/src/types/index.ts index 600d97a80b..971bf14faf 100644 --- a/packages/shared/src/types/index.ts +++ b/packages/shared/src/types/index.ts @@ -244,8 +244,13 @@ export type { PluginLauncherDeclaration, PluginMinimumHostVersion, PluginUiDeclaration, + PluginDatabaseDeclaration, + PluginApiRouteCompanyResolution, + PluginApiRouteDeclaration, PaperclipPluginManifestV1, PluginRecord, + PluginDatabaseNamespaceRecord, + PluginMigrationRecord, PluginStateRecord, PluginConfig, PluginEntityRecord, @@ -253,4 +258,8 @@ export type { PluginJobRecord, PluginJobRunRecord, PluginWebhookDeliveryRecord, + PluginDatabaseCoreReadTable, + PluginDatabaseMigrationStatus, + PluginDatabaseNamespaceMode, + PluginDatabaseNamespaceStatus, } from "./plugin.js"; diff --git a/packages/shared/src/types/plugin.ts b/packages/shared/src/types/plugin.ts index 5117b501ff..74fdd83d97 100644 --- a/packages/shared/src/types/plugin.ts +++ b/packages/shared/src/types/plugin.ts @@ -9,6 +9,13 @@ import type { PluginLauncherAction, PluginLauncherBounds, PluginLauncherRenderEnvironment, + PluginApiRouteAuthMode, + PluginApiRouteCheckoutPolicy, + PluginApiRouteMethod, + PluginDatabaseCoreReadTable, + PluginDatabaseMigrationStatus, + PluginDatabaseNamespaceMode, + PluginDatabaseNamespaceStatus, } from "../constants.js"; // --------------------------------------------------------------------------- @@ -21,6 +28,13 @@ import type { */ export type JsonSchema = Record; +export type { + PluginDatabaseCoreReadTable, + PluginDatabaseMigrationStatus, + PluginDatabaseNamespaceMode, + PluginDatabaseNamespaceStatus, +} from "../constants.js"; + // --------------------------------------------------------------------------- // Manifest sub-types — nested declarations within PaperclipPluginManifestV1 // --------------------------------------------------------------------------- @@ -190,6 +204,44 @@ export interface PluginUiDeclaration { launchers?: 
PluginLauncherDeclaration[]; } +/** + * Declares restricted database access for trusted orchestration plugins. + * + * The host derives the final namespace from the plugin key and optional slug, + * applies SQL migrations before worker startup, and gates runtime SQL through + * the `database.namespace.*` capabilities. + */ +export interface PluginDatabaseDeclaration { + /** Optional stable human-readable slug included in the host-derived namespace. */ + namespaceSlug?: string; + /** SQL migration directory relative to the plugin package root. */ + migrationsDir: string; + /** Public core tables this plugin may read or join at runtime. */ + coreReadTables?: PluginDatabaseCoreReadTable[]; +} + +export type PluginApiRouteCompanyResolution = + | { from: "body"; key: string } + | { from: "query"; key: string } + | { from: "issue"; param: string }; + +export interface PluginApiRouteDeclaration { + /** Stable plugin-defined route key passed to the worker. */ + routeKey: string; + /** HTTP method accepted by this route. */ + method: PluginApiRouteMethod; + /** Plugin-local path under `/api/plugins/:pluginId/api`, e.g. `/issues/:issueId/smoke`. */ + path: string; + /** Actor class allowed to call the route. */ + auth: PluginApiRouteAuthMode; + /** Capability required to expose the route. Currently `api.routes.register`. */ + capability: "api.routes.register"; + /** Optional checkout policy enforced by the host before worker dispatch. */ + checkoutPolicy?: PluginApiRouteCheckoutPolicy; + /** How the host resolves company access for this route. */ + companyResolution?: PluginApiRouteCompanyResolution; +} + // --------------------------------------------------------------------------- // Plugin Manifest V1 // --------------------------------------------------------------------------- @@ -240,6 +292,10 @@ export interface PaperclipPluginManifestV1 { webhooks?: PluginWebhookDeclaration[]; /** Agent tools this plugin contributes. Requires `agent.tools.register` capability. 
*/ tools?: PluginToolDeclaration[]; + /** Restricted plugin-owned database namespace declaration. */ + database?: PluginDatabaseDeclaration; + /** Scoped JSON API routes mounted under `/api/plugins/:pluginId/api/*`. */ + apiRoutes?: PluginApiRouteDeclaration[]; /** * Legacy top-level launcher declarations. * Prefer `ui.launchers` for new manifests. @@ -286,6 +342,31 @@ export interface PluginRecord { updatedAt: Date; } +export interface PluginDatabaseNamespaceRecord { + id: string; + pluginId: string; + pluginKey: string; + namespaceName: string; + namespaceMode: PluginDatabaseNamespaceMode; + status: PluginDatabaseNamespaceStatus; + createdAt: Date; + updatedAt: Date; +} + +export interface PluginMigrationRecord { + id: string; + pluginId: string; + pluginKey: string; + namespaceName: string; + migrationKey: string; + checksum: string; + pluginVersion: string; + status: PluginDatabaseMigrationStatus; + startedAt: Date; + appliedAt: Date | null; + errorMessage: string | null; +} + // --------------------------------------------------------------------------- // Plugin State – represents a row in the `plugin_state` table // --------------------------------------------------------------------------- diff --git a/packages/shared/src/validators/index.ts b/packages/shared/src/validators/index.ts index 4a2a42321d..997203890e 100644 --- a/packages/shared/src/validators/index.ts +++ b/packages/shared/src/validators/index.ts @@ -299,6 +299,8 @@ export { pluginLauncherActionDeclarationSchema, pluginLauncherRenderDeclarationSchema, pluginLauncherDeclarationSchema, + pluginDatabaseDeclarationSchema, + pluginApiRouteDeclarationSchema, pluginManifestV1Schema, installPluginSchema, upsertPluginConfigSchema, @@ -315,6 +317,8 @@ export { type PluginLauncherActionDeclarationInput, type PluginLauncherRenderDeclarationInput, type PluginLauncherDeclarationInput, + type PluginDatabaseDeclarationInput, + type PluginApiRouteDeclarationInput, type PluginManifestV1Input, type InstallPlugin, 
type UpsertPluginConfig, diff --git a/packages/shared/src/validators/plugin.ts b/packages/shared/src/validators/plugin.ts index 92438fb9c0..b4c561b120 100644 --- a/packages/shared/src/validators/plugin.ts +++ b/packages/shared/src/validators/plugin.ts @@ -11,6 +11,10 @@ import { PLUGIN_LAUNCHER_BOUNDS, PLUGIN_LAUNCHER_RENDER_ENVIRONMENTS, PLUGIN_STATE_SCOPE_KINDS, + PLUGIN_DATABASE_CORE_READ_TABLES, + PLUGIN_API_ROUTE_AUTH_MODES, + PLUGIN_API_ROUTE_CHECKOUT_POLICIES, + PLUGIN_API_ROUTE_METHODS, } from "../constants.js"; // --------------------------------------------------------------------------- @@ -336,6 +340,48 @@ export const pluginLauncherDeclarationSchema = z.object({ export type PluginLauncherDeclarationInput = z.infer; +export const pluginDatabaseDeclarationSchema = z.object({ + namespaceSlug: z.string().regex(/^[a-z0-9][a-z0-9_]*$/, { + message: "namespaceSlug must be lowercase letters, digits, or underscores and start with a letter or digit", + }).max(40).optional(), + migrationsDir: z.string().min(1).refine( + (value) => !value.startsWith("/") && !value.includes("..") && !/[\\]/.test(value), + { message: "migrationsDir must be a relative package path without '..' 
or backslashes" }, + ), + coreReadTables: z.array(z.enum(PLUGIN_DATABASE_CORE_READ_TABLES)).optional(), +}); + +export type PluginDatabaseDeclarationInput = z.infer; + +export const pluginApiRouteDeclarationSchema = z.object({ + routeKey: z.string().min(1).max(100).regex(/^[a-z0-9][a-z0-9._:-]*$/, { + message: "routeKey must be lowercase letters, digits, dots, colons, underscores, or hyphens", + }), + method: z.enum(PLUGIN_API_ROUTE_METHODS), + path: z.string().min(1).regex(/^\/[a-zA-Z0-9:_./-]*$/, { + message: "path must start with / and contain only path-safe literal or :param segments", + }).refine( + (value) => + !value.includes("..") && + !value.includes("//") && + value !== "/api" && + !value.startsWith("/api/") && + value !== "/plugins" && + !value.startsWith("/plugins/"), + { message: "path must stay inside the plugin api namespace" }, + ), + auth: z.enum(PLUGIN_API_ROUTE_AUTH_MODES), + capability: z.literal("api.routes.register"), + checkoutPolicy: z.enum(PLUGIN_API_ROUTE_CHECKOUT_POLICIES).optional(), + companyResolution: z.discriminatedUnion("from", [ + z.object({ from: z.literal("body"), key: z.string().min(1) }), + z.object({ from: z.literal("query"), key: z.string().min(1) }), + z.object({ from: z.literal("issue"), param: z.string().min(1) }), + ]).optional(), +}); + +export type PluginApiRouteDeclarationInput = z.infer; + // --------------------------------------------------------------------------- // Plugin Manifest V1 schema // --------------------------------------------------------------------------- @@ -405,6 +451,8 @@ export const pluginManifestV1Schema = z.object({ jobs: z.array(pluginJobDeclarationSchema).optional(), webhooks: z.array(pluginWebhookDeclarationSchema).optional(), tools: z.array(pluginToolDeclarationSchema).optional(), + database: pluginDatabaseDeclarationSchema.optional(), + apiRoutes: z.array(pluginApiRouteDeclarationSchema).optional(), launchers: z.array(pluginLauncherDeclarationSchema).optional(), ui: z.object({ slots: 
z.array(pluginUiSlotDeclarationSchema).min(1).optional(), @@ -474,6 +522,42 @@ export const pluginManifestV1Schema = z.object({ } } + if (manifest.apiRoutes && manifest.apiRoutes.length > 0) { + if (!manifest.capabilities.includes("api.routes.register")) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "Capability 'api.routes.register' is required when apiRoutes are declared", + path: ["capabilities"], + }); + } + } + + if (manifest.database) { + const requiredCapabilities = [ + "database.namespace.migrate", + "database.namespace.read", + ] as const; + for (const capability of requiredCapabilities) { + if (!manifest.capabilities.includes(capability)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Capability '${capability}' is required when database migrations are declared`, + path: ["capabilities"], + }); + } + } + + const coreReadTables = manifest.database.coreReadTables ?? []; + const duplicates = coreReadTables.filter((table, i) => coreReadTables.indexOf(table) !== i); + if (duplicates.length > 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Duplicate database coreReadTables: ${[...new Set(duplicates)].join(", ")}`, + path: ["database", "coreReadTables"], + }); + } + } + // ── Uniqueness checks ────────────────────────────────────────────────── // Duplicate keys within a plugin's own manifest are always a bug. The host // would not know which declaration takes precedence, so we reject early. 
@@ -504,6 +588,27 @@ export const pluginManifestV1Schema = z.object({ } } + if (manifest.apiRoutes) { + const routeKeys = manifest.apiRoutes.map((route) => route.routeKey); + const duplicateKeys = routeKeys.filter((key, i) => routeKeys.indexOf(key) !== i); + if (duplicateKeys.length > 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Duplicate api route keys: ${[...new Set(duplicateKeys)].join(", ")}`, + path: ["apiRoutes"], + }); + } + const routeSignatures = manifest.apiRoutes.map((route) => `${route.method} ${route.path}`); + const duplicateRoutes = routeSignatures.filter((sig, i) => routeSignatures.indexOf(sig) !== i); + if (duplicateRoutes.length > 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Duplicate api routes: ${[...new Set(duplicateRoutes)].join(", ")}`, + path: ["apiRoutes"], + }); + } + } + // tool names must be unique within the plugin (namespaced at runtime) if (manifest.tools) { const toolNames = manifest.tools.map((t) => t.name); diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 4ac3cfcc5f..dc540f76b2 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -3,6 +3,9 @@ packages: - packages/adapters/* - packages/plugins/* - packages/plugins/examples/* + # Exclude this smoke fixture from the workspace importer so PRs don't have + # to commit pnpm-lock.yaml; install it directly as a local plugin example. 
+ - "!packages/plugins/examples/plugin-orchestration-smoke-example" - server - ui - cli diff --git a/server/src/__tests__/plugin-database.test.ts b/server/src/__tests__/plugin-database.test.ts new file mode 100644 index 0000000000..9aaca2f15e --- /dev/null +++ b/server/src/__tests__/plugin-database.test.ts @@ -0,0 +1,269 @@ +import { randomUUID } from "node:crypto"; +import { mkdtemp, rm, mkdir, writeFile } from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { and, eq, sql } from "drizzle-orm"; +import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest"; +import { + companies, + createDb, + issueRelations, + issues, + pluginDatabaseNamespaces, + pluginMigrations, + plugins, +} from "@paperclipai/db"; +import type { PaperclipPluginManifestV1 } from "@paperclipai/shared"; +import { + getEmbeddedPostgresTestSupport, + startEmbeddedPostgresTestDatabase, +} from "./helpers/embedded-postgres.js"; +import { + derivePluginDatabaseNamespace, + pluginDatabaseService, + validatePluginMigrationStatement, + validatePluginRuntimeExecute, + validatePluginRuntimeQuery, +} from "../services/plugin-database.js"; + +const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport(); +const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip; + +if (!embeddedPostgresSupport.supported) { + console.warn( + `Skipping embedded Postgres plugin database tests on this host: ${embeddedPostgresSupport.reason ?? 
"unsupported environment"}`, + ); +} + +describe("plugin database SQL validation", () => { + it("allows namespace migrations with whitelisted public foreign keys", () => { + expect(() => + validatePluginMigrationStatement( + "CREATE TABLE plugin_test.rows (id uuid PRIMARY KEY, issue_id uuid REFERENCES public.issues(id))", + "plugin_test", + ["issues"], + ) + ).not.toThrow(); + }); + + it("rejects migrations that create public objects", () => { + expect(() => + validatePluginMigrationStatement( + "CREATE TABLE public.rows (id uuid PRIMARY KEY)", + "plugin_test", + ["issues"], + ) + ).toThrow(/public/i); + }); + + it("allows whitelisted runtime reads but rejects public writes", () => { + expect(() => + validatePluginRuntimeQuery( + "SELECT r.id FROM plugin_test.rows r JOIN public.issues i ON i.id = r.issue_id", + "plugin_test", + ["issues"], + ) + ).not.toThrow(); + expect(() => + validatePluginRuntimeExecute("UPDATE public.issues SET title = $1", "plugin_test") + ).toThrow(/namespace/i); + }); + + it("targets anonymous DO blocks without rejecting do-prefixed aliases", () => { + expect(() => + validatePluginRuntimeQuery( + "SELECT EXTRACT(DOW FROM created_at) AS do_flag FROM plugin_test.rows", + "plugin_test", + ) + ).not.toThrow(); + expect(() => + validatePluginMigrationStatement("DO $$ BEGIN END $$;", "plugin_test") + ).toThrow(/disallowed/i); + }); +}); + +describeEmbeddedPostgres("plugin database namespaces", () => { + let db!: ReturnType; + let tempDb: Awaited> | null = null; + let packageRoots: string[] = []; + + beforeAll(async () => { + tempDb = await startEmbeddedPostgresTestDatabase("paperclip-plugin-db-"); + db = createDb(tempDb.connectionString); + }, 20_000); + + afterEach(async () => { + for (const pluginKey of ["paperclip.dbtest", "paperclip.escape"]) { + const namespace = derivePluginDatabaseNamespace(pluginKey); + await db.execute(sql.raw(`DROP SCHEMA IF EXISTS "${namespace}" CASCADE`)); + } + await db.delete(pluginMigrations); + await 
db.delete(pluginDatabaseNamespaces); + await db.delete(plugins); + await db.delete(issueRelations); + await db.delete(issues); + await db.delete(companies); + await Promise.all(packageRoots.map((root) => rm(root, { recursive: true, force: true }))); + packageRoots = []; + }); + + afterAll(async () => { + await tempDb?.cleanup(); + }); + + async function createPluginPackage(manifest: PaperclipPluginManifestV1, migrationSql: string) { + const packageRoot = await mkdtemp(path.join(os.tmpdir(), "paperclip-plugin-package-")); + packageRoots.push(packageRoot); + const migrationsDir = path.join(packageRoot, manifest.database!.migrationsDir); + await mkdir(migrationsDir, { recursive: true }); + await writeFile(path.join(migrationsDir, "001_init.sql"), migrationSql, "utf8"); + return packageRoot; + } + + async function installPluginRecord(manifest: PaperclipPluginManifestV1) { + const pluginId = randomUUID(); + await db.insert(plugins).values({ + id: pluginId, + pluginKey: manifest.id, + packageName: manifest.id, + version: manifest.version, + apiVersion: manifest.apiVersion, + categories: manifest.categories, + manifestJson: manifest, + status: "installed", + installOrder: 1, + }); + return pluginId; + } + + function manifest(pluginKey = "paperclip.dbtest"): PaperclipPluginManifestV1 { + return { + id: pluginKey, + apiVersion: 1, + version: "1.0.0", + displayName: "DB Test", + description: "Exercises restricted plugin database access.", + author: "Paperclip", + categories: ["automation"], + capabilities: [ + "database.namespace.migrate", + "database.namespace.read", + "database.namespace.write", + ], + entrypoints: { worker: "./dist/worker.js" }, + database: { + migrationsDir: "migrations", + coreReadTables: ["issues"], + }, + }; + } + + it("applies migrations once and allows whitelisted core joins at runtime", async () => { + const pluginManifest = manifest(); + const namespace = derivePluginDatabaseNamespace(pluginManifest.id); + const packageRoot = await 
createPluginPackage( + pluginManifest, + ` + CREATE TABLE ${namespace}.mission_rows ( + id uuid PRIMARY KEY, + issue_id uuid NOT NULL REFERENCES public.issues(id), + label text NOT NULL + ); + `, + ); + const pluginId = await installPluginRecord(pluginManifest); + const companyId = randomUUID(); + const issueId = randomUUID(); + await db.insert(companies).values({ + id: companyId, + name: "Paperclip", + issuePrefix: "TST", + requireBoardApprovalForNewAgents: false, + }); + await db.insert(issues).values({ + id: issueId, + companyId, + title: "Joined issue", + status: "todo", + priority: "medium", + identifier: "TST-1", + }); + + const pluginDb = pluginDatabaseService(db); + await pluginDb.applyMigrations(pluginId, pluginManifest, packageRoot); + await pluginDb.applyMigrations(pluginId, pluginManifest, packageRoot); + + await pluginDb.execute( + pluginId, + `INSERT INTO ${namespace}.mission_rows (id, issue_id, label) VALUES ($1, $2, $3)`, + [randomUUID(), issueId, "alpha"], + ); + const rows = await pluginDb.query<{ label: string; title: string }>( + pluginId, + `SELECT m.label, i.title FROM ${namespace}.mission_rows m JOIN public.issues i ON i.id = m.issue_id`, + ); + expect(rows).toEqual([{ label: "alpha", title: "Joined issue" }]); + + const migrations = await db + .select() + .from(pluginMigrations) + .where(and(eq(pluginMigrations.pluginId, pluginId), eq(pluginMigrations.status, "applied"))); + expect(migrations).toHaveLength(1); + }); + + it("rejects runtime writes to public core tables", async () => { + const pluginManifest = manifest(); + const namespace = derivePluginDatabaseNamespace(pluginManifest.id); + const packageRoot = await createPluginPackage( + pluginManifest, + `CREATE TABLE ${namespace}.notes (id uuid PRIMARY KEY, body text NOT NULL);`, + ); + const pluginId = await installPluginRecord(pluginManifest); + const pluginDb = pluginDatabaseService(db); + await pluginDb.applyMigrations(pluginId, pluginManifest, packageRoot); + + await expect( + 
pluginDb.execute(pluginId, "UPDATE public.issues SET title = $1", ["bad"]), + ).rejects.toThrow(/plugin namespace/i); + }); + + it("records a failed migration when SQL escapes the plugin namespace", async () => { + const pluginManifest = manifest("paperclip.escape"); + const packageRoot = await createPluginPackage( + pluginManifest, + "CREATE TABLE public.plugin_escape (id uuid PRIMARY KEY);", + ); + const pluginId = await installPluginRecord(pluginManifest); + + await expect( + pluginDatabaseService(db).applyMigrations(pluginId, pluginManifest, packageRoot), + ).rejects.toThrow(/public\.plugin_escape|public/i); + + const [migration] = await db + .select() + .from(pluginMigrations) + .where(eq(pluginMigrations.pluginId, pluginId)); + expect(migration?.status).toBe("failed"); + }); + + it("rejects checksum changes for already applied migrations", async () => { + const pluginManifest = manifest(); + const namespace = derivePluginDatabaseNamespace(pluginManifest.id); + const packageRoot = await createPluginPackage( + pluginManifest, + `CREATE TABLE ${namespace}.checksum_rows (id uuid PRIMARY KEY);`, + ); + const pluginId = await installPluginRecord(pluginManifest); + const pluginDb = pluginDatabaseService(db); + await pluginDb.applyMigrations(pluginId, pluginManifest, packageRoot); + + await writeFile( + path.join(packageRoot, "migrations", "001_init.sql"), + `CREATE TABLE ${namespace}.checksum_rows (id uuid PRIMARY KEY, note text);`, + "utf8", + ); + + await expect(pluginDb.applyMigrations(pluginId, pluginManifest, packageRoot)) + .rejects.toThrow(/checksum mismatch/i); + }); +}); diff --git a/server/src/__tests__/plugin-orchestration-apis.test.ts b/server/src/__tests__/plugin-orchestration-apis.test.ts new file mode 100644 index 0000000000..b8ee02e962 --- /dev/null +++ b/server/src/__tests__/plugin-orchestration-apis.test.ts @@ -0,0 +1,372 @@ +import { randomUUID } from "node:crypto"; +import { and, eq } from "drizzle-orm"; +import { afterAll, afterEach, beforeAll, 
describe, expect, it } from "vitest"; +import { + activityLog, + agentWakeupRequests, + agents, + companies, + costEvents, + createDb, + heartbeatRuns, + issueRelations, + issues, +} from "@paperclipai/db"; +import { + getEmbeddedPostgresTestSupport, + startEmbeddedPostgresTestDatabase, +} from "./helpers/embedded-postgres.js"; +import { buildHostServices } from "../services/plugin-host-services.js"; + +const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport(); +const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip; + +function createEventBusStub() { + return { + forPlugin() { + return { + emit: async () => {}, + subscribe: () => {}, + }; + }, + } as any; +} + +function issuePrefix(id: string) { + return `T${id.replace(/-/g, "").slice(0, 6).toUpperCase()}`; +} + +if (!embeddedPostgresSupport.supported) { + console.warn( + `Skipping embedded Postgres plugin orchestration API tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`, + ); +} + +describeEmbeddedPostgres("plugin orchestration APIs", () => { + let db!: ReturnType; + let tempDb: Awaited> | null = null; + + beforeAll(async () => { + tempDb = await startEmbeddedPostgresTestDatabase("paperclip-plugin-orchestration-"); + db = createDb(tempDb.connectionString); + }, 20_000); + + afterEach(async () => { + await db.delete(activityLog); + await db.delete(costEvents); + await db.delete(heartbeatRuns); + await db.delete(agentWakeupRequests); + await db.delete(issueRelations); + await db.delete(issues); + await db.delete(agents); + await db.delete(companies); + }); + + afterAll(async () => { + await tempDb?.cleanup(); + }); + + async function seedCompanyAndAgent() { + const companyId = randomUUID(); + const agentId = randomUUID(); + await db.insert(companies).values({ + id: companyId, + name: "Paperclip", + issuePrefix: issuePrefix(companyId), + requireBoardApprovalForNewAgents: false, + }); + await db.insert(agents).values({ + id: 
agentId, + companyId, + name: "Engineer", + role: "engineer", + status: "idle", + adapterType: "process", + adapterConfig: { command: "true" }, + runtimeConfig: {}, + permissions: {}, + }); + return { companyId, agentId }; + } + + it("creates plugin-origin issues with full orchestration fields and audit activity", async () => { + const { companyId, agentId } = await seedCompanyAndAgent(); + const blockerIssueId = randomUUID(); + const originRunId = randomUUID(); + await db.insert(heartbeatRuns).values({ + id: originRunId, + companyId, + agentId, + status: "running", + invocationSource: "assignment", + contextSnapshot: { issueId: blockerIssueId }, + }); + await db.insert(issues).values({ + id: blockerIssueId, + companyId, + title: "Blocker", + status: "todo", + priority: "medium", + identifier: `${issuePrefix(companyId)}-blocker`, + }); + + const services = buildHostServices(db, "plugin-record-id", "paperclip.missions", createEventBusStub()); + const issue = await services.issues.create({ + companyId, + title: "Plugin child issue", + status: "todo", + assigneeAgentId: agentId, + billingCode: "mission:alpha", + originId: "mission-alpha", + blockedByIssueIds: [blockerIssueId], + actorAgentId: agentId, + actorRunId: originRunId, + }); + + const [stored] = await db.select().from(issues).where(eq(issues.id, issue.id)); + expect(stored?.originKind).toBe("plugin:paperclip.missions"); + expect(stored?.originId).toBe("mission-alpha"); + expect(stored?.billingCode).toBe("mission:alpha"); + expect(stored?.assigneeAgentId).toBe(agentId); + expect(stored?.createdByAgentId).toBe(agentId); + expect(stored?.originRunId).toBe(originRunId); + + const [relation] = await db + .select() + .from(issueRelations) + .where(and(eq(issueRelations.issueId, blockerIssueId), eq(issueRelations.relatedIssueId, issue.id))); + expect(relation?.type).toBe("blocks"); + + const activities = await db + .select() + .from(activityLog) + .where(and(eq(activityLog.entityType, "issue"), 
eq(activityLog.entityId, issue.id))); + expect(activities).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + actorType: "plugin", + actorId: "plugin-record-id", + action: "issue.created", + agentId, + details: expect.objectContaining({ + sourcePluginId: "plugin-record-id", + sourcePluginKey: "paperclip.missions", + initiatingActorType: "agent", + initiatingActorId: agentId, + initiatingRunId: originRunId, + }), + }), + ]), + ); + }); + + it("enforces plugin origin namespaces", async () => { + const { companyId } = await seedCompanyAndAgent(); + const services = buildHostServices(db, "plugin-record-id", "paperclip.missions", createEventBusStub()); + + const featureIssue = await services.issues.create({ + companyId, + title: "Feature issue", + originKind: "plugin:paperclip.missions:feature", + originId: "mission-alpha:feature-1", + }); + expect(featureIssue.originKind).toBe("plugin:paperclip.missions:feature"); + + await expect( + services.issues.create({ + companyId, + title: "Spoofed issue", + originKind: "plugin:other.plugin:feature", + }), + ).rejects.toThrow("Plugin may only use originKind values under plugin:paperclip.missions"); + + await expect( + services.issues.update({ + issueId: featureIssue.id, + companyId, + patch: { originKind: "plugin:other.plugin:feature" }, + }), + ).rejects.toThrow("Plugin may only use originKind values under plugin:paperclip.missions"); + }); + + it("asserts checkout ownership for run-scoped plugin actions", async () => { + const { companyId, agentId } = await seedCompanyAndAgent(); + const issueId = randomUUID(); + const runId = randomUUID(); + await db.insert(heartbeatRuns).values({ + id: runId, + companyId, + agentId, + status: "running", + invocationSource: "assignment", + contextSnapshot: { issueId }, + }); + await db.insert(issues).values({ + id: issueId, + companyId, + title: "Checked out issue", + status: "in_progress", + priority: "medium", + assigneeAgentId: agentId, + checkoutRunId: runId, + 
executionRunId: runId, + }); + + const services = buildHostServices(db, "plugin-record-id", "paperclip.missions", createEventBusStub()); + await expect( + services.issues.assertCheckoutOwner({ + issueId, + companyId, + actorAgentId: agentId, + actorRunId: runId, + }), + ).resolves.toMatchObject({ + issueId, + status: "in_progress", + assigneeAgentId: agentId, + checkoutRunId: runId, + }); + }); + + it("refuses plugin wakeups for issues with unresolved blockers", async () => { + const { companyId, agentId } = await seedCompanyAndAgent(); + const blockerIssueId = randomUUID(); + const blockedIssueId = randomUUID(); + await db.insert(issues).values([ + { + id: blockerIssueId, + companyId, + title: "Unresolved blocker", + status: "todo", + priority: "medium", + }, + { + id: blockedIssueId, + companyId, + title: "Blocked issue", + status: "todo", + priority: "medium", + assigneeAgentId: agentId, + }, + ]); + await db.insert(issueRelations).values({ + companyId, + issueId: blockerIssueId, + relatedIssueId: blockedIssueId, + type: "blocks", + }); + + const services = buildHostServices(db, "plugin-record-id", "paperclip.missions", createEventBusStub()); + await expect( + services.issues.requestWakeup({ + issueId: blockedIssueId, + companyId, + reason: "mission_advance", + }), + ).rejects.toThrow("Issue is blocked by unresolved blockers"); + }); + + it("narrows orchestration cost summaries by subtree and billing code", async () => { + const { companyId, agentId } = await seedCompanyAndAgent(); + const rootIssueId = randomUUID(); + const childIssueId = randomUUID(); + const unrelatedIssueId = randomUUID(); + await db.insert(issues).values([ + { + id: rootIssueId, + companyId, + title: "Root mission", + status: "todo", + priority: "medium", + billingCode: "mission:alpha", + }, + { + id: childIssueId, + companyId, + parentId: rootIssueId, + title: "Child mission", + status: "todo", + priority: "medium", + billingCode: "mission:alpha", + }, + { + id: unrelatedIssueId, + 
companyId, + title: "Different mission", + status: "todo", + priority: "medium", + billingCode: "mission:alpha", + }, + ]); + await db.insert(costEvents).values([ + { + companyId, + agentId, + issueId: rootIssueId, + billingCode: "mission:alpha", + provider: "test", + model: "unit", + inputTokens: 10, + cachedInputTokens: 1, + outputTokens: 2, + costCents: 100, + occurredAt: new Date(), + }, + { + companyId, + agentId, + issueId: childIssueId, + billingCode: "mission:alpha", + provider: "test", + model: "unit", + inputTokens: 20, + cachedInputTokens: 2, + outputTokens: 4, + costCents: 200, + occurredAt: new Date(), + }, + { + companyId, + agentId, + issueId: childIssueId, + billingCode: "mission:beta", + provider: "test", + model: "unit", + inputTokens: 30, + cachedInputTokens: 3, + outputTokens: 6, + costCents: 300, + occurredAt: new Date(), + }, + { + companyId, + agentId, + issueId: unrelatedIssueId, + billingCode: "mission:alpha", + provider: "test", + model: "unit", + inputTokens: 40, + cachedInputTokens: 4, + outputTokens: 8, + costCents: 400, + occurredAt: new Date(), + }, + ]); + + const services = buildHostServices(db, "plugin-record-id", "paperclip.missions", createEventBusStub()); + const summary = await services.issues.getOrchestrationSummary({ + companyId, + issueId: rootIssueId, + includeSubtree: true, + }); + + expect(new Set(summary.subtreeIssueIds)).toEqual(new Set([rootIssueId, childIssueId])); + expect(summary.costs).toMatchObject({ + billingCode: "mission:alpha", + costCents: 300, + inputTokens: 30, + cachedInputTokens: 3, + outputTokens: 6, + }); + }); +}); diff --git a/server/src/__tests__/plugin-routes-authz.test.ts b/server/src/__tests__/plugin-routes-authz.test.ts index 4c965b0d8b..95a9281cae 100644 --- a/server/src/__tests__/plugin-routes-authz.test.ts +++ b/server/src/__tests__/plugin-routes-authz.test.ts @@ -32,7 +32,11 @@ vi.mock("../services/live-events.js", () => ({ publishGlobalLiveEvent: vi.fn(), })); -async function 
createApp(actor: Record, loaderOverrides: Record = {}) { +async function createApp( + actor: Record, + loaderOverrides: Record = {}, + bridgeDeps?: Record, +) { const [{ pluginRoutes }, { errorHandler }] = await Promise.all([ import("../routes/plugins.js"), import("../middleware/index.js"), @@ -49,7 +53,7 @@ async function createApp(actor: Record, loaderOverrides: Record req.actor = actor as typeof req.actor; next(); }); - app.use("/api", pluginRoutes({} as never, loader as never)); + app.use("/api", pluginRoutes({} as never, loader as never, undefined, undefined, undefined, bridgeDeps as never)); app.use(errorHandler); return { app, loader }; @@ -195,3 +199,69 @@ describe("plugin install and upgrade authz", () => { expect(mockLifecycle.upgrade).toHaveBeenCalledWith(pluginId, "1.1.0"); }, 20_000); }); + +describe("scoped plugin API routes", () => { + beforeEach(() => { + vi.resetAllMocks(); + }); + + it("dispatches manifest-declared scoped routes after company access checks", async () => { + const pluginId = "11111111-1111-4111-8111-111111111111"; + const workerManager = { + call: vi.fn().mockResolvedValue({ + status: 202, + body: { ok: true }, + }), + }; + mockRegistry.getById.mockResolvedValue(null); + mockRegistry.getByKey.mockResolvedValue({ + id: pluginId, + pluginKey: "paperclip.example", + version: "1.0.0", + status: "ready", + manifestJson: { + id: "paperclip.example", + capabilities: ["api.routes.register"], + apiRoutes: [ + { + routeKey: "smoke", + method: "GET", + path: "/smoke", + auth: "board-or-agent", + capability: "api.routes.register", + companyResolution: { from: "query", key: "companyId" }, + }, + ], + }, + }); + + const { app } = await createApp( + { + type: "board", + userId: "admin-1", + source: "session", + isInstanceAdmin: false, + companyIds: ["company-1"], + }, + {}, + { workerManager }, + ); + + const res = await request(app) + .get("/api/plugins/paperclip.example/api/smoke") + .query({ companyId: "company-1" }); + + 
expect(res.status).toBe(202); + expect(res.body).toEqual({ ok: true }); + expect(workerManager.call).toHaveBeenCalledWith( + pluginId, + "handleApiRequest", + expect.objectContaining({ + routeKey: "smoke", + method: "GET", + companyId: "company-1", + query: { companyId: "company-1" }, + }), + ); + }, 20_000); +}); diff --git a/server/src/__tests__/plugin-scoped-api-routes.test.ts b/server/src/__tests__/plugin-scoped-api-routes.test.ts new file mode 100644 index 0000000000..6968e82246 --- /dev/null +++ b/server/src/__tests__/plugin-scoped-api-routes.test.ts @@ -0,0 +1,427 @@ +import express from "express"; +import request from "supertest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { pluginManifestV1Schema, type PaperclipPluginManifestV1 } from "@paperclipai/shared"; + +const mockRegistry = vi.hoisted(() => ({ + getById: vi.fn(), + getByKey: vi.fn(), +})); + +const mockLifecycle = vi.hoisted(() => ({ + load: vi.fn(), + upgrade: vi.fn(), +})); + +const mockIssueService = vi.hoisted(() => ({ + getById: vi.fn(), + assertCheckoutOwner: vi.fn(), +})); + +vi.mock("../services/plugin-registry.js", () => ({ + pluginRegistryService: () => mockRegistry, +})); + +vi.mock("../services/plugin-lifecycle.js", () => ({ + pluginLifecycleManager: () => mockLifecycle, +})); + +vi.mock("../services/issues.js", () => ({ + issueService: () => mockIssueService, +})); + +vi.mock("../services/activity-log.js", () => ({ + logActivity: vi.fn(), +})); + +vi.mock("../services/live-events.js", () => ({ + publishGlobalLiveEvent: vi.fn(), +})); + +function manifest(apiRoutes: NonNullable): PaperclipPluginManifestV1 { + return { + id: "paperclip.scoped-api-test", + apiVersion: 1, + version: "1.0.0", + displayName: "Scoped API Test", + description: "Test plugin for scoped API routes", + author: "Paperclip", + categories: ["automation"], + capabilities: ["api.routes.register"], + entrypoints: { worker: "dist/worker.js" }, + apiRoutes, + }; +} + +async function 
createApp(input: { + actor: Record; + plugin?: Record | null; + workerRunning?: boolean; + workerResult?: unknown; +}) { + const [{ pluginRoutes }, { errorHandler }] = await Promise.all([ + import("../routes/plugins.js"), + import("../middleware/index.js"), + ]); + + const workerManager = { + isRunning: vi.fn().mockReturnValue(input.workerRunning ?? true), + call: vi.fn().mockResolvedValue(input.workerResult ?? { status: 200, body: { ok: true } }), + }; + + mockRegistry.getById.mockResolvedValue(input.plugin ?? null); + mockRegistry.getByKey.mockResolvedValue(input.plugin ?? null); + + const app = express(); + app.use(express.json()); + app.use((req, _res, next) => { + req.actor = input.actor as typeof req.actor; + next(); + }); + app.use( + "/api", + pluginRoutes( + {} as never, + { installPlugin: vi.fn() } as never, + undefined, + undefined, + undefined, + { workerManager } as never, + ), + ); + app.use(errorHandler); + + return { app, workerManager }; +} + +describe("plugin scoped API routes", () => { + const pluginId = "11111111-1111-4111-8111-111111111111"; + const companyId = "22222222-2222-4222-8222-222222222222"; + const agentId = "33333333-3333-4333-8333-333333333333"; + const runId = "44444444-4444-4444-8444-444444444444"; + const issueId = "55555555-5555-4555-8555-555555555555"; + + beforeEach(() => { + vi.resetAllMocks(); + mockIssueService.getById.mockResolvedValue(null); + mockIssueService.assertCheckoutOwner.mockResolvedValue({ + id: issueId, + status: "in_progress", + assigneeAgentId: agentId, + checkoutRunId: runId, + adoptedFromRunId: null, + }); + }); + + it("dispatches a board GET route with params, query, actor, and company context", async () => { + const apiRoutes = manifest([ + { + routeKey: "summary.get", + method: "GET", + path: "/companies/:companySlug/summary", + auth: "board", + capability: "api.routes.register", + companyResolution: { from: "query", key: "companyId" }, + }, + ]); + const { app, workerManager } = await createApp({ + 
actor: { + type: "board", + userId: "user-1", + source: "local_implicit", + isInstanceAdmin: true, + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "ready", + manifestJson: apiRoutes, + }, + workerResult: { status: 201, body: { handled: true } }, + }); + + const res = await request(app) + .get(`/api/plugins/${pluginId}/api/companies/acme/summary?companyId=${companyId}&view=compact`) + .set("Authorization", "Bearer should-not-forward"); + + expect(res.status).toBe(201); + expect(res.body).toEqual({ handled: true }); + expect(workerManager.call).toHaveBeenCalledWith(pluginId, "handleApiRequest", expect.objectContaining({ + routeKey: "summary.get", + method: "GET", + params: { companySlug: "acme" }, + query: { companyId, view: "compact" }, + companyId, + actor: expect.objectContaining({ actorType: "user", actorId: "user-1" }), + })); + expect(workerManager.call.mock.calls[0]?.[2].headers.authorization).toBeUndefined(); + }); + + it("only forwards allowlisted response headers from plugin routes", async () => { + const apiRoutes = manifest([ + { + routeKey: "summary.get", + method: "GET", + path: "/companies/:companySlug/summary", + auth: "board", + capability: "api.routes.register", + companyResolution: { from: "query", key: "companyId" }, + }, + ]); + const { app } = await createApp({ + actor: { + type: "board", + userId: "user-1", + source: "local_implicit", + isInstanceAdmin: true, + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "ready", + manifestJson: apiRoutes, + }, + workerResult: { + status: 200, + body: { handled: true }, + headers: { + "cache-control": "no-store", + "content-security-policy": "default-src 'none'", + location: "https://example.invalid", + "x-request-id": "plugin-request", + }, + }, + }); + + const res = await request(app) + .get(`/api/plugins/${pluginId}/api/companies/acme/summary?companyId=${companyId}`); + + expect(res.status).toBe(200); + expect(res.headers["cache-control"]).toBe("no-store"); + 
expect(res.headers["x-request-id"]).toBe("plugin-request"); + expect(res.headers["content-security-policy"]).toBeUndefined(); + expect(res.headers.location).toBeUndefined(); + }); + + it("enforces agent checkout ownership before dispatching issue-scoped POST routes", async () => { + const apiRoutes = manifest([ + { + routeKey: "issue.advance", + method: "POST", + path: "/issues/:issueId/advance", + auth: "agent", + capability: "api.routes.register", + checkoutPolicy: "required-for-agent-in-progress", + companyResolution: { from: "issue", param: "issueId" }, + }, + ]); + mockIssueService.getById.mockResolvedValue({ + id: issueId, + companyId, + status: "in_progress", + assigneeAgentId: agentId, + }); + const { app, workerManager } = await createApp({ + actor: { + type: "agent", + agentId, + companyId, + runId, + source: "agent_key", + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "ready", + manifestJson: apiRoutes, + }, + }); + + const res = await request(app) + .post(`/api/plugins/${pluginId}/api/issues/${issueId}/advance`) + .send({ step: "next" }); + + expect(res.status).toBe(200); + expect(mockIssueService.assertCheckoutOwner).toHaveBeenCalledWith(issueId, agentId, runId); + expect(workerManager.call).toHaveBeenCalledWith(pluginId, "handleApiRequest", expect.objectContaining({ + routeKey: "issue.advance", + params: { issueId }, + body: { step: "next" }, + actor: expect.objectContaining({ actorType: "agent", agentId, runId }), + companyId, + })); + }); + + it("rejects checkout-protected agent routes without a run id before worker dispatch", async () => { + const apiRoutes = manifest([ + { + routeKey: "issue.advance", + method: "POST", + path: "/issues/:issueId/advance", + auth: "agent", + capability: "api.routes.register", + checkoutPolicy: "required-for-agent-in-progress", + companyResolution: { from: "issue", param: "issueId" }, + }, + ]); + mockIssueService.getById.mockResolvedValue({ + id: issueId, + companyId, + status: "in_progress", + 
assigneeAgentId: agentId, + }); + const { app, workerManager } = await createApp({ + actor: { + type: "agent", + agentId, + companyId, + source: "agent_key", + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "ready", + manifestJson: apiRoutes, + }, + }); + + const res = await request(app) + .post(`/api/plugins/${pluginId}/api/issues/${issueId}/advance`) + .send({}); + + expect(res.status).toBe(401); + expect(workerManager.call).not.toHaveBeenCalled(); + }); + + it("rejects checkout-protected agent routes when the active checkout belongs to another run", async () => { + const apiRoutes = manifest([ + { + routeKey: "issue.advance", + method: "POST", + path: "/issues/:issueId/advance", + auth: "agent", + capability: "api.routes.register", + checkoutPolicy: "always-for-agent", + companyResolution: { from: "issue", param: "issueId" }, + }, + ]); + mockIssueService.getById.mockResolvedValue({ + id: issueId, + companyId, + status: "in_progress", + assigneeAgentId: agentId, + }); + const conflict = new Error("Issue run ownership conflict") as Error & { status?: number }; + conflict.status = 409; + mockIssueService.assertCheckoutOwner.mockRejectedValue(conflict); + const { app, workerManager } = await createApp({ + actor: { + type: "agent", + agentId, + companyId, + runId, + source: "agent_key", + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "ready", + manifestJson: apiRoutes, + }, + }); + + const res = await request(app) + .post(`/api/plugins/${pluginId}/api/issues/${issueId}/advance`) + .send({}); + + expect(res.status).toBe(409); + expect(workerManager.call).not.toHaveBeenCalled(); + }); + + it("returns a clear error for disabled plugins without worker dispatch", async () => { + const apiRoutes = manifest([ + { + routeKey: "summary.get", + method: "GET", + path: "/summary", + auth: "board", + capability: "api.routes.register", + companyResolution: { from: "query", key: "companyId" }, + }, + ]); + const { app, workerManager } = 
await createApp({ + actor: { + type: "board", + userId: "user-1", + source: "local_implicit", + isInstanceAdmin: true, + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "disabled", + manifestJson: apiRoutes, + }, + }); + + const res = await request(app) + .get(`/api/plugins/${pluginId}/api/summary?companyId=${companyId}`); + + expect(res.status).toBe(503); + expect(res.body.error).toContain("disabled"); + expect(workerManager.call).not.toHaveBeenCalled(); + }); + + it("returns a clear error when a ready plugin has no running worker", async () => { + const apiRoutes = manifest([ + { + routeKey: "summary.get", + method: "GET", + path: "/summary", + auth: "board", + capability: "api.routes.register", + companyResolution: { from: "query", key: "companyId" }, + }, + ]); + const { app, workerManager } = await createApp({ + actor: { + type: "board", + userId: "user-1", + source: "local_implicit", + isInstanceAdmin: true, + }, + plugin: { + id: pluginId, + pluginKey: apiRoutes.id, + status: "ready", + manifestJson: apiRoutes, + }, + workerRunning: false, + }); + + const res = await request(app) + .get(`/api/plugins/${pluginId}/api/summary?companyId=${companyId}`); + + expect(res.status).toBe(503); + expect(res.body.error).toContain("worker is not running"); + expect(workerManager.call).not.toHaveBeenCalled(); + }); + + it("rejects manifest routes that try to claim core API paths", () => { + const result = pluginManifestV1Schema.safeParse(manifest([ + { + routeKey: "bad.shadow", + method: "POST", + path: "/api/issues/:issueId", + auth: "board", + capability: "api.routes.register", + }, + ])); + + expect(result.success).toBe(false); + if (result.success) throw new Error("Expected manifest validation to fail"); + expect(result.error.issues.map((issue) => issue.message).join("\n")).toContain( + "path must stay inside the plugin api namespace", + ); + }); +}); diff --git a/server/src/__tests__/plugin-sdk-orchestration-contract.test.ts 
b/server/src/__tests__/plugin-sdk-orchestration-contract.test.ts new file mode 100644 index 0000000000..d68cb60a53 --- /dev/null +++ b/server/src/__tests__/plugin-sdk-orchestration-contract.test.ts @@ -0,0 +1,240 @@ +import { randomUUID } from "node:crypto"; +import { describe, expect, it } from "vitest"; +import type { Issue, PaperclipPluginManifestV1 } from "@paperclipai/shared"; +import { createTestHarness } from "../../../packages/plugins/sdk/src/testing.js"; + +function manifest(capabilities: PaperclipPluginManifestV1["capabilities"]): PaperclipPluginManifestV1 { + return { + id: "paperclip.test-orchestration", + apiVersion: 1, + version: "0.1.0", + displayName: "Test Orchestration", + description: "Test plugin", + author: "Paperclip", + categories: ["automation"], + capabilities, + entrypoints: { worker: "./dist/worker.js" }, + }; +} + +function issue(input: Partial & Pick): Issue { + const now = new Date(); + return { + id: input.id, + companyId: input.companyId, + projectId: null, + projectWorkspaceId: null, + goalId: null, + parentId: null, + title: input.title, + description: null, + status: "todo", + priority: "medium", + assigneeAgentId: null, + assigneeUserId: null, + checkoutRunId: null, + executionRunId: null, + executionAgentNameKey: null, + executionLockedAt: null, + createdByAgentId: null, + createdByUserId: null, + issueNumber: null, + identifier: null, + requestDepth: 0, + billingCode: null, + assigneeAdapterOverrides: null, + executionWorkspaceId: null, + executionWorkspacePreference: null, + executionWorkspaceSettings: null, + startedAt: null, + completedAt: null, + cancelledAt: null, + hiddenAt: null, + createdAt: now, + updatedAt: now, + ...input, + }; +} + +describe("plugin SDK orchestration contract", () => { + it("supports expanded issue create fields and relation helpers", async () => { + const companyId = randomUUID(); + const blockerIssueId = randomUUID(); + const harness = createTestHarness({ + manifest: manifest(["issues.create", 
"issue.relations.read", "issue.relations.write", "issue.subtree.read"]), + }); + harness.seed({ + issues: [issue({ id: blockerIssueId, companyId, title: "Blocker" })], + }); + + const created = await harness.ctx.issues.create({ + companyId, + title: "Generated issue", + status: "todo", + assigneeUserId: "board-user", + billingCode: "mission:alpha", + originId: "mission-alpha", + blockedByIssueIds: [blockerIssueId], + }); + + expect(created.originKind).toBe("plugin:paperclip.test-orchestration"); + expect(created.originId).toBe("mission-alpha"); + expect(created.billingCode).toBe("mission:alpha"); + expect(created.assigneeUserId).toBe("board-user"); + + await expect(harness.ctx.issues.relations.get(created.id, companyId)).resolves.toEqual({ + blockedBy: [ + expect.objectContaining({ + id: blockerIssueId, + title: "Blocker", + }), + ], + blocks: [], + }); + + await expect(harness.ctx.issues.relations.removeBlockers(created.id, [blockerIssueId], companyId)).resolves.toEqual({ + blockedBy: [], + blocks: [], + }); + + await expect(harness.ctx.issues.relations.addBlockers(created.id, [blockerIssueId], companyId)).resolves.toEqual({ + blockedBy: [expect.objectContaining({ id: blockerIssueId })], + blocks: [], + }); + + await expect( + harness.ctx.issues.getSubtree(created.id, companyId, { includeRelations: true }), + ).resolves.toMatchObject({ + rootIssueId: created.id, + issueIds: [created.id], + relations: { + [created.id]: { + blockedBy: [expect.objectContaining({ id: blockerIssueId })], + }, + }, + }); + }); + + it("enforces plugin origin namespaces in the test harness", async () => { + const companyId = randomUUID(); + const harness = createTestHarness({ + manifest: manifest(["issues.create", "issues.update", "issues.read"]), + }); + + const created = await harness.ctx.issues.create({ + companyId, + title: "Generated issue", + originKind: "plugin:paperclip.test-orchestration:feature", + }); + + 
expect(created.originKind).toBe("plugin:paperclip.test-orchestration:feature"); + await expect( + harness.ctx.issues.list({ + companyId, + originKind: "plugin:paperclip.test-orchestration:feature", + }), + ).resolves.toHaveLength(1); + await expect( + harness.ctx.issues.create({ + companyId, + title: "Spoofed issue", + originKind: "plugin:other.plugin:feature", + }), + ).rejects.toThrow("Plugin may only use originKind values under plugin:paperclip.test-orchestration"); + await expect( + harness.ctx.issues.update( + created.id, + { originKind: "plugin:other.plugin:feature" }, + companyId, + ), + ).rejects.toThrow("Plugin may only use originKind values under plugin:paperclip.test-orchestration"); + }); + + it("enforces checkout and wakeup capabilities in the test harness", async () => { + const companyId = randomUUID(); + const agentId = randomUUID(); + const runId = randomUUID(); + const checkedOutIssueId = randomUUID(); + const harness = createTestHarness({ + manifest: manifest(["issues.checkout", "issues.wakeup", "issues.read"]), + }); + harness.seed({ + issues: [ + issue({ + id: checkedOutIssueId, + companyId, + title: "Checked out", + status: "in_progress", + assigneeAgentId: agentId, + checkoutRunId: runId, + }), + ], + }); + + await expect( + harness.ctx.issues.assertCheckoutOwner({ + issueId: checkedOutIssueId, + companyId, + actorAgentId: agentId, + actorRunId: runId, + }), + ).resolves.toMatchObject({ + issueId: checkedOutIssueId, + checkoutRunId: runId, + }); + + await expect( + harness.ctx.issues.requestWakeup(checkedOutIssueId, companyId, { + reason: "mission_advance", + }), + ).resolves.toMatchObject({ queued: true }); + + await expect( + harness.ctx.issues.requestWakeups([checkedOutIssueId], companyId, { + reason: "mission_advance", + idempotencyKeyPrefix: "mission:alpha", + }), + ).resolves.toEqual([ + expect.objectContaining({ + issueId: checkedOutIssueId, + queued: true, + }), + ]); + }); + + it("rejects wakeups when blockers are unresolved", async 
() => { + const companyId = randomUUID(); + const blockerIssueId = randomUUID(); + const blockedIssueId = randomUUID(); + const harness = createTestHarness({ + manifest: manifest(["issues.wakeup", "issues.read"]), + }); + harness.seed({ + issues: [ + issue({ id: blockerIssueId, companyId, title: "Unresolved blocker", status: "todo" }), + issue({ + id: blockedIssueId, + companyId, + title: "Blocked work", + status: "todo", + assigneeAgentId: randomUUID(), + blockedBy: [ + { + id: blockerIssueId, + identifier: null, + title: "Unresolved blocker", + status: "todo", + priority: "medium", + assigneeAgentId: null, + assigneeUserId: null, + }, + ], + }), + ], + }); + + await expect( + harness.ctx.issues.requestWakeup(blockedIssueId, companyId), + ).rejects.toThrow("Issue is blocked by unresolved blockers"); + }); +}); diff --git a/server/src/app.ts b/server/src/app.ts index b1389b74d4..b1a31401bc 100644 --- a/server/src/app.ts +++ b/server/src/app.ts @@ -127,6 +127,7 @@ export async function createApp( instanceId?: string; hostVersion?: string; localPluginDir?: string; + pluginMigrationDb?: Db; betterAuthHandler?: express.RequestHandler; resolveSession?: (req: ExpressRequest) => Promise; }, @@ -231,7 +232,10 @@ export async function createApp( let viteHtmlRenderer: ReturnType | null = null; const loader = pluginLoader( db, - { localPluginDir: opts.localPluginDir ?? DEFAULT_LOCAL_PLUGIN_DIR }, + { + localPluginDir: opts.localPluginDir ?? 
DEFAULT_LOCAL_PLUGIN_DIR, + migrationDb: opts.pluginMigrationDb, + }, { workerManager, eventBus, diff --git a/server/src/config.ts b/server/src/config.ts index 21271c980b..77b8a3f0cf 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -62,6 +62,7 @@ export interface Config { authDisableSignUp: boolean; databaseMode: DatabaseMode; databaseUrl: string | undefined; + databaseMigrationUrl: string | undefined; embeddedPostgresDataDir: string; embeddedPostgresPort: number; databaseBackupEnabled: boolean; @@ -297,6 +298,7 @@ export function loadConfig(): Config { authDisableSignUp, databaseMode: fileDatabaseMode, databaseUrl: process.env.DATABASE_URL ?? fileDbUrl, + databaseMigrationUrl: process.env.DATABASE_MIGRATION_URL, embeddedPostgresDataDir: resolveHomeAwarePath( fileConfig?.database.embeddedPostgresDataDir ?? resolveDefaultEmbeddedPostgresDir(), ), diff --git a/server/src/index.ts b/server/src/index.ts index 870dd013ce..a788364a7c 100644 --- a/server/src/index.ts +++ b/server/src/index.ts @@ -258,6 +258,7 @@ export async function startServer(): Promise { } let db; + let pluginMigrationDb; let embeddedPostgres: EmbeddedPostgresInstance | null = null; let embeddedPostgresStartedByThisProcess = false; let migrationSummary: MigrationSummary = "skipped"; @@ -267,9 +268,11 @@ export async function startServer(): Promise { | { mode: "external-postgres"; connectionString: string } | { mode: "embedded-postgres"; dataDir: string; port: number }; if (config.databaseUrl) { - migrationSummary = await ensureMigrations(config.databaseUrl, "PostgreSQL"); + const migrationUrl = config.databaseMigrationUrl ?? config.databaseUrl; + migrationSummary = await ensureMigrations(migrationUrl, "PostgreSQL"); db = createDb(config.databaseUrl); + pluginMigrationDb = config.databaseMigrationUrl ? 
createDb(config.databaseMigrationUrl) : db; logger.info("Using external PostgreSQL via DATABASE_URL/config"); activeDatabaseConnectionString = config.databaseUrl; startupDbInfo = { mode: "external-postgres", connectionString: config.databaseUrl }; @@ -431,6 +434,7 @@ export async function startServer(): Promise { }); db = createDb(embeddedConnectionString); + pluginMigrationDb = db; logger.info("Embedded PostgreSQL ready"); activeDatabaseConnectionString = embeddedConnectionString; resolvedEmbeddedPostgresPort = port; @@ -606,6 +610,7 @@ export async function startServer(): Promise { bindHost: config.host, authReady, companyDeletionEnabled: config.companyDeletionEnabled, + pluginMigrationDb: pluginMigrationDb as any, betterAuthHandler, resolveSession, }); diff --git a/server/src/routes/activity.ts b/server/src/routes/activity.ts index 53083b1b60..b5fd3cb9bf 100644 --- a/server/src/routes/activity.ts +++ b/server/src/routes/activity.ts @@ -8,7 +8,7 @@ import { heartbeatService, issueService } from "../services/index.js"; import { sanitizeRecord } from "../redaction.js"; const createActivitySchema = z.object({ - actorType: z.enum(["agent", "user", "system"]).optional().default("system"), + actorType: z.enum(["agent", "user", "system", "plugin"]).optional().default("system"), actorId: z.string().min(1), action: z.string().min(1), entityType: z.string().min(1), diff --git a/server/src/routes/plugins.ts b/server/src/routes/plugins.ts index d75912d07c..19a6739161 100644 --- a/server/src/routes/plugins.ts +++ b/server/src/routes/plugins.ts @@ -23,11 +23,12 @@ import path from "node:path"; import { randomUUID } from "node:crypto"; import { fileURLToPath } from "node:url"; import { Router } from "express"; -import type { Request } from "express"; +import type { Request, Response } from "express"; import { and, desc, eq, gte } from "drizzle-orm"; import type { Db } from "@paperclipai/db"; import { companies, pluginLogs, pluginWebhookDeliveries } from "@paperclipai/db"; 
import type { + PluginApiRouteDeclaration, PluginStatus, PaperclipPluginManifestV1, PluginBridgeErrorCode, @@ -41,6 +42,7 @@ import { pluginLifecycleManager } from "../services/plugin-lifecycle.js"; import { getPluginUiContributionMetadata, pluginLoader } from "../services/plugin-loader.js"; import { logActivity } from "../services/activity-log.js"; import { publishGlobalLiveEvent } from "../services/live-events.js"; +import { issueService } from "../services/issues.js"; import type { PluginJobScheduler } from "../services/plugin-job-scheduler.js"; import type { PluginJobStore } from "../services/plugin-job-store.js"; import type { PluginWorkerManager } from "../services/plugin-worker-manager.js"; @@ -48,8 +50,16 @@ import type { PluginStreamBus } from "../services/plugin-stream-bus.js"; import type { PluginToolDispatcher } from "../services/plugin-tool-dispatcher.js"; import type { ToolRunContext } from "@paperclipai/plugin-sdk"; import { JsonRpcCallError, PLUGIN_RPC_ERROR_CODES } from "@paperclipai/plugin-sdk"; -import { assertBoardOrgAccess, assertCompanyAccess, assertInstanceAdmin, getActorInfo } from "./authz.js"; +import { + assertAuthenticated, + assertBoard, + assertBoardOrgAccess, + assertCompanyAccess, + assertInstanceAdmin, + getActorInfo, +} from "./authz.js"; import { validateInstanceConfig } from "../services/plugin-config-validator.js"; +import { forbidden, notFound, unauthorized, unprocessable } from "../errors.js"; /** UI slot declaration extracted from plugin manifest */ type PluginUiSlotDeclaration = NonNullable["slots"]>[number]; @@ -112,6 +122,14 @@ interface PluginHealthCheckResult { const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; +const PLUGIN_API_BODY_LIMIT_BYTES = 1_000_000; +const PLUGIN_SCOPED_API_RESPONSE_HEADER_ALLOWLIST = new Set([ + "cache-control", + "etag", + "last-modified", + "x-request-id", +]); + const __dirname = path.dirname(fileURLToPath(import.meta.url)); const REPO_ROOT = 
path.resolve(__dirname, "../../.."); @@ -140,6 +158,14 @@ const BUNDLED_PLUGIN_EXAMPLES: AvailablePluginExample[] = [ localPath: "packages/plugins/examples/plugin-kitchen-sink-example", tag: "example", }, + { + packageName: "@paperclipai/plugin-orchestration-smoke-example", + pluginKey: "paperclipai.plugin-orchestration-smoke-example", + displayName: "Orchestration Smoke (Example)", + description: "Acceptance fixture for scoped plugin routes, restricted database namespaces, issue orchestration, documents, wakeups, summaries, and UI status surfaces.", + localPath: "packages/plugins/examples/plugin-orchestration-smoke-example", + tag: "example", + }, ]; function listBundledPluginExamples(): AvailablePluginExample[] { @@ -246,6 +272,30 @@ export interface PluginRouteBridgeDeps { streamBus?: PluginStreamBus; } +interface PluginScopedApiRequest { + routeKey: string; + method: string; + path: string; + params: Record; + query: Record; + body: unknown; + actor: { + actorType: "user" | "agent"; + actorId: string; + agentId?: string | null; + userId?: string | null; + runId?: string | null; + }; + companyId: string; + headers: Record; +} + +interface PluginScopedApiResponse { + status?: number; + headers?: Record; + body?: unknown; +} + /** Request body for POST /api/plugins/tools/execute */ interface PluginToolExecuteRequest { /** Fully namespaced tool name (e.g., "acme.linear:search-issues"). */ @@ -314,6 +364,146 @@ export function pluginRoutes( loader, workerManager: bridgeDeps?.workerManager ?? 
webhookDeps?.workerManager, }); + const issuesSvc = issueService(db); + + function matchScopedApiRoute(route: PluginApiRouteDeclaration, method: string, requestPath: string) { + if (route.method !== method) return null; + const normalize = (value: string) => value.replace(/\/+$/, "") || "/"; + const routeSegments = normalize(route.path).split("/").filter(Boolean); + const requestSegments = normalize(requestPath).split("/").filter(Boolean); + if (routeSegments.length !== requestSegments.length) return null; + const params: Record = {}; + for (let i = 0; i < routeSegments.length; i += 1) { + const routeSegment = routeSegments[i]!; + const requestSegment = requestSegments[i]!; + if (routeSegment.startsWith(":")) { + params[routeSegment.slice(1)] = decodeURIComponent(requestSegment); + continue; + } + if (routeSegment !== requestSegment) return null; + } + return params; + } + + function sanitizePluginRequestHeaders(req: Request): Record { + const safeHeaderNames = new Set([ + "accept", + "content-type", + "user-agent", + "x-paperclip-run-id", + "x-request-id", + ]); + const headers: Record = {}; + for (const [name, value] of Object.entries(req.headers)) { + const lower = name.toLowerCase(); + if (!safeHeaderNames.has(lower)) continue; + if (Array.isArray(value)) { + headers[lower] = value.join(", "); + } else if (typeof value === "string") { + headers[lower] = value; + } + } + return headers; + } + + function applyPluginScopedApiResponseHeaders( + res: Response, + headers: Record | undefined, + ): void { + for (const [name, value] of Object.entries(headers ?? 
{})) { + const lower = name.toLowerCase(); + if (!PLUGIN_SCOPED_API_RESPONSE_HEADER_ALLOWLIST.has(lower)) continue; + res.setHeader(lower, value); + } + } + + function normalizeQuery(query: Request["query"]): Record { + const normalized: Record = {}; + for (const [key, value] of Object.entries(query)) { + if (typeof value === "string") { + normalized[key] = value; + } else if (Array.isArray(value)) { + normalized[key] = value.map((entry) => String(entry)); + } + } + return normalized; + } + + async function resolveScopedApiCompanyId( + route: PluginApiRouteDeclaration, + params: Record, + req: Request, + ) { + const resolution = route.companyResolution; + if (!resolution) { + if (req.actor.type === "agent" && req.actor.companyId) return req.actor.companyId; + return null; + } + + if (resolution.from === "body") { + const body = req.body as Record | undefined; + const companyId = body?.[resolution.key ?? ""]; + return typeof companyId === "string" ? companyId : null; + } + + if (resolution.from === "query") { + const value = req.query[resolution.key ?? ""]; + return typeof value === "string" ? value : null; + } + + const issueId = params[resolution.param ?? ""]; + if (!issueId) return null; + const issue = await issuesSvc.getById(issueId); + return issue?.companyId ?? 
null; + } + + function assertScopedApiAuth(req: Request, route: PluginApiRouteDeclaration) { + if (route.auth === "board") { + assertBoard(req); + return; + } + if (route.auth === "agent") { + assertAuthenticated(req); + if (req.actor.type !== "agent") throw forbidden("Agent access required"); + return; + } + if (route.auth === "webhook") { + throw unprocessable("Webhook-scoped plugin API routes require a signature verifier and are not enabled"); + } + assertAuthenticated(req); + if (req.actor.type !== "board" && req.actor.type !== "agent") { + throw forbidden("Board or agent access required"); + } + } + + async function enforceScopedApiCheckout( + req: Request, + route: PluginApiRouteDeclaration, + params: Record, + companyId: string, + ) { + const policy = route.checkoutPolicy ?? "none"; + if (policy === "none" || req.actor.type !== "agent") return; + const issueId = params.issueId; + if (!issueId) { + throw unprocessable("Checkout-protected plugin API routes require an issueId route parameter"); + } + const issue = await issuesSvc.getById(issueId); + if (!issue || issue.companyId !== companyId) { + throw notFound("Issue not found"); + } + if (policy === "required-for-agent-in-progress") { + if (issue.status !== "in_progress" || issue.assigneeAgentId !== req.actor.agentId) return; + } + const runId = req.actor.runId?.trim(); + if (!runId) { + throw unauthorized("Agent run id required"); + } + if (!req.actor.agentId) { + throw forbidden("Agent authentication required"); + } + await issuesSvc.assertCheckoutOwner(issueId, req.actor.agentId, runId); + } async function resolvePluginAuditCompanyIds(req: Request): Promise { if (typeof (db as { select?: unknown }).select === "function") { @@ -1189,6 +1379,113 @@ export function pluginRoutes( res.on("error", safeUnsubscribe); }); + router.use("/plugins/:pluginId/api", async (req, res) => { + if (!bridgeDeps) { + res.status(501).json({ error: "Plugin scoped API routes are not enabled" }); + return; + } + + const { pluginId 
} = req.params; + const plugin = await resolvePlugin(registry, pluginId); + if (!plugin) { + res.status(404).json({ error: "Plugin not found" }); + return; + } + if (plugin.status !== "ready") { + res.status(503).json({ error: `Plugin is not ready (current status: ${plugin.status})` }); + return; + } + const isWorkerRunning = typeof bridgeDeps.workerManager.isRunning === "function" + ? bridgeDeps.workerManager.isRunning(plugin.id) + : true; + if (!isWorkerRunning) { + res.status(503).json({ error: "Plugin worker is not running" }); + return; + } + if (!plugin.manifestJson.capabilities.includes("api.routes.register")) { + res.status(404).json({ error: "Plugin does not expose scoped API routes" }); + return; + } + + const requestPath = req.path || "/"; + const routes = plugin.manifestJson.apiRoutes ?? []; + const match = routes + .map((route) => ({ route, params: matchScopedApiRoute(route, req.method, requestPath) })) + .find((candidate) => candidate.params !== null); + if (!match || !match.params) { + res.status(404).json({ error: "Plugin API route not found" }); + return; + } + + try { + assertScopedApiAuth(req, match.route); + const companyId = await resolveScopedApiCompanyId(match.route, match.params, req); + if (!companyId) { + res.status(400).json({ error: "Unable to resolve company for plugin API route" }); + return; + } + assertCompanyAccess(req, companyId); + await enforceScopedApiCheckout(req, match.route, match.params, companyId); + if (req.method !== "GET" && req.headers["content-type"] && !req.is("application/json")) { + res.status(415).json({ error: "Plugin API routes accept JSON requests only" }); + return; + } + const requestBody = req.body ?? 
null; + const bodySize = Buffer.byteLength(JSON.stringify(requestBody)); + if (bodySize > PLUGIN_API_BODY_LIMIT_BYTES) { + res.status(413).json({ error: "Plugin API request body is too large" }); + return; + } + + const actor = getActorInfo(req); + const input: PluginScopedApiRequest = { + routeKey: match.route.routeKey, + method: req.method, + path: requestPath, + params: match.params, + query: normalizeQuery(req.query), + body: requestBody, + actor: { + actorType: actor.actorType, + actorId: actor.actorId, + agentId: actor.agentId, + userId: actor.actorType === "user" ? actor.actorId : null, + runId: actor.runId, + }, + companyId, + headers: sanitizePluginRequestHeaders(req), + }; + + const result = await bridgeDeps.workerManager.call( + plugin.id, + "handleApiRequest", + input, + ) as PluginScopedApiResponse; + const status = Number.isInteger(result.status) && Number(result.status) >= 200 && Number(result.status) <= 599 + ? Number(result.status) + : 200; + applyPluginScopedApiResponseHeaders(res, result.headers); + if (status === 204) { + res.status(status).end(); + } else { + res.status(status).json(result.body ?? null); + } + } catch (err) { + const status = typeof (err as { status?: unknown }).status === "number" + ? (err as { status: number }).status + : err instanceof JsonRpcCallError && err.code === PLUGIN_RPC_ERROR_CODES.CAPABILITY_DENIED + ? 403 + : err instanceof JsonRpcCallError && err.code === PLUGIN_RPC_ERROR_CODES.METHOD_NOT_IMPLEMENTED + ? 501 + : err instanceof JsonRpcCallError + ? 502 + : 500; + res.status(status).json({ + error: err instanceof Error ? 
err.message : String(err), + }); + } + }); + /** * GET /api/plugins/:pluginId * diff --git a/server/src/services/activity-log.ts b/server/src/services/activity-log.ts index cc608a7454..473a1d787b 100644 --- a/server/src/services/activity-log.ts +++ b/server/src/services/activity-log.ts @@ -11,6 +11,20 @@ import type { PluginEventBus } from "./plugin-event-bus.js"; import { instanceSettingsService } from "./instance-settings.js"; const PLUGIN_EVENT_SET: ReadonlySet = new Set(PLUGIN_EVENT_TYPES); +const ACTIVITY_ACTION_TO_PLUGIN_EVENT: Readonly> = { + issue_comment_added: "issue.comment.created", + issue_comment_created: "issue.comment.created", + issue_document_created: "issue.document.created", + issue_document_updated: "issue.document.updated", + issue_document_deleted: "issue.document.deleted", + issue_blockers_updated: "issue.relations.updated", + approval_approved: "approval.decided", + approval_rejected: "approval.decided", + approval_revision_requested: "approval.decided", + budget_soft_threshold_crossed: "budget.incident.opened", + budget_hard_threshold_crossed: "budget.incident.opened", + budget_incident_resolved: "budget.incident.resolved", +}; let _pluginEventBus: PluginEventBus | null = null; @@ -22,9 +36,23 @@ export function setPluginEventBus(bus: PluginEventBus): void { _pluginEventBus = bus; } +function eventTypeForActivityAction(action: string): PluginEventType | null { + if (PLUGIN_EVENT_SET.has(action)) return action as PluginEventType; + return ACTIVITY_ACTION_TO_PLUGIN_EVENT[action.replaceAll(".", "_")] ?? 
null; +} + +export function publishPluginDomainEvent(event: PluginEvent): void { + if (!_pluginEventBus) return; + void _pluginEventBus.emit(event).then(({ errors }) => { + for (const { pluginId, error } of errors) { + logger.warn({ pluginId, eventType: event.eventType, err: error }, "plugin event handler failed"); + } + }).catch(() => {}); +} + export interface LogActivityInput { companyId: string; - actorType: "agent" | "user" | "system"; + actorType: "agent" | "user" | "system" | "plugin"; actorId: string; action: string; entityType: string; @@ -69,10 +97,11 @@ export async function logActivity(db: Db, input: LogActivityInput) { }, }); - if (_pluginEventBus && PLUGIN_EVENT_SET.has(input.action)) { + const pluginEventType = eventTypeForActivityAction(input.action); + if (pluginEventType) { const event: PluginEvent = { eventId: randomUUID(), - eventType: input.action as PluginEventType, + eventType: pluginEventType, occurredAt: new Date().toISOString(), actorId: input.actorId, actorType: input.actorType, @@ -85,10 +114,6 @@ export async function logActivity(db: Db, input: LogActivityInput) { runId: input.runId ?? 
null, }, }; - void _pluginEventBus.emit(event).then(({ errors }) => { - for (const { pluginId, error } of errors) { - logger.warn({ pluginId, eventType: event.eventType, err: error }, "plugin event handler failed"); - } - }).catch(() => {}); + publishPluginDomainEvent(event); } } diff --git a/server/src/services/heartbeat.ts b/server/src/services/heartbeat.ts index 75987703a6..bfc1c79fca 100644 --- a/server/src/services/heartbeat.ts +++ b/server/src/services/heartbeat.ts @@ -2,6 +2,7 @@ import fs from "node:fs/promises"; import path from "node:path"; import { execFile as execFileCallback } from "node:child_process"; import { promisify } from "node:util"; +import { randomUUID } from "node:crypto"; import { and, asc, desc, eq, getTableColumns, gt, inArray, isNull, or, sql } from "drizzle-orm"; import type { Db } from "@paperclipai/db"; import { @@ -60,7 +61,7 @@ import { classifyRunLiveness, type RunLivenessClassificationInput, } from "./run-liveness.js"; -import { logActivity, type LogActivityInput } from "./activity-log.js"; +import { logActivity, publishPluginDomainEvent, type LogActivityInput } from "./activity-log.js"; import { buildWorkspaceReadyComment, cleanupExecutionWorkspaceArtifacts, @@ -2377,11 +2378,50 @@ export function heartbeatService(db: Db) { finishedAt: updated.finishedAt ? new Date(updated.finishedAt).toISOString() : null, }, }); + publishRunLifecyclePluginEvent(updated); } return updated; } + function publishRunLifecyclePluginEvent(run: typeof heartbeatRuns.$inferSelect) { + const eventType = + run.status === "running" + ? "agent.run.started" + : run.status === "succeeded" + ? "agent.run.finished" + : run.status === "failed" || run.status === "timed_out" + ? "agent.run.failed" + : run.status === "cancelled" + ? 
"agent.run.cancelled" + : null; + if (!eventType) return; + publishPluginDomainEvent({ + eventId: randomUUID(), + eventType, + occurredAt: new Date().toISOString(), + actorId: run.agentId, + actorType: "agent", + entityId: run.id, + entityType: "heartbeat_run", + companyId: run.companyId, + payload: { + runId: run.id, + agentId: run.agentId, + status: run.status, + invocationSource: run.invocationSource, + triggerDetail: run.triggerDetail, + error: run.error ?? null, + errorCode: run.errorCode ?? null, + issueId: typeof run.contextSnapshot === "object" && run.contextSnapshot !== null + ? (run.contextSnapshot as Record).issueId ?? null + : null, + startedAt: run.startedAt ? new Date(run.startedAt).toISOString() : null, + finishedAt: run.finishedAt ? new Date(run.finishedAt).toISOString() : null, + }, + }); + } + async function setWakeupStatus( wakeupRequestId: string | null | undefined, status: string, @@ -3054,6 +3094,7 @@ export function heartbeatService(db: Db) { finishedAt: claimed.finishedAt ? 
new Date(claimed.finishedAt).toISOString() : null, }, }); + publishRunLifecyclePluginEvent(claimed); await setWakeupStatus(claimed.wakeupRequestId, "claimed", { claimedAt }); diff --git a/server/src/services/plugin-capability-validator.ts b/server/src/services/plugin-capability-validator.ts index 0d4bb2a739..f064719219 100644 --- a/server/src/services/plugin-capability-validator.ts +++ b/server/src/services/plugin-capability-validator.ts @@ -51,6 +51,7 @@ const OPERATION_CAPABILITIES: Record = { "project.workspaces.get": ["project.workspaces.read"], "issues.list": ["issues.read"], "issues.get": ["issues.read"], + "issues.relations.get": ["issue.relations.read"], "issue.comments.list": ["issue.comments.read"], "issue.comments.get": ["issue.comments.read"], "agents.list": ["agents.read"], @@ -61,14 +62,26 @@ const OPERATION_CAPABILITIES: Record = { "activity.get": ["activity.read"], "costs.list": ["costs.read"], "costs.get": ["costs.read"], + "issues.summaries.getOrchestration": ["issues.orchestration.read"], + "db.namespace": ["database.namespace.read"], + "db.query": ["database.namespace.read"], // Data write operations "issues.create": ["issues.create"], "issues.update": ["issues.update"], + "issues.relations.setBlockedBy": ["issue.relations.write"], + "issues.relations.addBlockers": ["issue.relations.write"], + "issues.relations.removeBlockers": ["issue.relations.write"], + "issues.assertCheckoutOwner": ["issues.checkout"], + "issues.getSubtree": ["issue.subtree.read"], + "issues.requestWakeup": ["issues.wakeup"], + "issues.requestWakeups": ["issues.wakeup"], "issue.comments.create": ["issue.comments.create"], "activity.log": ["activity.log.write"], "metrics.write": ["metrics.write"], "telemetry.track": ["telemetry.track"], + "db.migrate": ["database.namespace.migrate"], + "db.execute": ["database.namespace.write"], // Plugin state operations "plugin.state.get": ["plugin.state.read"], @@ -141,6 +154,7 @@ const FEATURE_CAPABILITIES: Record = { tools: 
"agent.tools.register", jobs: "jobs.schedule", webhooks: "webhooks.receive", + database: "database.namespace.migrate", }; // --------------------------------------------------------------------------- diff --git a/server/src/services/plugin-database.ts b/server/src/services/plugin-database.ts new file mode 100644 index 0000000000..e1822e45cd --- /dev/null +++ b/server/src/services/plugin-database.ts @@ -0,0 +1,498 @@ +import { createHash } from "node:crypto"; +import { readdir, readFile } from "node:fs/promises"; +import path from "node:path"; +import { and, eq, sql } from "drizzle-orm"; +import type { SQL } from "drizzle-orm"; +import type { Db } from "@paperclipai/db"; +import { + pluginDatabaseNamespaces, + pluginMigrations, + plugins, +} from "@paperclipai/db"; +import type { + PaperclipPluginManifestV1, + PluginDatabaseCoreReadTable, + PluginMigrationRecord, +} from "@paperclipai/shared"; + +const IDENTIFIER_RE = /^[A-Za-z_][A-Za-z0-9_]*$/; +const MAX_POSTGRES_IDENTIFIER_LENGTH = 63; + +type SqlRef = { schema: string; table: string; keyword: string }; + +export type PluginDatabaseRuntimeResult> = { + rows?: T[]; + rowCount?: number; +}; + +export function derivePluginDatabaseNamespace( + pluginKey: string, + namespaceSlug?: string, +): string { + const hash = createHash("sha256").update(pluginKey).digest("hex").slice(0, 10); + const slug = (namespaceSlug ?? 
pluginKey) + .toLowerCase() + .replace(/[^a-z0-9_]+/g, "_") + .replace(/^_+|_+$/g, "") + .replace(/_+/g, "_") + .slice(0, 36) || "plugin"; + const namespace = `plugin_${slug}_${hash}`; + return namespace.slice(0, MAX_POSTGRES_IDENTIFIER_LENGTH); +} + +function assertIdentifier(value: string, label = "identifier"): string { + if (!IDENTIFIER_RE.test(value)) { + throw new Error(`Unsafe SQL ${label}: ${value}`); + } + return value; +} + +function quoteIdentifier(value: string): string { + return `"${assertIdentifier(value).replaceAll("\"", "\"\"")}"`; +} + +function splitSqlStatements(input: string): string[] { + const statements: string[] = []; + let start = 0; + let quote: "'" | "\"" | null = null; + let lineComment = false; + let blockComment = false; + + for (let i = 0; i < input.length; i += 1) { + const char = input[i]!; + const next = input[i + 1]; + + if (lineComment) { + if (char === "\n") lineComment = false; + continue; + } + if (blockComment) { + if (char === "*" && next === "/") { + blockComment = false; + i += 1; + } + continue; + } + if (quote) { + if (char === quote) { + if (next === quote) { + i += 1; + } else { + quote = null; + } + } + continue; + } + if (char === "-" && next === "-") { + lineComment = true; + i += 1; + continue; + } + if (char === "/" && next === "*") { + blockComment = true; + i += 1; + continue; + } + if (char === "'" || char === "\"") { + quote = char; + continue; + } + if (char === ";") { + const statement = input.slice(start, i).trim(); + if (statement) statements.push(statement); + start = i + 1; + } + } + + const trailing = input.slice(start).trim(); + if (trailing) statements.push(trailing); + return statements; +} + +function stripSqlForKeywordScan(input: string): string { + return input + .replace(/'([^']|'')*'/g, "''") + .replace(/"([^"]|"")*"/g, "\"\"") + .replace(/--.*$/gm, "") + .replace(/\/\*[\s\S]*?\*\//g, ""); +} + +function normaliseSql(input: string): string { + return 
stripSqlForKeywordScan(input).replace(/\s+/g, " ").trim().toLowerCase(); +} + +function extractQualifiedRefs(statement: string): SqlRef[] { + const refs: SqlRef[] = []; + const patterns = [ + /\b(from|join|references|into|update)\s+"?([A-Za-z_][A-Za-z0-9_]*)"?\."?([A-Za-z_][A-Za-z0-9_]*)"?/gi, + /\b(alter\s+table|create\s+table|create\s+view|drop\s+table|truncate\s+table)\s+(?:if\s+(?:not\s+)?exists\s+)?"?([A-Za-z_][A-Za-z0-9_]*)"?\."?([A-Za-z_][A-Za-z0-9_]*)"?/gi, + ]; + + for (const pattern of patterns) { + for (const match of statement.matchAll(pattern)) { + refs.push({ keyword: match[1]!.toLowerCase(), schema: match[2]!, table: match[3]! }); + } + } + return refs; +} + +function assertAllowedPublicRead( + ref: SqlRef, + allowedCoreReadTables: ReadonlySet, +): void { + if (ref.schema !== "public") return; + if (!allowedCoreReadTables.has(ref.table)) { + throw new Error(`Plugin SQL references public.${ref.table}, which is not whitelisted`); + } + if (!["from", "join", "references"].includes(ref.keyword)) { + throw new Error(`Plugin SQL cannot mutate or define objects in public.${ref.table}`); + } +} + +function assertNoBannedSql(statement: string): void { + const normalized = normaliseSql(statement); + const banned = [ + /\bcreate\s+extension\b/, + /\bcreate\s+(?:event\s+)?trigger\b/, + /\bcreate\s+(?:or\s+replace\s+)?function\b/, + /\bcreate\s+language\b/, + /\bgrant\b/, + /\brevoke\b/, + /\bsecurity\s+definer\b/, + /\bcopy\b/, + /\bcall\b/, + /\bdo\s+(?:\$\$|language\b)/, + ]; + const matched = banned.find((pattern) => pattern.test(normalized)); + if (matched) { + throw new Error(`Plugin SQL contains a disallowed statement or clause: ${matched.source}`); + } +} + +export function validatePluginMigrationStatement( + statement: string, + namespace: string, + coreReadTables: readonly PluginDatabaseCoreReadTable[] = [], +): void { + assertIdentifier(namespace, "namespace"); + assertNoBannedSql(statement); + + const normalized = normaliseSql(statement); + if 
(/^\s*(drop|truncate)\b/.test(normalized)) { + throw new Error("Destructive plugin migrations are not allowed in Phase 1"); + } + + const ddlAllowed = /^(create|alter|comment)\b/.test(normalized); + if (!ddlAllowed) { + throw new Error("Plugin migrations may contain DDL statements only"); + } + + const refs = extractQualifiedRefs(statement); + if (refs.length === 0 && !normalized.startsWith("comment ")) { + throw new Error("Plugin migration objects must use fully qualified schema names"); + } + + const allowedCoreReadTables = new Set(coreReadTables); + for (const ref of refs) { + if (ref.schema === namespace) continue; + if (ref.schema === "public") { + assertAllowedPublicRead(ref, allowedCoreReadTables); + continue; + } + throw new Error(`Plugin SQL references schema "${ref.schema}" outside namespace "${namespace}"`); + } +} + +export function validatePluginRuntimeQuery( + query: string, + namespace: string, + coreReadTables: readonly PluginDatabaseCoreReadTable[] = [], +): void { + const statements = splitSqlStatements(query); + if (statements.length !== 1) { + throw new Error("Plugin runtime SQL must contain exactly one statement"); + } + const statement = statements[0]!; + assertNoBannedSql(statement); + const normalized = normaliseSql(statement); + if (!normalized.startsWith("select ") && !normalized.startsWith("with ")) { + throw new Error("ctx.db.query only allows SELECT statements"); + } + if (/\b(insert|update|delete|alter|create|drop|truncate)\b/.test(normalized)) { + throw new Error("ctx.db.query cannot contain mutation or DDL keywords"); + } + + const allowedCoreReadTables = new Set(coreReadTables); + for (const ref of extractQualifiedRefs(statement)) { + if (ref.schema === namespace) continue; + if (ref.schema === "public") { + assertAllowedPublicRead(ref, allowedCoreReadTables); + continue; + } + throw new Error(`ctx.db.query cannot read schema "${ref.schema}"`); + } +} + +export function validatePluginRuntimeExecute(query: string, namespace: string): 
void { + const statements = splitSqlStatements(query); + if (statements.length !== 1) { + throw new Error("Plugin runtime SQL must contain exactly one statement"); + } + const statement = statements[0]!; + assertNoBannedSql(statement); + const normalized = normaliseSql(statement); + if (!/^(insert\s+into|update|delete\s+from)\b/.test(normalized)) { + throw new Error("ctx.db.execute only allows INSERT, UPDATE, or DELETE"); + } + if (/\b(alter|create|drop|truncate)\b/.test(normalized)) { + throw new Error("ctx.db.execute cannot contain DDL keywords"); + } + + const refs = extractQualifiedRefs(statement); + const target = refs.find((ref) => ["into", "update", "from"].includes(ref.keyword)); + if (!target || target.schema !== namespace) { + throw new Error(`ctx.db.execute target must be inside plugin namespace "${namespace}"`); + } + for (const ref of refs) { + if (ref.schema !== namespace) { + throw new Error("ctx.db.execute cannot reference public or other non-plugin schemas"); + } + } +} + +function bindSql(statement: string, params: readonly unknown[] = []): SQL { + // Safe only after callers run the plugin SQL validators above. + if (params.length === 0) return sql.raw(statement); + const chunks: SQL[] = []; + let cursor = 0; + const placeholderPattern = /\$(\d+)/g; + const seen = new Set(); + + for (const match of statement.matchAll(placeholderPattern)) { + const index = Number(match[1]); + if (!Number.isInteger(index) || index < 1 || index > params.length) { + throw new Error(`SQL placeholder $${match[1]} has no matching parameter`); + } + chunks.push(sql.raw(statement.slice(cursor, match.index))); + chunks.push(sql`${params[index - 1]}`); + seen.add(index); + cursor = match.index! 
+ match[0].length; + } + chunks.push(sql.raw(statement.slice(cursor))); + if (seen.size !== params.length) { + throw new Error("Every ctx.db parameter must be referenced by a $n placeholder"); + } + return sql.join(chunks, sql.raw("")); +} + +async function listSqlMigrationFiles(migrationsDir: string): Promise { + const entries = await readdir(migrationsDir, { withFileTypes: true }); + return entries + .filter((entry) => entry.isFile() && entry.name.endsWith(".sql")) + .map((entry) => entry.name) + .sort((a, b) => a.localeCompare(b)); +} + +function resolveMigrationsDir(packageRoot: string, migrationsDir: string): string { + const resolvedRoot = path.resolve(packageRoot); + const resolvedDir = path.resolve(resolvedRoot, migrationsDir); + const relative = path.relative(resolvedRoot, resolvedDir); + if (relative.startsWith("..") || path.isAbsolute(relative)) { + throw new Error(`Plugin migrationsDir escapes package root: ${migrationsDir}`); + } + return resolvedDir; +} + +export function pluginDatabaseService(db: Db) { + async function getPluginRecord(pluginId: string) { + const rows = await db.select().from(plugins).where(eq(plugins.id, pluginId)).limit(1); + const plugin = rows[0]; + if (!plugin) throw new Error(`Plugin not found: ${pluginId}`); + return plugin; + } + + async function ensureNamespace(pluginId: string, manifest: PaperclipPluginManifestV1) { + if (!manifest.database) return null; + const namespaceName = derivePluginDatabaseNamespace( + manifest.id, + manifest.database.namespaceSlug, + ); + await db.execute(sql.raw(`CREATE SCHEMA IF NOT EXISTS ${quoteIdentifier(namespaceName)}`)); + const rows = await db + .insert(pluginDatabaseNamespaces) + .values({ + pluginId, + pluginKey: manifest.id, + namespaceName, + namespaceMode: "schema", + status: "active", + }) + .onConflictDoUpdate({ + target: pluginDatabaseNamespaces.pluginId, + set: { + pluginKey: manifest.id, + namespaceName, + namespaceMode: "schema", + status: "active", + updatedAt: new Date(), + }, 
+ }) + .returning(); + return rows[0] ?? null; + } + + async function getNamespace(pluginId: string) { + const rows = await db + .select() + .from(pluginDatabaseNamespaces) + .where(eq(pluginDatabaseNamespaces.pluginId, pluginId)) + .limit(1); + return rows[0] ?? null; + } + + async function getRuntimeNamespace(pluginId: string) { + const namespace = await getNamespace(pluginId); + if (!namespace || namespace.status !== "active") { + throw new Error("Plugin database namespace is not active"); + } + return namespace.namespaceName; + } + + async function recordMigrationFailure(input: { + pluginId: string; + pluginKey: string; + namespaceName: string; + migrationKey: string; + checksum: string; + pluginVersion: string; + error: unknown; + }): Promise { + const message = input.error instanceof Error ? input.error.message : String(input.error); + await db + .insert(pluginMigrations) + .values({ + pluginId: input.pluginId, + pluginKey: input.pluginKey, + namespaceName: input.namespaceName, + migrationKey: input.migrationKey, + checksum: input.checksum, + pluginVersion: input.pluginVersion, + status: "failed", + errorMessage: message, + }) + .onConflictDoUpdate({ + target: [pluginMigrations.pluginId, pluginMigrations.migrationKey], + set: { + checksum: input.checksum, + pluginVersion: input.pluginVersion, + status: "failed", + errorMessage: message, + startedAt: new Date(), + appliedAt: null, + }, + }); + await db + .update(pluginDatabaseNamespaces) + .set({ status: "migration_failed", updatedAt: new Date() }) + .where(eq(pluginDatabaseNamespaces.pluginId, input.pluginId)); + } + + return { + ensureNamespace, + + async applyMigrations(pluginId: string, manifest: PaperclipPluginManifestV1, packageRoot: string) { + if (!manifest.database) return null; + const namespace = await ensureNamespace(pluginId, manifest); + if (!namespace) return null; + + const migrationDir = resolveMigrationsDir(packageRoot, manifest.database.migrationsDir); + const migrationFiles = await 
listSqlMigrationFiles(migrationDir); + const coreReadTables = manifest.database.coreReadTables ?? []; + const lockKey = Number.parseInt(createHash("sha256").update(pluginId).digest("hex").slice(0, 12), 16); + + await db.transaction(async (tx) => { + await tx.execute(sql`SELECT pg_advisory_xact_lock(${lockKey})`); + for (const migrationKey of migrationFiles) { + const content = await readFile(path.join(migrationDir, migrationKey), "utf8"); + const checksum = createHash("sha256").update(content).digest("hex"); + const existingRows = await tx + .select() + .from(pluginMigrations) + .where(and(eq(pluginMigrations.pluginId, pluginId), eq(pluginMigrations.migrationKey, migrationKey))) + .limit(1); + const existing = existingRows[0] as PluginMigrationRecord | undefined; + if (existing?.status === "applied") { + if (existing.checksum !== checksum) { + throw new Error(`Plugin migration checksum mismatch for ${migrationKey}`); + } + continue; + } + + const statements = splitSqlStatements(content); + try { + if (statements.length === 0) { + throw new Error(`Plugin migration ${migrationKey} is empty`); + } + for (const statement of statements) { + validatePluginMigrationStatement(statement, namespace.namespaceName, coreReadTables); + await tx.execute(sql.raw(statement)); + } + await tx + .insert(pluginMigrations) + .values({ + pluginId, + pluginKey: manifest.id, + namespaceName: namespace.namespaceName, + migrationKey, + checksum, + pluginVersion: manifest.version, + status: "applied", + appliedAt: new Date(), + }) + .onConflictDoUpdate({ + target: [pluginMigrations.pluginId, pluginMigrations.migrationKey], + set: { + checksum, + pluginVersion: manifest.version, + status: "applied", + errorMessage: null, + startedAt: new Date(), + appliedAt: new Date(), + }, + }); + } catch (error) { + await recordMigrationFailure({ + pluginId, + pluginKey: manifest.id, + namespaceName: namespace.namespaceName, + migrationKey, + checksum, + pluginVersion: manifest.version, + error, + }); + 
throw error; + } + } + }); + + return namespace; + }, + + getRuntimeNamespace, + + async query>(pluginId: string, statement: string, params?: unknown[]): Promise { + const plugin = await getPluginRecord(pluginId); + const namespace = await getRuntimeNamespace(pluginId); + validatePluginRuntimeQuery(statement, namespace, plugin.manifestJson.database?.coreReadTables ?? []); + const result = await db.execute(bindSql(statement, params)); + return Array.from(result as Iterable); + }, + + async execute(pluginId: string, statement: string, params?: unknown[]): Promise<{ rowCount: number }> { + const namespace = await getRuntimeNamespace(pluginId); + validatePluginRuntimeExecute(statement, namespace); + const result = await db.execute(bindSql(statement, params)); + return { rowCount: Number((result as { count?: number | string }).count ?? 0) }; + }, + }; +} diff --git a/server/src/services/plugin-host-services.ts b/server/src/services/plugin-host-services.ts index 22ccb01708..6a8ba88ef8 100644 --- a/server/src/services/plugin-host-services.ts +++ b/server/src/services/plugin-host-services.ts @@ -1,6 +1,14 @@ import type { Db } from "@paperclipai/db"; -import { pluginLogs, agentTaskSessions as agentTaskSessionsTable } from "@paperclipai/db"; -import { eq, and, like, desc } from "drizzle-orm"; +import { + agentTaskSessions as agentTaskSessionsTable, + agents as agentsTable, + budgetIncidents, + costEvents, + heartbeatRuns, + issues as issuesTable, + pluginLogs, +} from "@paperclipai/db"; +import { eq, and, like, desc, inArray, sql } from "drizzle-orm"; import type { HostServices, Company, @@ -10,7 +18,10 @@ import type { Goal, PluginWorkspace, IssueComment, + PluginIssueAssigneeSummary, + PluginIssueOrchestrationSummary, } from "@paperclipai/plugin-sdk"; +import type { IssueDocumentSummary } from "@paperclipai/shared"; import { companyService } from "./companies.js"; import { agentService } from "./agents.js"; import { projectService } from "./projects.js"; @@ -18,6 +29,8 @@ 
import { issueService } from "./issues.js"; import { goalService } from "./goals.js"; import { documentService } from "./documents.js"; import { heartbeatService } from "./heartbeat.js"; +import { budgetService } from "./budgets.js"; +import { issueApprovalService } from "./issue-approvals.js"; import { subscribeCompanyLiveEvents } from "./live-events.js"; import { randomUUID } from "node:crypto"; import { activityService } from "./activity.js"; @@ -25,6 +38,7 @@ import { costService } from "./costs.js"; import { assetService } from "./assets.js"; import { pluginRegistryService } from "./plugin-registry.js"; import { pluginStateStore } from "./plugin-state-store.js"; +import { pluginDatabaseService } from "./plugin-database.js"; import { createPluginSecretsHandler } from "./plugin-secrets-handler.js"; import { logActivity } from "./activity-log.js"; import type { PluginEventBus } from "./plugin-event-bus.js"; @@ -447,6 +461,7 @@ export function buildHostServices( ): HostServices & { dispose(): void } { const registry = pluginRegistryService(db); const stateStore = pluginStateStore(db); + const pluginDb = pluginDatabaseService(db); const secretsHandler = createPluginSecretsHandler({ db, pluginId }); const companies = companyService(db); const agents = agentService(db); @@ -457,6 +472,8 @@ export function buildHostServices( const goals = goalService(db); const activity = activityService(db); const costs = costService(db); + const budgets = budgetService(db); + const issueApprovals = issueApprovalService(db); const assets = assetService(db); const scopedBus = eventBus.forPlugin(pluginKey); @@ -512,6 +529,216 @@ export function buildHostServices( return record; }; + const pluginActivityDetails = ( + details: Record | null | undefined, + actor?: { actorAgentId?: string | null; actorUserId?: string | null; actorRunId?: string | null }, + ) => { + const initiatingActorType = actor?.actorAgentId ? "agent" : actor?.actorUserId ? 
"user" : null; + const initiatingActorId = actor?.actorAgentId ?? actor?.actorUserId ?? null; + return { + ...(details ?? {}), + sourcePluginId: pluginId, + sourcePluginKey: pluginKey, + initiatingActorType, + initiatingActorId, + initiatingAgentId: actor?.actorAgentId ?? null, + initiatingUserId: actor?.actorUserId ?? null, + initiatingRunId: actor?.actorRunId ?? null, + pluginId, + pluginKey, + }; + }; + + const defaultPluginOriginKind = `plugin:${pluginKey}`; + const normalizePluginOriginKind = (originKind: unknown = defaultPluginOriginKind) => { + if (originKind == null || originKind === "") return defaultPluginOriginKind; + if (typeof originKind !== "string") { + throw new Error("Plugin issue originKind must be a string"); + } + if (originKind === defaultPluginOriginKind || originKind.startsWith(`${defaultPluginOriginKind}:`)) { + return originKind; + } + throw new Error(`Plugin may only use originKind values under ${defaultPluginOriginKind}`); + }; + + const assertReadableOriginFilter = (originKind: unknown) => { + if (typeof originKind !== "string" || !originKind.startsWith("plugin:")) return; + normalizePluginOriginKind(originKind); + }; + + const logPluginActivity = async (input: { + companyId: string; + action: string; + entityType: string; + entityId: string; + details?: Record | null; + actor?: { actorAgentId?: string | null; actorUserId?: string | null; actorRunId?: string | null }; + }) => { + await logActivity(db, { + companyId: input.companyId, + actorType: "plugin", + actorId: pluginId, + agentId: input.actor?.actorAgentId ?? null, + runId: input.actor?.actorRunId ?? 
null, + action: input.action, + entityType: input.entityType, + entityId: input.entityId, + details: pluginActivityDetails(input.details, input.actor), + }); + }; + + const collectIssueSubtreeIds = async (companyId: string, rootIssueId: string) => { + const seen = new Set([rootIssueId]); + let frontier = [rootIssueId]; + + while (frontier.length > 0) { + const children = await db + .select({ id: issuesTable.id }) + .from(issuesTable) + .where(and(eq(issuesTable.companyId, companyId), inArray(issuesTable.parentId, frontier))); + frontier = children.map((child) => child.id).filter((id) => !seen.has(id)); + for (const id of frontier) seen.add(id); + } + + return [...seen]; + }; + + const getIssueRunSummaries = async ( + companyId: string, + issueIds: string[], + options: { activeOnly?: boolean } = {}, + ) => { + if (issueIds.length === 0) return []; + const issueIdExpr = sql`${heartbeatRuns.contextSnapshot} ->> 'issueId'`; + const statusCondition = options.activeOnly + ? inArray(heartbeatRuns.status, ["queued", "running"]) + : undefined; + const rows = await db + .select({ + id: heartbeatRuns.id, + issueId: issueIdExpr, + agentId: heartbeatRuns.agentId, + status: heartbeatRuns.status, + invocationSource: heartbeatRuns.invocationSource, + triggerDetail: heartbeatRuns.triggerDetail, + startedAt: heartbeatRuns.startedAt, + finishedAt: heartbeatRuns.finishedAt, + error: heartbeatRuns.error, + createdAt: heartbeatRuns.createdAt, + }) + .from(heartbeatRuns) + .where(and(eq(heartbeatRuns.companyId, companyId), inArray(issueIdExpr, issueIds), statusCondition)) + .orderBy(desc(heartbeatRuns.createdAt)) + .limit(100); + + return rows.map((row) => ({ + ...row, + startedAt: row.startedAt?.toISOString() ?? null, + finishedAt: row.finishedAt?.toISOString() ?? 
null, + createdAt: row.createdAt.toISOString(), + })); + }; + + const setBlockedByWithActivity = async (params: { + issueId: string; + companyId: string; + blockedByIssueIds: string[]; + mutation: "set" | "add" | "remove"; + actorAgentId?: string | null; + actorUserId?: string | null; + actorRunId?: string | null; + }) => { + const existing = requireInCompany("Issue", await issues.getById(params.issueId), params.companyId); + const previous = await issues.getRelationSummaries(params.issueId); + await issues.update(params.issueId, { + blockedByIssueIds: params.blockedByIssueIds, + actorAgentId: params.actorAgentId ?? null, + actorUserId: params.actorUserId ?? null, + } as any); + const relations = await issues.getRelationSummaries(params.issueId); + await logPluginActivity({ + companyId: params.companyId, + action: "issue.relations.updated", + entityType: "issue", + entityId: params.issueId, + actor: { + actorAgentId: params.actorAgentId, + actorUserId: params.actorUserId, + actorRunId: params.actorRunId, + }, + details: { + identifier: existing.identifier, + mutation: params.mutation, + blockedByIssueIds: params.blockedByIssueIds, + previousBlockedByIssueIds: previous.blockedBy.map((relation) => relation.id), + }, + }); + return relations; + }; + + const getIssueCostSummary = async ( + companyId: string, + issueIds: string[], + billingCode?: string | null, + ) => { + const scopeConditions = [ + issueIds.length > 0 ? inArray(costEvents.issueId, issueIds) : undefined, + billingCode ? eq(costEvents.billingCode, billingCode) : undefined, + ].filter((condition): condition is NonNullable => Boolean(condition)); + if (scopeConditions.length === 0) { + return { + costCents: 0, + inputTokens: 0, + cachedInputTokens: 0, + outputTokens: 0, + billingCode: billingCode ?? null, + }; + } + const scopeCondition = scopeConditions.length === 1 ? scopeConditions[0]! 
: and(...scopeConditions); + const [row] = await db + .select({ + costCents: sql`coalesce(sum(${costEvents.costCents}), 0)::double precision`, + inputTokens: sql`coalesce(sum(${costEvents.inputTokens}), 0)::double precision`, + cachedInputTokens: sql`coalesce(sum(${costEvents.cachedInputTokens}), 0)::double precision`, + outputTokens: sql`coalesce(sum(${costEvents.outputTokens}), 0)::double precision`, + }) + .from(costEvents) + .where(and(eq(costEvents.companyId, companyId), scopeCondition)); + + return { + costCents: Number(row?.costCents ?? 0), + inputTokens: Number(row?.inputTokens ?? 0), + cachedInputTokens: Number(row?.cachedInputTokens ?? 0), + outputTokens: Number(row?.outputTokens ?? 0), + billingCode: billingCode ?? null, + }; + }; + + const getOpenBudgetIncidents = async (companyId: string) => { + const rows = await db + .select({ + id: budgetIncidents.id, + scopeType: budgetIncidents.scopeType, + scopeId: budgetIncidents.scopeId, + metric: budgetIncidents.metric, + windowKind: budgetIncidents.windowKind, + thresholdType: budgetIncidents.thresholdType, + amountLimit: budgetIncidents.amountLimit, + amountObserved: budgetIncidents.amountObserved, + status: budgetIncidents.status, + approvalId: budgetIncidents.approvalId, + createdAt: budgetIncidents.createdAt, + }) + .from(budgetIncidents) + .where(and(eq(budgetIncidents.companyId, companyId), eq(budgetIncidents.status, "open"))) + .orderBy(desc(budgetIncidents.createdAt)); + + return rows.map((row) => ({ + ...row, + createdAt: row.createdAt.toISOString(), + })); + }; + return { config: { async get() { @@ -544,6 +771,18 @@ export function buildHostServices( }, }, + db: { + async namespace() { + return pluginDb.getRuntimeNamespace(pluginId); + }, + async query(params) { + return pluginDb.query(pluginId, params.sql, params.params); + }, + async execute(params) { + return pluginDb.execute(pluginId, params.sql, params.params); + }, + }, + entities: { async upsert(params) { return 
registry.upsertEntity(pluginId, params as any) as any; @@ -604,12 +843,12 @@ export function buildHostServices( await ensurePluginAvailableForCompany(companyId); await logActivity(db, { companyId, - actorType: "system", + actorType: "plugin", actorId: pluginId, action: params.message, entityType: params.entityType ?? "plugin", entityId: params.entityId ?? pluginId, - details: params.metadata, + details: pluginActivityDetails(params.metadata), }); }, }, @@ -775,6 +1014,7 @@ export function buildHostServices( async list(params) { const companyId = ensureCompanyId(params.companyId); await ensurePluginAvailableForCompany(companyId); + assertReadableOriginFilter(params.originKind); return applyWindow((await issues.list(companyId, params as any)) as Issue[], params); }, async get(params) { @@ -786,13 +1026,456 @@ export function buildHostServices( async create(params) { const companyId = ensureCompanyId(params.companyId); await ensurePluginAvailableForCompany(companyId); - return (await issues.create(companyId, params as any)) as Issue; + const { actorAgentId, actorUserId, actorRunId, originKind, ...issueInput } = params; + const normalizedOriginKind = normalizePluginOriginKind(originKind); + const issue = (await issues.create(companyId, { + ...(issueInput as any), + originKind: normalizedOriginKind, + originId: params.originId ?? null, + originRunId: params.originRunId ?? actorRunId ?? null, + createdByAgentId: actorAgentId ?? null, + createdByUserId: actorUserId ?? null, + })) as Issue; + await logPluginActivity({ + companyId, + action: "issue.created", + entityType: "issue", + entityId: issue.id, + actor: { actorAgentId, actorUserId, actorRunId }, + details: { + title: issue.title, + identifier: issue.identifier, + originKind: normalizedOriginKind, + originId: issue.originId, + billingCode: issue.billingCode, + blockedByIssueIds: params.blockedByIssueIds ?? 
[], + }, + }); + return issue; }, async update(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + const existing = requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const patch = { ...(params.patch as Record) }; + const actorAgentId = typeof patch.actorAgentId === "string" ? patch.actorAgentId : null; + const actorUserId = typeof patch.actorUserId === "string" ? patch.actorUserId : null; + const actorRunId = typeof patch.actorRunId === "string" ? patch.actorRunId : null; + delete patch.actorAgentId; + delete patch.actorUserId; + delete patch.actorRunId; + if (patch.originKind !== undefined) { + patch.originKind = normalizePluginOriginKind(patch.originKind); + } + const updated = (await issues.update(params.issueId, { + ...(patch as any), + actorAgentId, + actorUserId, + })) as Issue; + await logPluginActivity({ + companyId, + action: "issue.updated", + entityType: "issue", + entityId: updated.id, + actor: { actorAgentId, actorUserId, actorRunId }, + details: { + identifier: updated.identifier, + patch, + _previous: { + status: existing.status, + assigneeAgentId: existing.assigneeAgentId, + assigneeUserId: existing.assigneeUserId, + }, + }, + }); + return updated; + }, + async getRelations(params) { const companyId = ensureCompanyId(params.companyId); await ensurePluginAvailableForCompany(companyId); requireInCompany("Issue", await issues.getById(params.issueId), companyId); - return (await issues.update(params.issueId, params.patch as any)) as Issue; + return await issues.getRelationSummaries(params.issueId); + }, + async setBlockedBy(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + return setBlockedByWithActivity({ + companyId, + issueId: params.issueId, + blockedByIssueIds: params.blockedByIssueIds, + mutation: "set", + actorAgentId: params.actorAgentId, + actorUserId: params.actorUserId, + 
actorRunId: params.actorRunId, + }); + }, + async addBlockers(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const previous = await issues.getRelationSummaries(params.issueId); + const nextBlockedByIssueIds = [ + ...new Set([ + ...previous.blockedBy.map((relation) => relation.id), + ...params.blockerIssueIds, + ]), + ]; + return setBlockedByWithActivity({ + companyId, + issueId: params.issueId, + blockedByIssueIds: nextBlockedByIssueIds, + mutation: "add", + actorAgentId: params.actorAgentId, + actorUserId: params.actorUserId, + actorRunId: params.actorRunId, + }); + }, + async removeBlockers(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const previous = await issues.getRelationSummaries(params.issueId); + const removals = new Set(params.blockerIssueIds); + const nextBlockedByIssueIds = previous.blockedBy + .map((relation) => relation.id) + .filter((issueId) => !removals.has(issueId)); + return setBlockedByWithActivity({ + companyId, + issueId: params.issueId, + blockedByIssueIds: nextBlockedByIssueIds, + mutation: "remove", + actorAgentId: params.actorAgentId, + actorUserId: params.actorUserId, + actorRunId: params.actorRunId, + }); + }, + async assertCheckoutOwner(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const ownership = await issues.assertCheckoutOwner( + params.issueId, + params.actorAgentId, + params.actorRunId, + ); + if (ownership.adoptedFromRunId) { + await logPluginActivity({ + companyId, + action: "issue.checkout_lock_adopted", + entityType: "issue", + entityId: params.issueId, + actor: { + actorAgentId: 
params.actorAgentId, + actorRunId: params.actorRunId, + }, + details: { + previousCheckoutRunId: ownership.adoptedFromRunId, + checkoutRunId: params.actorRunId, + reason: "stale_checkout_run", + }, + }); + } + return { + issueId: ownership.id, + status: ownership.status as Issue["status"], + assigneeAgentId: ownership.assigneeAgentId, + checkoutRunId: ownership.checkoutRunId, + adoptedFromRunId: ownership.adoptedFromRunId, + }; + }, + async getSubtree(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + const rootIssue = requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const includeRoot = params.includeRoot !== false; + const subtreeIssueIds = await collectIssueSubtreeIds(companyId, rootIssue.id); + const issueIds = includeRoot ? subtreeIssueIds : subtreeIssueIds.filter((issueId) => issueId !== rootIssue.id); + const issueRows = issueIds.length > 0 + ? await db + .select() + .from(issuesTable) + .where(and(eq(issuesTable.companyId, companyId), inArray(issuesTable.id, issueIds))) + : []; + const issuesById = new Map(issueRows.map((issue) => [issue.id, issue as Issue])); + const outputIssues = issueIds + .map((issueId) => issuesById.get(issueId)) + .filter((issue): issue is Issue => Boolean(issue)); + + const assigneeAgentIds = [ + ...new Set(outputIssues.map((issue) => issue.assigneeAgentId).filter((id): id is string => Boolean(id))), + ]; + + const [relationPairs, documentPairs, activeRunRows, assigneeRows] = await Promise.all([ + params.includeRelations + ? Promise.all(issueIds.map(async (issueId) => [issueId, await issues.getRelationSummaries(issueId)] as const)) + : Promise.resolve(null), + params.includeDocuments + ? 
Promise.all( + issueIds.map(async (issueId) => { + const docs = await documents.listIssueDocuments(issueId); + const summaries: IssueDocumentSummary[] = docs.map((document) => { + const { body: _body, ...summary } = document as typeof document & { body?: string }; + return { ...summary, format: "markdown" as const }; + }); + return [ + issueId, + summaries, + ] as const; + }), + ) + : Promise.resolve(null), + params.includeActiveRuns + ? getIssueRunSummaries(companyId, issueIds, { activeOnly: true }) + : Promise.resolve(null), + params.includeAssignees && assigneeAgentIds.length > 0 + ? db + .select({ + id: agentsTable.id, + name: agentsTable.name, + role: agentsTable.role, + title: agentsTable.title, + status: agentsTable.status, + }) + .from(agentsTable) + .where(and(eq(agentsTable.companyId, companyId), inArray(agentsTable.id, assigneeAgentIds))) + : Promise.resolve(params.includeAssignees ? [] : null), + ]); + + const activeRuns = activeRunRows + ? Object.fromEntries(issueIds.map((issueId) => [ + issueId, + activeRunRows.filter((run) => run.issueId === issueId), + ])) + : undefined; + + return { + rootIssueId: rootIssue.id, + companyId, + issueIds, + issues: outputIssues, + ...(relationPairs ? { relations: Object.fromEntries(relationPairs) } : {}), + ...(documentPairs ? { documents: Object.fromEntries(documentPairs) } : {}), + ...(activeRuns ? { activeRuns } : {}), + ...(assigneeRows + ? 
{ + assignees: Object.fromEntries(assigneeRows.map((agent) => [ + agent.id, + { ...agent, status: agent.status as Agent["status"] } as PluginIssueAssigneeSummary, + ])), + } + : {}), + }; + }, + async requestWakeup(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + const issue = requireInCompany("Issue", await issues.getById(params.issueId), companyId); + if (!issue.assigneeAgentId) { + throw new Error("Issue has no assigned agent to wake"); + } + if (["backlog", "done", "cancelled"].includes(issue.status)) { + throw new Error(`Issue is not wakeable in status: ${issue.status}`); + } + const relations = await issues.getRelationSummaries(issue.id); + const unresolvedBlockers = relations.blockedBy.filter((blocker) => blocker.status !== "done"); + if (unresolvedBlockers.length > 0) { + throw new Error("Issue is blocked by unresolved blockers"); + } + const budgetBlock = await budgets.getInvocationBlock(companyId, issue.assigneeAgentId, { + issueId: issue.id, + projectId: issue.projectId, + }); + if (budgetBlock) { + throw new Error(budgetBlock.reason); + } + const contextSource = params.contextSource ?? "plugin.issue.requestWakeup"; + const run = await heartbeat.wakeup(issue.assigneeAgentId, { + source: "assignment", + triggerDetail: "system", + reason: params.reason ?? "plugin_issue_wakeup_requested", + payload: { + issueId: issue.id, + mutation: "plugin_wakeup", + pluginId, + pluginKey, + contextSource, + }, + idempotencyKey: params.idempotencyKey ?? null, + requestedByActorType: "system", + requestedByActorId: pluginId, + contextSnapshot: { + issueId: issue.id, + taskId: issue.id, + wakeReason: params.reason ?? 
"plugin_issue_wakeup_requested", + source: contextSource, + pluginId, + pluginKey, + }, + }); + await logPluginActivity({ + companyId, + action: "issue.assignment_wakeup_requested", + entityType: "issue", + entityId: issue.id, + actor: { + actorAgentId: params.actorAgentId, + actorUserId: params.actorUserId, + actorRunId: params.actorRunId, + }, + details: { + identifier: issue.identifier, + assigneeAgentId: issue.assigneeAgentId, + runId: run?.id ?? null, + reason: params.reason ?? "plugin_issue_wakeup_requested", + contextSource, + }, + }); + return { queued: Boolean(run), runId: run?.id ?? null }; + }, + async requestWakeups(params) { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + const results = []; + for (const issueId of [...new Set(params.issueIds)]) { + const issue = requireInCompany("Issue", await issues.getById(issueId), companyId); + if (!issue.assigneeAgentId) { + throw new Error("Issue has no assigned agent to wake"); + } + if (["backlog", "done", "cancelled"].includes(issue.status)) { + throw new Error(`Issue is not wakeable in status: ${issue.status}`); + } + const relations = await issues.getRelationSummaries(issue.id); + const unresolvedBlockers = relations.blockedBy.filter((blocker) => blocker.status !== "done"); + if (unresolvedBlockers.length > 0) { + throw new Error("Issue is blocked by unresolved blockers"); + } + const budgetBlock = await budgets.getInvocationBlock(companyId, issue.assigneeAgentId, { + issueId: issue.id, + projectId: issue.projectId, + }); + if (budgetBlock) { + throw new Error(budgetBlock.reason); + } + const contextSource = params.contextSource ?? "plugin.issue.requestWakeups"; + const run = await heartbeat.wakeup(issue.assigneeAgentId, { + source: "assignment", + triggerDetail: "system", + reason: params.reason ?? 
"plugin_issue_wakeup_requested", + payload: { + issueId: issue.id, + mutation: "plugin_wakeup", + pluginId, + pluginKey, + contextSource, + }, + idempotencyKey: params.idempotencyKeyPrefix ? `${params.idempotencyKeyPrefix}:${issue.id}` : null, + requestedByActorType: "system", + requestedByActorId: pluginId, + contextSnapshot: { + issueId: issue.id, + taskId: issue.id, + wakeReason: params.reason ?? "plugin_issue_wakeup_requested", + source: contextSource, + pluginId, + pluginKey, + }, + }); + await logPluginActivity({ + companyId, + action: "issue.assignment_wakeup_requested", + entityType: "issue", + entityId: issue.id, + actor: { + actorAgentId: params.actorAgentId, + actorUserId: params.actorUserId, + actorRunId: params.actorRunId, + }, + details: { + identifier: issue.identifier, + assigneeAgentId: issue.assigneeAgentId, + runId: run?.id ?? null, + reason: params.reason ?? "plugin_issue_wakeup_requested", + contextSource, + }, + }); + results.push({ issueId: issue.id, queued: Boolean(run), runId: run?.id ?? null }); + } + return results; + }, + async getOrchestrationSummary(params): Promise { + const companyId = ensureCompanyId(params.companyId); + await ensurePluginAvailableForCompany(companyId); + const rootIssue = requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const subtreeIssueIds = params.includeSubtree + ? 
await collectIssueSubtreeIds(companyId, rootIssue.id) + : [rootIssue.id]; + const relationPairs = await Promise.all( + subtreeIssueIds.map(async (issueId) => [issueId, await issues.getRelationSummaries(issueId)] as const), + ); + const approvalRows = ( + await Promise.all( + subtreeIssueIds.map(async (issueId) => { + const rows = await issueApprovals.listApprovalsForIssue(issueId); + return rows.map((approval) => ({ + issueId, + id: approval.id, + type: approval.type, + status: approval.status, + requestedByAgentId: approval.requestedByAgentId, + requestedByUserId: approval.requestedByUserId, + decidedByUserId: approval.decidedByUserId, + decidedAt: approval.decidedAt?.toISOString() ?? null, + createdAt: approval.createdAt.toISOString(), + })); + }), + ) + ).flat(); + const [runs, costsSummary, openBudgetIncidents] = await Promise.all([ + getIssueRunSummaries(companyId, subtreeIssueIds), + getIssueCostSummary(companyId, subtreeIssueIds, params.billingCode ?? rootIssue.billingCode ?? null), + getOpenBudgetIncidents(companyId), + ]); + const issueRows = await db + .select({ + id: issuesTable.id, + assigneeAgentId: issuesTable.assigneeAgentId, + projectId: issuesTable.projectId, + }) + .from(issuesTable) + .where(and(eq(issuesTable.companyId, companyId), inArray(issuesTable.id, subtreeIssueIds))); + const invocationBlocks = ( + await Promise.all( + issueRows + .filter((issueRow) => issueRow.assigneeAgentId) + .map(async (issueRow) => { + const block = await budgets.getInvocationBlock(companyId, issueRow.assigneeAgentId!, { + issueId: issueRow.id, + projectId: issueRow.projectId, + }); + return block + ? 
{ + issueId: issueRow.id, + agentId: issueRow.assigneeAgentId!, + scopeType: block.scopeType, + scopeId: block.scopeId, + scopeName: block.scopeName, + reason: block.reason, + } + : null; + }), + ) + ).filter((block): block is NonNullable => block !== null); + return { + issueId: rootIssue.id, + companyId, + subtreeIssueIds, + relations: Object.fromEntries(relationPairs), + approvals: approvalRows, + runs, + costs: costsSummary, + openBudgetIncidents, + invocationBlocks, + }; }, async listComments(params) { const companyId = ensureCompanyId(params.companyId); @@ -803,12 +1486,25 @@ export function buildHostServices( async createComment(params) { const companyId = ensureCompanyId(params.companyId); await ensurePluginAvailableForCompany(companyId); - requireInCompany("Issue", await issues.getById(params.issueId), companyId); - return (await issues.addComment( + const issue = requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const comment = (await issues.addComment( params.issueId, params.body, { agentId: params.authorAgentId }, )) as IssueComment; + await logPluginActivity({ + companyId, + action: "issue.comment.created", + entityType: "issue", + entityId: issue.id, + actor: { actorAgentId: params.authorAgentId ?? null }, + details: { + identifier: issue.identifier, + commentId: comment.id, + bodySnippet: comment.body.slice(0, 120), + }, + }); + return comment; }, }, @@ -830,7 +1526,7 @@ export function buildHostServices( async upsert(params) { const companyId = ensureCompanyId(params.companyId); await ensurePluginAvailableForCompany(companyId); - requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const issue = requireInCompany("Issue", await issues.getById(params.issueId), companyId); const result = await documents.upsertIssueDocument({ issueId: params.issueId, key: params.key, @@ -839,13 +1535,35 @@ export function buildHostServices( format: params.format ?? "markdown", changeSummary: params.changeSummary ?? 
null, }); + await logPluginActivity({ + companyId, + action: "issue.document_upserted", + entityType: "issue", + entityId: issue.id, + details: { + identifier: issue.identifier, + documentKey: params.key, + title: params.title ?? null, + format: params.format ?? "markdown", + }, + }); return result.document as any; }, async delete(params) { const companyId = ensureCompanyId(params.companyId); await ensurePluginAvailableForCompany(companyId); - requireInCompany("Issue", await issues.getById(params.issueId), companyId); + const issue = requireInCompany("Issue", await issues.getById(params.issueId), companyId); await documents.deleteIssueDocument(params.issueId, params.key); + await logPluginActivity({ + companyId, + action: "issue.document_deleted", + entityType: "issue", + entityId: issue.id, + details: { + identifier: issue.identifier, + documentKey: params.key, + }, + }); }, }, diff --git a/server/src/services/plugin-loader.ts b/server/src/services/plugin-loader.ts index 1ceadd1917..9580118015 100644 --- a/server/src/services/plugin-loader.ts +++ b/server/src/services/plugin-loader.ts @@ -48,6 +48,7 @@ import type { PluginJobScheduler } from "./plugin-job-scheduler.js"; import type { PluginJobStore } from "./plugin-job-store.js"; import type { PluginToolDispatcher } from "./plugin-tool-dispatcher.js"; import type { PluginLifecycleManager } from "./plugin-lifecycle.js"; +import { pluginDatabaseService } from "./plugin-database.js"; const execFileAsync = promisify(execFile); const __dirname = path.dirname(fileURLToPath(import.meta.url)); @@ -147,6 +148,9 @@ export interface PluginLoaderOptions { */ localPluginDir?: string; + /** Optional direct Postgres connection used for plugin DDL migrations. */ + migrationDb?: Db; + /** * Whether to scan the local filesystem directory for plugins. * Defaults to true. 
@@ -735,6 +739,7 @@ export function pluginLoader( ): PluginLoader { const { localPluginDir = DEFAULT_LOCAL_PLUGIN_DIR, + migrationDb = db, enableLocalFilesystem = true, enableNpmDiscovery = true, } = options; @@ -1701,14 +1706,22 @@ export function pluginLoader( // 1. Resolve worker entrypoint // ------------------------------------------------------------------ const workerEntrypoint = resolveWorkerEntrypoint(plugin, localPluginDir); + const packageRoot = resolvePluginPackageRoot(plugin, localPluginDir); // ------------------------------------------------------------------ - // 2. Build host handlers for this plugin + // 2. Apply restricted database migrations before worker startup + // ------------------------------------------------------------------ + const databaseNamespace = manifest.database + ? (await pluginDatabaseService(migrationDb).applyMigrations(pluginId, manifest, packageRoot))?.namespaceName ?? null + : null; + + // ------------------------------------------------------------------ + // 3. Build host handlers for this plugin // ------------------------------------------------------------------ const hostHandlers = buildHostHandlers(pluginId, manifest); // ------------------------------------------------------------------ - // 3. Retrieve plugin config (if any) + // 4. Retrieve plugin config (if any) // ------------------------------------------------------------------ let config: Record = {}; try { @@ -1722,7 +1735,7 @@ export function pluginLoader( } // ------------------------------------------------------------------ - // 4. Spawn worker process + // 5. 
Spawn worker process // ------------------------------------------------------------------ const workerOptions: WorkerStartOptions = { entrypointPath: workerEntrypoint, @@ -1730,6 +1743,7 @@ export function pluginLoader( config, instanceInfo, apiVersion: manifest.apiVersion, + databaseNamespace, hostHandlers, autoRestart: true, }; @@ -1750,7 +1764,7 @@ export function pluginLoader( ); // ------------------------------------------------------------------ - // 5. Sync job declarations and register with scheduler + // 6. Sync job declarations and register with scheduler // ------------------------------------------------------------------ const jobDeclarations = manifest.jobs ?? []; if (jobDeclarations.length > 0) { @@ -1939,6 +1953,26 @@ function resolveWorkerEntrypoint( ); } +function resolvePluginPackageRoot( + plugin: PluginRecord & { packagePath?: string | null }, + localPluginDir: string, +): string { + if (plugin.packagePath && existsSync(plugin.packagePath)) { + return path.resolve(plugin.packagePath); + } + + const packageName = plugin.packageName; + const packageDir = packageName.startsWith("@") + ? 
path.join(localPluginDir, "node_modules", ...packageName.split("/")) + : path.join(localPluginDir, "node_modules", packageName); + if (existsSync(packageDir)) return packageDir; + + const directDir = path.join(localPluginDir, packageName); + if (existsSync(directDir)) return directDir; + + throw new Error(`Package root not found for plugin "${plugin.pluginKey}"`); +} + function resolveManagedInstallPackageDir(localPluginDir: string, packageName: string): string { if (packageName.startsWith("@")) { return path.join(localPluginDir, "node_modules", ...packageName.split("/")); diff --git a/server/src/services/plugin-worker-manager.ts b/server/src/services/plugin-worker-manager.ts index b55ba1bcdf..8413af10d0 100644 --- a/server/src/services/plugin-worker-manager.ts +++ b/server/src/services/plugin-worker-manager.ts @@ -166,6 +166,8 @@ export interface WorkerStartOptions { }; /** Host API version. */ apiVersion: number; + /** Host-derived plugin database namespace, when declared. */ + databaseNamespace?: string | null; /** Handlers for worker→host RPC calls. */ hostHandlers: WorkerToHostHandlers; /** Default timeout for RPC calls (ms). Defaults to 30s. */ @@ -828,6 +830,7 @@ export function createPluginWorkerHandle( config: options.config, instanceInfo: options.instanceInfo, apiVersion: options.apiVersion, + databaseNamespace: options.databaseNamespace ?? null, }; try {