mirror of
https://github.com/paperclipai/paperclip
synced 2026-05-11 17:36:22 +02:00
Compare commits
84 Commits
pap-1497-d
...
pap-2445-s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a5f82c77c3 | ||
|
|
a51a30398d | ||
|
|
d2cbe2cb23 | ||
|
|
82e257c7ba | ||
|
|
868d08903e | ||
|
|
1d9f7a5149 | ||
|
|
8145141c55 | ||
|
|
54ab0d24cd | ||
|
|
b2496c8067 | ||
|
|
08af830430 | ||
|
|
d47ffa87f0 | ||
|
|
d1484551ee | ||
|
|
91333ec86f | ||
|
|
c036bbfa98 | ||
|
|
df425fde96 | ||
|
|
40782f703d | ||
|
|
4ef969f084 | ||
|
|
5bd0f578fd | ||
|
|
deba60ebb2 | ||
|
|
f68e9caa9a | ||
|
|
73fbdf36db | ||
|
|
6916e30f8e | ||
|
|
0c6961a03e | ||
|
|
5a0c1979cf | ||
|
|
9a8d219949 | ||
|
|
70679a3321 | ||
|
|
641eb44949 | ||
|
|
77a72e28c2 | ||
|
|
8f1cd0474f | ||
|
|
4fdbbeced3 | ||
|
|
7ad225a198 | ||
|
|
35a9dc37b0 | ||
|
|
e4995bbb1c | ||
|
|
f98c348e2b | ||
|
|
854fa81757 | ||
|
|
fe14de504c | ||
|
|
3d15798c22 | ||
|
|
24232078fd | ||
|
|
13551b2bac | ||
|
|
b69b563aa8 | ||
|
|
a957394420 | ||
|
|
014aa0eb2d | ||
|
|
bcbbb41a4b | ||
|
|
73ef40e7be | ||
|
|
a26e1288b6 | ||
|
|
09d0678840 | ||
|
|
ab9051b595 | ||
|
|
1954eb3048 | ||
|
|
8d0c3d2fe6 | ||
|
|
1266954a4e | ||
|
|
1bf2424377 | ||
|
|
51f127f47b | ||
|
|
b94f1a1565 | ||
|
|
2de893f624 | ||
|
|
7a329fb8bb | ||
|
|
549ef11c14 | ||
|
|
c7c1ca0c78 | ||
|
|
56b3120971 | ||
|
|
4357a3f352 | ||
|
|
0f4e4b4c10 | ||
|
|
73eb23734f | ||
|
|
9c6f551595 | ||
|
|
16b2b84d84 | ||
|
|
057fee4836 | ||
|
|
fee514efcb | ||
|
|
d8b63a18e7 | ||
|
|
e89d3f7e11 | ||
|
|
236d11d36f | ||
|
|
b9a80dcf22 | ||
|
|
e93e418cbf | ||
|
|
407e76c1db | ||
|
|
e458145583 | ||
|
|
f701c3e78c | ||
|
|
1afb6be961 | ||
|
|
b8725c52ef | ||
|
|
5f45712846 | ||
|
|
d4c3899ca4 | ||
|
|
7463479fc8 | ||
|
|
3fa5d25de1 | ||
|
|
c1a02497b0 | ||
|
|
390502736c | ||
|
|
0d87fd9a11 | ||
|
|
6059c665d5 | ||
|
|
f460f744ef |
@@ -154,6 +154,14 @@ Each AGENTS.md body should include not just what the agent does, but how they fi
|
||||
|
||||
This turns a collection of agents into an organization that actually works together. Without workflow context, agents operate in isolation — they do their job but don't know what happens before or after them.
|
||||
|
||||
Add a concise execution contract to every generated working agent:
|
||||
|
||||
- Start actionable work in the same heartbeat and do not stop at a plan unless planning was requested.
|
||||
- Leave durable progress in comments, documents, or work products with the next action.
|
||||
- Use child issues for long or parallel delegated work instead of polling agents, sessions, or processes.
|
||||
- Mark blocked work with the unblock owner and action.
|
||||
- Respect budget, pause/cancel, approval gates, and company boundaries.
|
||||
|
||||
### Step 5: Confirm Output Location
|
||||
|
||||
Ask the user where to write the package. Common options:
|
||||
|
||||
@@ -105,6 +105,13 @@ Your responsibilities:
|
||||
- Implement features and fix bugs
|
||||
- Write tests and documentation
|
||||
- Participate in code reviews
|
||||
|
||||
Execution contract:
|
||||
|
||||
- Start actionable implementation work in the same heartbeat; do not stop at a plan unless planning was requested.
|
||||
- Leave durable progress with a clear next action.
|
||||
- Use child issues for long or parallel delegated work instead of polling agents, sessions, or processes.
|
||||
- Mark blocked work with the unblock owner and action.
|
||||
```
|
||||
|
||||
## teams/engineering/TEAM.md
|
||||
|
||||
@@ -548,7 +548,7 @@ Import from `@paperclipai/adapter-utils/server-utils`:
|
||||
### Prompt Templates
|
||||
- Support `promptTemplate` for every run
|
||||
- Use `renderTemplate()` with the standard variable set
|
||||
- Default prompt: `"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work."`
|
||||
- Default prompt should use `DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE` from `@paperclipai/adapter-utils/server-utils` so local adapters share Paperclip's execution contract: act in the same heartbeat, avoid planning-only exits unless requested, leave durable progress and a next action, use child issues instead of polling, mark blockers with owner/action, and respect governance boundaries.
|
||||
|
||||
### Error Handling
|
||||
- Differentiate timeout vs process error vs parse failure
|
||||
|
||||
@@ -2,3 +2,6 @@ DATABASE_URL=postgres://paperclip:paperclip@localhost:5432/paperclip
|
||||
PORT=3100
|
||||
SERVE_UI=false
|
||||
BETTER_AUTH_SECRET=paperclip-dev-secret
|
||||
|
||||
# Discord webhook for daily merge digest (scripts/discord-daily-digest.sh)
|
||||
# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/...
|
||||
|
||||
3
.github/PULL_REQUEST_TEMPLATE.md
vendored
3
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -38,6 +38,8 @@
|
||||
|
||||
-
|
||||
|
||||
> For core feature work, check [`ROADMAP.md`](ROADMAP.md) first and discuss it in `#dev` before opening the PR. Feature PRs that overlap with planned core work may need to be redirected — check the roadmap first. See `CONTRIBUTING.md`.
|
||||
|
||||
## Model Used
|
||||
|
||||
<!--
|
||||
@@ -57,6 +59,7 @@
|
||||
|
||||
- [ ] I have included a thinking path that traces from project context to this change
|
||||
- [ ] I have specified the model used (with version and capability details)
|
||||
- [ ] I have checked ROADMAP.md and confirmed this PR does not duplicate planned core work
|
||||
- [ ] I have run tests locally and they pass
|
||||
- [ ] I have added or updated tests where applicable
|
||||
- [ ] If this change affects the UI, I have included before/after screenshots
|
||||
|
||||
39
.github/workflows/pr.yml
vendored
39
.github/workflows/pr.yml
vendored
@@ -41,44 +41,7 @@ jobs:
|
||||
node-version: 24
|
||||
|
||||
- name: Validate Dockerfile deps stage
|
||||
run: |
|
||||
missing=0
|
||||
|
||||
# Extract only the deps stage from the Dockerfile
|
||||
deps_stage="$(awk '/^FROM .* AS deps$/{found=1; next} found && /^FROM /{exit} found{print}' Dockerfile)"
|
||||
|
||||
if [ -z "$deps_stage" ]; then
|
||||
echo "::error::Could not extract deps stage from Dockerfile (expected 'FROM ... AS deps')"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Derive workspace search roots from pnpm-workspace.yaml (exclude dev-only packages)
|
||||
search_roots="$(grep '^ *- ' pnpm-workspace.yaml | sed 's/^ *- //' | sed 's/\*$//' | grep -v 'examples' | grep -v 'create-paperclip-plugin' | tr '\n' ' ')"
|
||||
|
||||
if [ -z "$search_roots" ]; then
|
||||
echo "::error::Could not derive workspace roots from pnpm-workspace.yaml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check all workspace package.json files are copied in the deps stage
|
||||
for pkg in $(find $search_roots -maxdepth 2 -name package.json -not -path '*/examples/*' -not -path '*/create-paperclip-plugin/*' -not -path '*/node_modules/*' 2>/dev/null | sort -u); do
|
||||
dir="$(dirname "$pkg")"
|
||||
if ! echo "$deps_stage" | grep -q "^COPY ${dir}/package.json"; then
|
||||
echo "::error::Dockerfile deps stage missing: COPY ${pkg} ${dir}/"
|
||||
missing=1
|
||||
fi
|
||||
done
|
||||
|
||||
# Check patches directory is copied if it exists
|
||||
if [ -d patches ] && ! echo "$deps_stage" | grep -q '^COPY patches/'; then
|
||||
echo "::error::Dockerfile deps stage missing: COPY patches/ patches/"
|
||||
missing=1
|
||||
fi
|
||||
|
||||
if [ "$missing" -eq 1 ]; then
|
||||
echo "Dockerfile deps stage is out of sync. Update it to include the missing files."
|
||||
exit 1
|
||||
fi
|
||||
run: node ./scripts/check-docker-deps-stage.mjs
|
||||
|
||||
- name: Validate dependency resolution when manifests change
|
||||
run: |
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,5 +1,9 @@
|
||||
node_modules
|
||||
node_modules/
|
||||
**/node_modules
|
||||
**/node_modules/
|
||||
dist/
|
||||
ui/storybook-static/
|
||||
.env
|
||||
*.tsbuildinfo
|
||||
drizzle/meta/
|
||||
@@ -32,6 +36,7 @@ server/src/**/*.d.ts
|
||||
server/src/**/*.d.ts.map
|
||||
tmp/
|
||||
feedback-export-*
|
||||
diagnostics/
|
||||
|
||||
# Editor / tool temp files
|
||||
*.tmp
|
||||
|
||||
@@ -123,7 +123,9 @@ pnpm test:release-smoke
|
||||
|
||||
Run the browser suites only when your change touches them or when you are explicitly verifying CI/release flows.
|
||||
|
||||
Run this full check before claiming done:
|
||||
For normal issue work, run the smallest relevant verification first. Do not default to repo-wide typecheck/build/test on every heartbeat when a narrower check is enough to prove the change.
|
||||
|
||||
Run this full check before claiming repo work done in a PR-ready hand-off, or when the change scope is broad enough that targeted checks are not sufficient:
|
||||
|
||||
```sh
|
||||
pnpm -r typecheck
|
||||
|
||||
@@ -51,6 +51,21 @@ All tests must pass before a PR can be merged. Run them locally first and verify
|
||||
|
||||
We use [Greptile](https://greptile.com) for automated code review. Your PR must achieve a **5/5 Greptile score** with **all Greptile comments addressed** before it can be merged. If Greptile leaves comments, fix or respond to each one and request a re-review.
|
||||
|
||||
## Feature Contributions
|
||||
|
||||
We actively manage the core Paperclip feature roadmap.
|
||||
|
||||
Uncoordinated feature PRs against the core product may be closed, even when the implementation is thoughtful and high quality. That is about roadmap ownership, product coherence, and long-term maintenance commitment, not a judgment about the effort.
|
||||
|
||||
If you want to contribute a feature:
|
||||
|
||||
- Check [ROADMAP.md](ROADMAP.md) first
|
||||
- Start the discussion in Discord -> `#dev` before writing code
|
||||
- If the idea fits as an extension, prefer building it with the [plugin system](doc/plugins/PLUGIN_SPEC.md)
|
||||
- If you want to show a possible direction, reference implementations are welcome as feedback, but they generally will not be merged directly into core
|
||||
|
||||
Bugs, docs improvements, and small targeted improvements are still the easiest path to getting merged, and we really do appreciate them.
|
||||
|
||||
## General Rules (both paths)
|
||||
|
||||
- Write clear commit messages
|
||||
|
||||
16
Dockerfile
16
Dockerfile
@@ -1,16 +1,9 @@
|
||||
# syntax=docker/dockerfile:1.20
|
||||
FROM node:lts-trixie-slim AS base
|
||||
ARG USER_UID=1000
|
||||
ARG USER_GID=1000
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates gosu curl git wget ripgrep python3 \
|
||||
&& mkdir -p -m 755 /etc/apt/keyrings \
|
||||
&& wget -nv -O/etc/apt/keyrings/githubcli-archive-keyring.gpg https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||
&& echo "20e0125d6f6e077a9ad46f03371bc26d90b04939fb95170f5a1905099cc6bcc0 /etc/apt/keyrings/githubcli-archive-keyring.gpg" | sha256sum -c - \
|
||||
&& chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
|
||||
&& mkdir -p -m 755 /etc/apt/sources.list.d \
|
||||
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" > /etc/apt/sources.list.d/github-cli.list \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y --no-install-recommends gh \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates gosu curl gh git wget ripgrep python3 \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& corepack enable
|
||||
|
||||
@@ -37,6 +30,8 @@ COPY packages/adapters/openclaw-gateway/package.json packages/adapters/openclaw-
|
||||
COPY packages/adapters/opencode-local/package.json packages/adapters/opencode-local/
|
||||
COPY packages/adapters/pi-local/package.json packages/adapters/pi-local/
|
||||
COPY packages/plugins/sdk/package.json packages/plugins/sdk/
|
||||
COPY --parents packages/plugins/sandbox-providers/./*/package.json packages/plugins/sandbox-providers/
|
||||
COPY packages/plugins/paperclip-plugin-fake-sandbox/package.json packages/plugins/paperclip-plugin-fake-sandbox/
|
||||
COPY patches/ patches/
|
||||
|
||||
RUN pnpm install --frozen-lockfile
|
||||
@@ -56,6 +51,9 @@ ARG USER_GID=1000
|
||||
WORKDIR /app
|
||||
COPY --chown=node:node --from=build /app /app
|
||||
RUN npm install --global --omit=dev @anthropic-ai/claude-code@latest @openai/codex@latest opencode-ai \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y --no-install-recommends openssh-client jq \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& mkdir -p /paperclip \
|
||||
&& chown node:node /paperclip
|
||||
|
||||
|
||||
115
README.md
115
README.md
@@ -156,6 +156,115 @@ Paperclip handles the hard orchestration details correctly.
|
||||
|
||||
<br/>
|
||||
|
||||
## What's Under the Hood
|
||||
|
||||
Paperclip is a full control plane, not a wrapper. Before you build any of this yourself, know that it already exists:
|
||||
|
||||
```
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ PAPERCLIP SERVER │
|
||||
│ │
|
||||
│ ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐ │
|
||||
│ │Identity & │ │ Work & │ │ Heartbeat │ │Governance │ │
|
||||
│ │ Access │ │ Tasks │ │ Execution │ │& Approvals│ │
|
||||
│ └───────────┘ └───────────┘ └───────────┘ └───────────┘ │
|
||||
│ │
|
||||
│ ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐ │
|
||||
│ │ Org Chart │ │Workspaces │ │ Plugins │ │ Budget │ │
|
||||
│ │ & Agents │ │ & Runtime │ │ │ │ & Costs │ │
|
||||
│ └───────────┘ └───────────┘ └───────────┘ └───────────┘ │
|
||||
│ │
|
||||
│ ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐ │
|
||||
│ │ Routines │ │ Secrets & │ │ Activity │ │ Company │ │
|
||||
│ │& Schedules│ │ Storage │ │ & Events │ │Portability│ │
|
||||
│ └───────────┘ └───────────┘ └───────────┘ └───────────┘ │
|
||||
└──────────────────────────────────────────────────────────────┘
|
||||
▲ ▲ ▲ ▲
|
||||
┌─────┴─────┐ ┌─────┴─────┐ ┌─────┴─────┐ ┌─────┴─────┐
|
||||
│ Claude │ │ Codex │ │ CLI │ │ HTTP/web │
|
||||
│ Code │ │ │ │ agents │ │ bots │
|
||||
└───────────┘ └───────────┘ └───────────┘ └───────────┘
|
||||
```
|
||||
|
||||
### The Systems
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td width="50%">
|
||||
|
||||
**Identity & Access** — Two deployment modes (trusted local or authenticated), board users, agent API keys, short-lived run JWTs, company memberships, invite flows, and OpenClaw onboarding. Every mutating request is traced to an actor.
|
||||
|
||||
</td>
|
||||
<td width="50%">
|
||||
|
||||
**Org Chart & Agents** — Agents have roles, titles, reporting lines, permissions, and budgets. Adapter examples match the diagram: Claude Code, Codex, CLI agents such as Cursor/Gemini/bash, HTTP/webhook bots such as OpenClaw, and external adapter plugins. If it can receive a heartbeat, it's hired.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
**Work & Task System** — Issues carry company/project/goal/parent links, atomic checkout with execution locks, first-class blocker dependencies, comments, documents, attachments, work products, labels, and inbox state. No double-work, no lost context.
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
**Heartbeat Execution** — DB-backed wakeup queue with coalescing, budget checks, workspace resolution, secret injection, skill loading, and adapter invocation. Runs produce structured logs, cost events, session state, and audit trails. Recovery handles orphaned runs automatically.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
**Workspaces & Runtime** — Project workspaces, isolated execution workspaces (git worktrees, operator branches), and runtime services (dev servers, preview URLs). Agents work in the right directory with the right context every time.
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
**Governance & Approvals** — Board approval workflows, execution policies with review/approval stages, decision tracking, budget hard-stops, agent pause/resume/terminate, and full audit logging. You're the board — nothing ships without your sign-off.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
**Budget & Cost Control** — Token and cost tracking by company, agent, project, goal, issue, provider, and model. Scoped budget policies with warning thresholds and hard stops. Overspend pauses agents and cancels queued work automatically.
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
**Routines & Schedules** — Recurring tasks with cron, webhook, and API triggers. Concurrency and catch-up policies. Each routine execution creates a tracked issue and wakes the assigned agent — no manual kick-offs needed.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
**Plugins** — Instance-wide plugin system with out-of-process workers, capability-gated host services, job scheduling, tool exposure, and UI contributions. Extend Paperclip without forking it.
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
**Secrets & Storage** — Instance and company secrets, encrypted local storage, provider-backed object storage, attachments, and work products. Sensitive values stay out of prompts unless a scoped run explicitly needs them.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
**Activity & Events** — Mutating actions, heartbeat state changes, cost events, approvals, comments, and work products are recorded as durable activity so operators can audit what happened and why.
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
**Company Portability** — Export and import entire organizations — agents, skills, projects, routines, and issues — with secret scrubbing and collision handling. One deployment, many companies, complete data isolation.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<br/>
|
||||
|
||||
## What Paperclip is not
|
||||
|
||||
| | |
|
||||
@@ -256,10 +365,10 @@ See [doc/DEVELOPING.md](doc/DEVELOPING.md) for the full development guide.
|
||||
- ✅ Scheduled Routines
|
||||
- ✅ Better Budgeting
|
||||
- ✅ Agent Reviews and Approvals
|
||||
- ⚪ Multiple Human Users
|
||||
- ✅ Multiple Human Users
|
||||
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
|
||||
- ⚪ Artifacts & Work Products
|
||||
- ⚪ Memory & Knowledge
|
||||
- ⚪ Memory / Knowledge
|
||||
- ⚪ Enforced Outcomes
|
||||
- ⚪ MAXIMIZER MODE
|
||||
- ⚪ Deep Planning
|
||||
@@ -270,6 +379,8 @@ See [doc/DEVELOPING.md](doc/DEVELOPING.md) for the full development guide.
|
||||
- ⚪ Cloud deployments
|
||||
- ⚪ Desktop App
|
||||
|
||||
This is the short roadmap preview. See the full roadmap in [ROADMAP.md](ROADMAP.md).
|
||||
|
||||
<br/>
|
||||
|
||||
## Community & Plugins
|
||||
|
||||
97
ROADMAP.md
Normal file
97
ROADMAP.md
Normal file
@@ -0,0 +1,97 @@
|
||||
# Roadmap
|
||||
|
||||
This document expands the roadmap preview in `README.md`.
|
||||
|
||||
Paperclip is still moving quickly. The list below is directional, not promised, and priorities may shift as we learn from users and from operating real AI companies with the product.
|
||||
|
||||
We value community involvement and want to make sure contributor energy goes toward areas where it can land.
|
||||
|
||||
We may accept contributions in the areas below, but if you want to work on roadmap-level core features, please coordinate with us first in Discord (`#dev`) before writing code. Bugs, docs, polish, and tightly scoped improvements are still the easiest contributions to merge.
|
||||
|
||||
If you want to extend Paperclip today, the best path is often the [plugin system](doc/plugins/PLUGIN_SPEC.md). Community reference implementations are also useful feedback even when they are not merged directly into core.
|
||||
|
||||
## Milestones
|
||||
|
||||
### ✅ Plugin system
|
||||
|
||||
Paperclip should keep a thin core and rich edges. Plugins are the path for optional capabilities like knowledge bases, custom tracing, queues, doc editors, and other product-specific surfaces that do not need to live in the control plane itself.
|
||||
|
||||
### ✅ Get OpenClaw / claw-style agent employees
|
||||
|
||||
Paperclip should be able to hire and manage real claw-style agent workers, not just a narrow built-in runtime. This is part of the larger "bring your own agent" story and keeps the control plane useful across different agent ecosystems.
|
||||
|
||||
### ✅ companies.sh - import and export entire organizations
|
||||
|
||||
Reusable companies matter. Import/export is the foundation for moving org structures, agent definitions, and reusable company setups between environments and eventually for broader company-template distribution.
|
||||
|
||||
### ✅ Easy AGENTS.md configurations
|
||||
|
||||
Agent setup should feel repo-native and legible. Simple `AGENTS.md`-style configuration lowers the barrier to getting an agent team running and makes it easier for contributors to understand how a company is wired together.
|
||||
|
||||
### ✅ Skills Manager
|
||||
|
||||
Agents need a practical way to discover, install, and use skills without every setup becoming bespoke. The skills layer is part of making Paperclip companies more reusable and easier to operate.
|
||||
|
||||
### ✅ Scheduled Routines
|
||||
|
||||
Recurring work should be native. Routine tasks like reports, reviews, and other periodic work need first-class scheduling so the company keeps operating even when no human is manually kicking work off.
|
||||
|
||||
### ✅ Better Budgeting
|
||||
|
||||
Budgets are a core control-plane feature, not an afterthought. Better budgeting means clearer spend visibility, safer hard stops, and better operator control over how autonomy turns into real cost.
|
||||
|
||||
### ✅ Agent Reviews and Approvals
|
||||
|
||||
Paperclip should support explicit review and approval stages as first-class workflow steps, not just ad hoc comments. That means reviewer routing, approval gates, change requests, and durable audit trails that fit the same task model as the rest of the control plane.
|
||||
|
||||
### ✅ Multiple Human Users
|
||||
|
||||
Paperclip needs a clearer path from solo operator to real human teams. That means shared board access, safer collaboration, and a better model for several humans supervising the same autonomous company.
|
||||
|
||||
### ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
|
||||
|
||||
We want agents to run in more remote and sandboxed environments while preserving the same Paperclip control-plane model. This makes the system safer, more flexible, and more useful outside a single trusted local machine.
|
||||
|
||||
### ⚪ Artifacts & Work Products
|
||||
|
||||
Paperclip should make outputs first-class. That means generated artifacts, previews, deployable outputs, and the handoff from "agent did work" to "here is the result" should become more visible and easier to operate.
|
||||
|
||||
### ⚪ Memory / Knowledge
|
||||
|
||||
We want a stronger memory and knowledge surface for companies, agents, and projects. That includes durable memory, better recall of prior decisions and context, and a clearer path for knowledge-style capabilities without turning Paperclip into a generic chat app.
|
||||
|
||||
### ⚪ Enforced Outcomes
|
||||
|
||||
Paperclip should get stricter about what counts as finished work. Tasks, approvals, and execution flows should resolve to clear outcomes like merged code, published artifacts, shipped docs, or explicit decisions instead of stopping at vague status updates.
|
||||
|
||||
### ⚪ MAXIMIZER MODE
|
||||
|
||||
This is the direction for higher-autonomy execution: more aggressive delegation, deeper follow-through, and stronger operating loops with clear budgets, visibility, and governance. The point is not hidden autonomy; the point is more output per human supervisor.
|
||||
|
||||
### ⚪ Deep Planning
|
||||
|
||||
Some work needs more than a task description before execution starts. Deeper planning means stronger issue documents, revisionable plans, and clearer review loops for strategy-heavy work before agents begin execution.
|
||||
|
||||
### ⚪ Work Queues
|
||||
|
||||
Paperclip should support queue-style work streams for repeatable inputs like support, triage, review, and backlog intake. That would make it easier to route work continuously without turning every system into a one-off workflow.
|
||||
|
||||
### ⚪ Self-Organization
|
||||
|
||||
As companies grow, agents should be able to propose useful structural changes such as role adjustments, delegation changes, and new recurring routines. The goal is adaptive organizations that still stay within governance and approval boundaries.
|
||||
|
||||
### ⚪ Automatic Organizational Learning
|
||||
|
||||
Paperclip should get better at turning completed work into reusable organizational knowledge. That includes capturing playbooks, recurring fixes, and decision patterns so future work starts from what the company has already learned.
|
||||
|
||||
### ⚪ CEO Chat
|
||||
|
||||
We want a lighter-weight way to talk to leadership agents, but those conversations should still resolve to real work objects like plans, issues, approvals, or decisions. This should improve interaction without changing the core task-and-comments model.
|
||||
|
||||
### ⚪ Cloud deployments
|
||||
|
||||
Local-first remains important, but Paperclip also needs a cleaner shared deployment story. Teams should be able to run the same product in hosted or semi-hosted environments without changing the mental model.
|
||||
|
||||
### ⚪ Desktop App
|
||||
|
||||
A desktop app can make Paperclip feel more accessible and persistent for day-to-day operators. The goal is easier access, better local ergonomics, and a smoother default experience for users who want the control plane always close at hand.
|
||||
@@ -12,7 +12,7 @@
|
||||
<p align="center">
|
||||
<a href="https://github.com/paperclipai/paperclip/blob/master/LICENSE"><img src="https://img.shields.io/badge/license-MIT-blue" alt="MIT License" /></a>
|
||||
<a href="https://github.com/paperclipai/paperclip/stargazers"><img src="https://img.shields.io/github/stars/paperclipai/paperclip?style=flat" alt="Stars" /></a>
|
||||
<a href="https://discord.gg/m4HZY7xNG3"><img src="https://img.shields.io/badge/discord-join%20chat-5865F2?logo=discord&logoColor=white" alt="Discord" /></a>
|
||||
<a href="https://discord.gg/m4HZY7xNG3"><img src="https://img.shields.io/discord/000000000?label=discord" alt="Discord" /></a>
|
||||
</p>
|
||||
|
||||
<br/>
|
||||
@@ -258,7 +258,7 @@ See [doc/DEVELOPING.md](https://github.com/paperclipai/paperclip/blob/master/doc
|
||||
- ⚪ Artifacts & Deployments
|
||||
- ⚪ CEO Chat
|
||||
- ⚪ MAXIMIZER MODE
|
||||
- ⚪ Multiple Human Users
|
||||
- ✅ Multiple Human Users
|
||||
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
|
||||
- ⚪ Cloud deployments
|
||||
- ⚪ Desktop App
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { execFile, spawn } from "node:child_process";
|
||||
import { mkdirSync, mkdtempSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { existsSync, mkdirSync, mkdtempSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
@@ -104,20 +104,50 @@ function writeTestConfig(configPath: string, tempRoot: string, port: number, con
|
||||
writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
|
||||
}
|
||||
|
||||
function createServerEnv(configPath: string, port: number, connectionString: string) {
|
||||
interface TestPaperclipEnv {
|
||||
configPath: string;
|
||||
paperclipHome: string;
|
||||
instanceId: string;
|
||||
shellHome?: string;
|
||||
}
|
||||
|
||||
function createBasePaperclipEnv(options: TestPaperclipEnv) {
|
||||
const env = { ...process.env };
|
||||
for (const key of Object.keys(env)) {
|
||||
if (key.startsWith("PAPERCLIP_")) {
|
||||
delete env[key];
|
||||
}
|
||||
}
|
||||
|
||||
env.PAPERCLIP_CONFIG = options.configPath;
|
||||
env.PAPERCLIP_HOME = options.paperclipHome;
|
||||
env.PAPERCLIP_INSTANCE_ID = options.instanceId;
|
||||
env.PAPERCLIP_CONTEXT = path.join(options.paperclipHome, "context.json");
|
||||
env.PAPERCLIP_AUTH_STORE = path.join(options.paperclipHome, "auth.json");
|
||||
if (options.shellHome) {
|
||||
env.HOME = options.shellHome;
|
||||
}
|
||||
|
||||
return env;
|
||||
}
|
||||
|
||||
function createServerEnv(
|
||||
configPath: string,
|
||||
port: number,
|
||||
connectionString: string,
|
||||
options: Omit<TestPaperclipEnv, "configPath">,
|
||||
) {
|
||||
const env = createBasePaperclipEnv({
|
||||
configPath,
|
||||
...options,
|
||||
});
|
||||
|
||||
delete env.DATABASE_URL;
|
||||
delete env.PORT;
|
||||
delete env.HOST;
|
||||
delete env.SERVE_UI;
|
||||
delete env.HEARTBEAT_SCHEDULER_ENABLED;
|
||||
|
||||
env.PAPERCLIP_CONFIG = configPath;
|
||||
env.DATABASE_URL = connectionString;
|
||||
env.HOST = "127.0.0.1";
|
||||
env.PORT = String(port);
|
||||
@@ -130,13 +160,8 @@ function createServerEnv(configPath: string, port: number, connectionString: str
|
||||
return env;
|
||||
}
|
||||
|
||||
function createCliEnv() {
|
||||
const env = { ...process.env };
|
||||
for (const key of Object.keys(env)) {
|
||||
if (key.startsWith("PAPERCLIP_")) {
|
||||
delete env[key];
|
||||
}
|
||||
}
|
||||
function createCliEnv(options: TestPaperclipEnv) {
|
||||
const env = createBasePaperclipEnv(options);
|
||||
delete env.DATABASE_URL;
|
||||
delete env.PORT;
|
||||
delete env.HOST;
|
||||
@@ -183,14 +208,25 @@ async function api<T>(baseUrl: string, pathname: string, init?: RequestInit): Pr
|
||||
return text ? JSON.parse(text) as T : (null as T);
|
||||
}
|
||||
|
||||
async function runCliJson<T>(args: string[], opts: { apiBase: string; configPath: string }) {
|
||||
async function runCliJson<T>(
|
||||
args: string[],
|
||||
opts: TestPaperclipEnv & { apiBase?: string; includeConfigArg?: boolean },
|
||||
) {
|
||||
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../..");
|
||||
const cliArgs = ["--silent", "paperclipai", ...args];
|
||||
if (opts.apiBase) {
|
||||
cliArgs.push("--api-base", opts.apiBase);
|
||||
}
|
||||
if (opts.includeConfigArg !== false) {
|
||||
cliArgs.push("--config", opts.configPath);
|
||||
}
|
||||
cliArgs.push("--json");
|
||||
const result = await execFileAsync(
|
||||
"pnpm",
|
||||
["--silent", "paperclipai", ...args, "--api-base", opts.apiBase, "--config", opts.configPath, "--json"],
|
||||
cliArgs,
|
||||
{
|
||||
cwd: repoRoot,
|
||||
env: createCliEnv(),
|
||||
env: createCliEnv(opts),
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
},
|
||||
);
|
||||
@@ -235,6 +271,9 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
let configPath = "";
|
||||
let exportDir = "";
|
||||
let apiBase = "";
|
||||
let paperclipHome = "";
|
||||
let cliShellHome = "";
|
||||
let paperclipInstanceId = "";
|
||||
let serverProcess: ServerProcess | null = null;
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
|
||||
@@ -242,6 +281,11 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
tempRoot = mkdtempSync(path.join(os.tmpdir(), "paperclip-company-cli-e2e-"));
|
||||
configPath = path.join(tempRoot, "config", "config.json");
|
||||
exportDir = path.join(tempRoot, "exported-company");
|
||||
paperclipHome = path.join(tempRoot, "paperclip-home");
|
||||
cliShellHome = path.join(tempRoot, "shell-home");
|
||||
paperclipInstanceId = "company-cli-e2e";
|
||||
mkdirSync(paperclipHome, { recursive: true });
|
||||
mkdirSync(cliShellHome, { recursive: true });
|
||||
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-company-cli-db-");
|
||||
|
||||
@@ -256,7 +300,11 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
["paperclipai", "run", "--config", configPath],
|
||||
{
|
||||
cwd: repoRoot,
|
||||
env: createServerEnv(configPath, port, tempDb.connectionString),
|
||||
env: createServerEnv(configPath, port, tempDb.connectionString, {
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
}),
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
},
|
||||
);
|
||||
@@ -282,11 +330,41 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
it("exports a company package and imports it into new and existing companies", async () => {
|
||||
expect(serverProcess).not.toBeNull();
|
||||
|
||||
const cliContext = await runCliJson<{
|
||||
contextPath: string;
|
||||
profileName: string;
|
||||
profile: { apiBase?: string };
|
||||
}>(
|
||||
["context", "set", "--profile", "isolation-check", "--api-base", "https://example.test"],
|
||||
{
|
||||
configPath,
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
includeConfigArg: false,
|
||||
},
|
||||
);
|
||||
|
||||
const expectedContextPath = path.join(paperclipHome, "context.json");
|
||||
const leakedContextPath = path.join(cliShellHome, ".paperclip", "context.json");
|
||||
expect(cliContext.contextPath).toBe(expectedContextPath);
|
||||
expect(cliContext.profileName).toBe("isolation-check");
|
||||
expect(cliContext.profile.apiBase).toBe("https://example.test");
|
||||
expect(existsSync(expectedContextPath)).toBe(true);
|
||||
expect(existsSync(leakedContextPath)).toBe(false);
|
||||
rmSync(expectedContextPath, { force: true });
|
||||
expect(existsSync(expectedContextPath)).toBe(false);
|
||||
|
||||
const sourceCompany = await api<{ id: string; name: string; issuePrefix: string }>(apiBase, "/api/companies", {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({ name: `CLI Export Source ${Date.now()}` }),
|
||||
});
|
||||
await api(apiBase, `/api/companies/${sourceCompany.id}`, {
|
||||
method: "PATCH",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({ requireBoardApprovalForNewAgents: false }),
|
||||
});
|
||||
|
||||
const sourceAgent = await api<{ id: string; name: string }>(
|
||||
apiBase,
|
||||
@@ -350,7 +428,13 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
"--include",
|
||||
"company,agents,projects,issues",
|
||||
],
|
||||
{ apiBase, configPath },
|
||||
{
|
||||
apiBase,
|
||||
configPath,
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
},
|
||||
);
|
||||
|
||||
expect(exportResult.ok).toBe(true);
|
||||
@@ -374,7 +458,13 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
"company,agents,projects,issues",
|
||||
"--yes",
|
||||
],
|
||||
{ apiBase, configPath },
|
||||
{
|
||||
apiBase,
|
||||
configPath,
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
},
|
||||
);
|
||||
|
||||
expect(importedNew.company.action).toBe("created");
|
||||
@@ -393,10 +483,11 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
apiBase,
|
||||
`/api/companies/${importedNew.company.id}/issues`,
|
||||
);
|
||||
const importedMatchingIssues = importedIssues.filter((issue) => issue.title === sourceIssue.title);
|
||||
|
||||
expect(importedAgents.map((agent) => agent.name)).toContain(sourceAgent.name);
|
||||
expect(importedProjects.map((project) => project.name)).toContain(sourceProject.name);
|
||||
expect(importedIssues.map((issue) => issue.title)).toContain(sourceIssue.title);
|
||||
expect(importedMatchingIssues).toHaveLength(1);
|
||||
|
||||
const previewExisting = await runCliJson<{
|
||||
errors: string[];
|
||||
@@ -421,7 +512,13 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
"rename",
|
||||
"--dry-run",
|
||||
],
|
||||
{ apiBase, configPath },
|
||||
{
|
||||
apiBase,
|
||||
configPath,
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
},
|
||||
);
|
||||
|
||||
expect(previewExisting.errors).toEqual([]);
|
||||
@@ -448,7 +545,13 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
"rename",
|
||||
"--yes",
|
||||
],
|
||||
{ apiBase, configPath },
|
||||
{
|
||||
apiBase,
|
||||
configPath,
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
},
|
||||
);
|
||||
|
||||
expect(importedExisting.company.action).toBe("unchanged");
|
||||
@@ -466,11 +569,13 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
apiBase,
|
||||
`/api/companies/${importedNew.company.id}/issues`,
|
||||
);
|
||||
const twiceImportedMatchingIssues = twiceImportedIssues.filter((issue) => issue.title === sourceIssue.title);
|
||||
|
||||
expect(twiceImportedAgents).toHaveLength(2);
|
||||
expect(new Set(twiceImportedAgents.map((agent) => agent.name)).size).toBe(2);
|
||||
expect(twiceImportedProjects).toHaveLength(2);
|
||||
expect(twiceImportedIssues).toHaveLength(2);
|
||||
expect(twiceImportedMatchingIssues).toHaveLength(2);
|
||||
expect(new Set(twiceImportedMatchingIssues.map((issue) => issue.identifier)).size).toBe(2);
|
||||
|
||||
const zipPath = path.join(tempRoot, "exported-company.zip");
|
||||
const portableFiles: Record<string, string> = {};
|
||||
@@ -493,10 +598,16 @@ describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
"company,agents,projects,issues",
|
||||
"--yes",
|
||||
],
|
||||
{ apiBase, configPath },
|
||||
{
|
||||
apiBase,
|
||||
configPath,
|
||||
paperclipHome,
|
||||
instanceId: paperclipInstanceId,
|
||||
shellHome: cliShellHome,
|
||||
},
|
||||
);
|
||||
|
||||
expect(importedFromZip.company.action).toBe("created");
|
||||
expect(importedFromZip.agents.some((agent) => agent.action === "created")).toBe(true);
|
||||
}, 60_000);
|
||||
}, 90_000);
|
||||
});
|
||||
|
||||
24
cli/src/__tests__/env-lab.test.ts
Normal file
24
cli/src/__tests__/env-lab.test.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { collectEnvLabDoctorStatus, resolveEnvLabSshStatePath } from "../commands/env-lab.js";
|
||||
|
||||
describe("env-lab command", () => {
|
||||
it("resolves the default SSH fixture state path under the instance root", () => {
|
||||
const statePath = resolveEnvLabSshStatePath("fixture-test");
|
||||
|
||||
expect(statePath).toContain(
|
||||
path.join("instances", "fixture-test", "env-lab", "ssh-fixture", "state.json"),
|
||||
);
|
||||
});
|
||||
|
||||
it("reports doctor status for an instance without a running fixture", async () => {
|
||||
const status = await collectEnvLabDoctorStatus({ instance: "fixture-test-missing" });
|
||||
|
||||
expect(status.statePath).toContain(
|
||||
path.join("instances", "fixture-test-missing", "env-lab", "ssh-fixture", "state.json"),
|
||||
);
|
||||
expect(typeof status.ssh.supported).toBe("boolean");
|
||||
expect(status.ssh.running).toBe(false);
|
||||
expect(status.ssh.environment).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -3,11 +3,15 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execFileSync } from "node:child_process";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import {
|
||||
agents,
|
||||
authUsers,
|
||||
companies,
|
||||
createDb,
|
||||
issueComments,
|
||||
issues,
|
||||
projects,
|
||||
routines,
|
||||
routineTriggers,
|
||||
@@ -16,6 +20,7 @@ import {
|
||||
copyGitHooksToWorktreeGitDir,
|
||||
copySeededSecretsKey,
|
||||
pauseSeededScheduledRoutines,
|
||||
quarantineSeededWorktreeExecutionState,
|
||||
readSourceAttachmentBody,
|
||||
rebindWorkspaceCwd,
|
||||
resolveSourceConfigPath,
|
||||
@@ -47,6 +52,7 @@ import {
|
||||
const ORIGINAL_CWD = process.cwd();
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const itEmbeddedPostgres = embeddedPostgresSupport.supported ? it : it.skip;
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
@@ -184,8 +190,9 @@ describe("worktree helpers", () => {
|
||||
).toEqual(["worktree", "add", "-b", "my-worktree", "/tmp/my-worktree", "origin/main"]);
|
||||
});
|
||||
|
||||
it("rewrites loopback auth URLs to the new port only", () => {
|
||||
it("rewrites auth URLs only when they already include a port", () => {
|
||||
expect(rewriteLocalUrlPort("http://127.0.0.1:3100", 3110)).toBe("http://127.0.0.1:3110/");
|
||||
expect(rewriteLocalUrlPort("http://my-host.ts.net:3100", 3110)).toBe("http://my-host.ts.net:3110/");
|
||||
expect(rewriteLocalUrlPort("https://paperclip.example", 3110)).toBe("https://paperclip.example");
|
||||
});
|
||||
|
||||
@@ -280,6 +287,138 @@ describe("worktree helpers", () => {
|
||||
expect(full.nullifyColumns).toEqual({});
|
||||
});
|
||||
|
||||
itEmbeddedPostgres("quarantines copied live execution state in seeded worktree databases", async () => {
|
||||
const tempDb = await startEmbeddedPostgresTestDatabase("paperclip-worktree-quarantine-");
|
||||
const db = createDb(tempDb.connectionString);
|
||||
const companyId = randomUUID();
|
||||
const agentId = randomUUID();
|
||||
const idleAgentId = randomUUID();
|
||||
const inProgressIssueId = randomUUID();
|
||||
const todoIssueId = randomUUID();
|
||||
const reviewIssueId = randomUUID();
|
||||
const userIssueId = randomUUID();
|
||||
|
||||
try {
|
||||
await db.insert(companies).values({
|
||||
id: companyId,
|
||||
name: "Paperclip",
|
||||
issuePrefix: "WTQ",
|
||||
requireBoardApprovalForNewAgents: false,
|
||||
});
|
||||
await db.insert(agents).values([
|
||||
{
|
||||
id: agentId,
|
||||
companyId,
|
||||
name: "CodexCoder",
|
||||
role: "engineer",
|
||||
status: "running",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: {
|
||||
heartbeat: { enabled: true, intervalSec: 60 },
|
||||
wakeOnDemand: true,
|
||||
},
|
||||
permissions: {},
|
||||
},
|
||||
{
|
||||
id: idleAgentId,
|
||||
companyId,
|
||||
name: "Reviewer",
|
||||
role: "reviewer",
|
||||
status: "idle",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: { heartbeat: { enabled: false, intervalSec: 300 } },
|
||||
permissions: {},
|
||||
},
|
||||
]);
|
||||
await db.insert(issues).values([
|
||||
{
|
||||
id: inProgressIssueId,
|
||||
companyId,
|
||||
title: "Copied in-flight issue",
|
||||
status: "in_progress",
|
||||
priority: "medium",
|
||||
assigneeAgentId: agentId,
|
||||
issueNumber: 1,
|
||||
identifier: "WTQ-1",
|
||||
executionAgentNameKey: "codexcoder",
|
||||
executionLockedAt: new Date("2026-04-18T00:00:00.000Z"),
|
||||
},
|
||||
{
|
||||
id: todoIssueId,
|
||||
companyId,
|
||||
title: "Copied assigned todo issue",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
assigneeAgentId: agentId,
|
||||
issueNumber: 2,
|
||||
identifier: "WTQ-2",
|
||||
},
|
||||
{
|
||||
id: reviewIssueId,
|
||||
companyId,
|
||||
title: "Copied assigned review issue",
|
||||
status: "in_review",
|
||||
priority: "medium",
|
||||
assigneeAgentId: idleAgentId,
|
||||
issueNumber: 3,
|
||||
identifier: "WTQ-3",
|
||||
},
|
||||
{
|
||||
id: userIssueId,
|
||||
companyId,
|
||||
title: "Copied user issue",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
assigneeUserId: "user-1",
|
||||
issueNumber: 4,
|
||||
identifier: "WTQ-4",
|
||||
},
|
||||
]);
|
||||
|
||||
await expect(quarantineSeededWorktreeExecutionState(tempDb.connectionString)).resolves.toEqual({
|
||||
disabledTimerHeartbeats: 1,
|
||||
resetRunningAgents: 1,
|
||||
quarantinedInProgressIssues: 1,
|
||||
unassignedTodoIssues: 1,
|
||||
unassignedReviewIssues: 1,
|
||||
});
|
||||
|
||||
const [quarantinedAgent] = await db.select().from(agents).where(eq(agents.id, agentId));
|
||||
expect(quarantinedAgent?.status).toBe("idle");
|
||||
expect(quarantinedAgent?.runtimeConfig).toMatchObject({
|
||||
heartbeat: { enabled: false, intervalSec: 60 },
|
||||
wakeOnDemand: true,
|
||||
});
|
||||
|
||||
const [inProgressIssue] = await db.select().from(issues).where(eq(issues.id, inProgressIssueId));
|
||||
expect(inProgressIssue?.status).toBe("blocked");
|
||||
expect(inProgressIssue?.assigneeAgentId).toBeNull();
|
||||
expect(inProgressIssue?.executionAgentNameKey).toBeNull();
|
||||
expect(inProgressIssue?.executionLockedAt).toBeNull();
|
||||
|
||||
const [todoIssue] = await db.select().from(issues).where(eq(issues.id, todoIssueId));
|
||||
expect(todoIssue?.status).toBe("todo");
|
||||
expect(todoIssue?.assigneeAgentId).toBeNull();
|
||||
|
||||
const [reviewIssue] = await db.select().from(issues).where(eq(issues.id, reviewIssueId));
|
||||
expect(reviewIssue?.status).toBe("in_review");
|
||||
expect(reviewIssue?.assigneeAgentId).toBeNull();
|
||||
|
||||
const [userIssue] = await db.select().from(issues).where(eq(issues.id, userIssueId));
|
||||
expect(userIssue?.status).toBe("todo");
|
||||
expect(userIssue?.assigneeUserId).toBe("user-1");
|
||||
|
||||
const comments = await db.select().from(issueComments).where(eq(issueComments.issueId, inProgressIssueId));
|
||||
expect(comments).toHaveLength(1);
|
||||
expect(comments[0]?.body).toContain("Quarantined during worktree seed");
|
||||
} finally {
|
||||
await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
||||
await tempDb.cleanup();
|
||||
}
|
||||
}, 20_000);
|
||||
|
||||
it("copies the source local_encrypted secrets key into the seeded worktree instance", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-secrets-"));
|
||||
const originalInlineMasterKey = process.env.PAPERCLIP_SECRETS_MASTER_KEY;
|
||||
@@ -373,6 +512,97 @@ describe("worktree helpers", () => {
|
||||
}
|
||||
});
|
||||
|
||||
itEmbeddedPostgres(
|
||||
"seeds authenticated users into minimally cloned worktree instances",
|
||||
async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-auth-seed-"));
|
||||
const worktreeRoot = path.join(tempRoot, "PAP-999-auth-seed");
|
||||
const sourceHome = path.join(tempRoot, "source-home");
|
||||
const sourceConfigDir = path.join(sourceHome, "instances", "source");
|
||||
const sourceConfigPath = path.join(sourceConfigDir, "config.json");
|
||||
const sourceEnvPath = path.join(sourceConfigDir, ".env");
|
||||
const sourceKeyPath = path.join(sourceConfigDir, "secrets", "master.key");
|
||||
const worktreeHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const originalCwd = process.cwd();
|
||||
const sourceDb = await startEmbeddedPostgresTestDatabase("paperclip-worktree-auth-source-");
|
||||
|
||||
try {
|
||||
const sourceDbClient = createDb(sourceDb.connectionString);
|
||||
await sourceDbClient.insert(authUsers).values({
|
||||
id: "user-existing",
|
||||
email: "existing@paperclip.ing",
|
||||
name: "Existing User",
|
||||
emailVerified: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
|
||||
fs.mkdirSync(path.dirname(sourceKeyPath), { recursive: true });
|
||||
fs.mkdirSync(worktreeRoot, { recursive: true });
|
||||
|
||||
const sourceConfig = buildSourceConfig();
|
||||
sourceConfig.database = {
|
||||
mode: "postgres",
|
||||
embeddedPostgresDataDir: path.join(sourceConfigDir, "db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(sourceConfigDir, "backups"),
|
||||
},
|
||||
connectionString: sourceDb.connectionString,
|
||||
};
|
||||
sourceConfig.logging.logDir = path.join(sourceConfigDir, "logs");
|
||||
sourceConfig.storage.localDisk.baseDir = path.join(sourceConfigDir, "storage");
|
||||
sourceConfig.secrets.localEncrypted.keyFilePath = sourceKeyPath;
|
||||
|
||||
fs.writeFileSync(sourceConfigPath, JSON.stringify(sourceConfig, null, 2) + "\n", "utf8");
|
||||
fs.writeFileSync(sourceEnvPath, "", "utf8");
|
||||
fs.writeFileSync(sourceKeyPath, "source-master-key", "utf8");
|
||||
|
||||
process.chdir(worktreeRoot);
|
||||
await worktreeInitCommand({
|
||||
name: "PAP-999-auth-seed",
|
||||
home: worktreeHome,
|
||||
fromConfig: sourceConfigPath,
|
||||
force: true,
|
||||
});
|
||||
|
||||
const targetConfig = JSON.parse(
|
||||
fs.readFileSync(path.join(worktreeRoot, ".paperclip", "config.json"), "utf8"),
|
||||
) as PaperclipConfig;
|
||||
const { default: EmbeddedPostgres } = await import("embedded-postgres");
|
||||
const targetPg = new EmbeddedPostgres({
|
||||
databaseDir: targetConfig.database.embeddedPostgresDataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port: targetConfig.database.embeddedPostgresPort,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
|
||||
await targetPg.start();
|
||||
try {
|
||||
const targetDb = createDb(
|
||||
`postgres://paperclip:paperclip@127.0.0.1:${targetConfig.database.embeddedPostgresPort}/paperclip`,
|
||||
);
|
||||
const seededUsers = await targetDb.select().from(authUsers);
|
||||
expect(seededUsers.some((row) => row.email === "existing@paperclip.ing")).toBe(true);
|
||||
} finally {
|
||||
await targetPg.stop();
|
||||
}
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
await sourceDb.cleanup();
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
},
|
||||
30000,
|
||||
);
|
||||
|
||||
it("avoids ports already claimed by sibling worktree instance configs", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-claimed-ports-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
@@ -652,7 +882,7 @@ describe("worktree helpers", () => {
|
||||
}
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
}, 20_000);
|
||||
}, 30_000);
|
||||
|
||||
it("restores the current worktree config and instance data if reseed fails", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-reseed-rollback-"));
|
||||
@@ -809,7 +1039,7 @@ describe("worktree helpers", () => {
|
||||
execFileSync("git", ["worktree", "remove", "--force", worktreePath], { cwd: repoRoot, stdio: "ignore" });
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
}, 15_000);
|
||||
|
||||
it("creates and initializes a worktree from the top-level worktree:make command", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-make-"));
|
||||
|
||||
@@ -61,6 +61,7 @@ interface IssueUpdateOptions extends BaseClientOptions {
|
||||
interface IssueCommentOptions extends BaseClientOptions {
|
||||
body: string;
|
||||
reopen?: boolean;
|
||||
resume?: boolean;
|
||||
}
|
||||
|
||||
interface IssueCheckoutOptions extends BaseClientOptions {
|
||||
@@ -241,12 +242,14 @@ export function registerIssueCommands(program: Command): void {
|
||||
.argument("<issueId>", "Issue ID")
|
||||
.requiredOption("--body <text>", "Comment body")
|
||||
.option("--reopen", "Reopen if issue is done/cancelled")
|
||||
.option("--resume", "Request explicit follow-up and wake the assignee when resumable")
|
||||
.action(async (issueId: string, opts: IssueCommentOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const payload = addIssueCommentSchema.parse({
|
||||
body: opts.body,
|
||||
reopen: opts.reopen,
|
||||
resume: opts.resume,
|
||||
});
|
||||
const comment = await ctx.api.post<IssueComment>(`/api/issues/${issueId}/comments`, payload);
|
||||
printOutput(comment, { json: ctx.json });
|
||||
|
||||
174
cli/src/commands/env-lab.ts
Normal file
174
cli/src/commands/env-lab.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
import path from "node:path";
|
||||
import type { Command } from "commander";
|
||||
import * as p from "@clack/prompts";
|
||||
import pc from "picocolors";
|
||||
import {
|
||||
buildSshEnvLabFixtureConfig,
|
||||
getSshEnvLabSupport,
|
||||
readSshEnvLabFixtureStatus,
|
||||
startSshEnvLabFixture,
|
||||
stopSshEnvLabFixture,
|
||||
} from "@paperclipai/adapter-utils/ssh";
|
||||
import { resolvePaperclipInstanceId, resolvePaperclipInstanceRoot } from "../config/home.js";
|
||||
|
||||
export function resolveEnvLabSshStatePath(instanceId?: string): string {
|
||||
const resolvedInstanceId = resolvePaperclipInstanceId(instanceId);
|
||||
return path.resolve(
|
||||
resolvePaperclipInstanceRoot(resolvedInstanceId),
|
||||
"env-lab",
|
||||
"ssh-fixture",
|
||||
"state.json",
|
||||
);
|
||||
}
|
||||
|
||||
function printJson(value: unknown) {
|
||||
process.stdout.write(`${JSON.stringify(value, null, 2)}\n`);
|
||||
}
|
||||
|
||||
function summarizeFixture(state: {
|
||||
host: string;
|
||||
port: number;
|
||||
username: string;
|
||||
workspaceDir: string;
|
||||
sshdLogPath: string;
|
||||
}) {
|
||||
p.log.message(`Host: ${pc.cyan(state.host)}:${pc.cyan(String(state.port))}`);
|
||||
p.log.message(`User: ${pc.cyan(state.username)}`);
|
||||
p.log.message(`Workspace: ${pc.cyan(state.workspaceDir)}`);
|
||||
p.log.message(`Log: ${pc.dim(state.sshdLogPath)}`);
|
||||
}
|
||||
|
||||
export async function collectEnvLabDoctorStatus(opts: { instance?: string }) {
|
||||
const statePath = resolveEnvLabSshStatePath(opts.instance);
|
||||
const [sshSupport, sshStatus] = await Promise.all([
|
||||
getSshEnvLabSupport(),
|
||||
readSshEnvLabFixtureStatus(statePath),
|
||||
]);
|
||||
const environment = sshStatus.state ? await buildSshEnvLabFixtureConfig(sshStatus.state) : null;
|
||||
|
||||
return {
|
||||
statePath,
|
||||
ssh: {
|
||||
supported: sshSupport.supported,
|
||||
reason: sshSupport.reason,
|
||||
running: sshStatus.running,
|
||||
state: sshStatus.state,
|
||||
environment,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export async function envLabUpCommand(opts: { instance?: string; json?: boolean }) {
|
||||
const statePath = resolveEnvLabSshStatePath(opts.instance);
|
||||
const state = await startSshEnvLabFixture({ statePath });
|
||||
const environment = await buildSshEnvLabFixtureConfig(state);
|
||||
|
||||
if (opts.json) {
|
||||
printJson({ state, environment });
|
||||
return;
|
||||
}
|
||||
|
||||
p.log.success("SSH env-lab fixture is running.");
|
||||
summarizeFixture(state);
|
||||
p.log.message(`State: ${pc.dim(statePath)}`);
|
||||
}
|
||||
|
||||
export async function envLabStatusCommand(opts: { instance?: string; json?: boolean }) {
|
||||
const statePath = resolveEnvLabSshStatePath(opts.instance);
|
||||
const status = await readSshEnvLabFixtureStatus(statePath);
|
||||
const environment = status.state ? await buildSshEnvLabFixtureConfig(status.state) : null;
|
||||
|
||||
if (opts.json) {
|
||||
printJson({ ...status, environment, statePath });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!status.state || !status.running) {
|
||||
p.log.info(`SSH env-lab fixture is not running (${pc.dim(statePath)}).`);
|
||||
return;
|
||||
}
|
||||
|
||||
p.log.success("SSH env-lab fixture is running.");
|
||||
summarizeFixture(status.state);
|
||||
p.log.message(`State: ${pc.dim(statePath)}`);
|
||||
}
|
||||
|
||||
export async function envLabDownCommand(opts: { instance?: string; json?: boolean }) {
|
||||
const statePath = resolveEnvLabSshStatePath(opts.instance);
|
||||
const stopped = await stopSshEnvLabFixture(statePath);
|
||||
|
||||
if (opts.json) {
|
||||
printJson({ stopped, statePath });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!stopped) {
|
||||
p.log.info(`No SSH env-lab fixture was running (${pc.dim(statePath)}).`);
|
||||
return;
|
||||
}
|
||||
|
||||
p.log.success("SSH env-lab fixture stopped.");
|
||||
p.log.message(`State: ${pc.dim(statePath)}`);
|
||||
}
|
||||
|
||||
export async function envLabDoctorCommand(opts: { instance?: string; json?: boolean }) {
|
||||
const status = await collectEnvLabDoctorStatus(opts);
|
||||
|
||||
if (opts.json) {
|
||||
printJson(status);
|
||||
return;
|
||||
}
|
||||
|
||||
if (status.ssh.supported) {
|
||||
p.log.success("SSH fixture prerequisites are installed.");
|
||||
} else {
|
||||
p.log.warn(`SSH fixture prerequisites are incomplete: ${status.ssh.reason ?? "unknown reason"}`);
|
||||
}
|
||||
|
||||
if (status.ssh.state && status.ssh.running) {
|
||||
p.log.success("SSH env-lab fixture is running.");
|
||||
summarizeFixture(status.ssh.state);
|
||||
p.log.message(`Private key: ${pc.dim(status.ssh.state.clientPrivateKeyPath)}`);
|
||||
p.log.message(`Known hosts: ${pc.dim(status.ssh.state.knownHostsPath)}`);
|
||||
} else if (status.ssh.state) {
|
||||
p.log.warn("SSH env-lab fixture state exists, but the process is not running.");
|
||||
p.log.message(`State: ${pc.dim(status.statePath)}`);
|
||||
} else {
|
||||
p.log.info("SSH env-lab fixture is not running.");
|
||||
p.log.message(`State: ${pc.dim(status.statePath)}`);
|
||||
}
|
||||
|
||||
p.log.message(`Cleanup: ${pc.dim("pnpm paperclipai env-lab down")}`);
|
||||
}
|
||||
|
||||
export function registerEnvLabCommands(program: Command) {
|
||||
const envLab = program.command("env-lab").description("Deterministic local environment fixtures");
|
||||
|
||||
envLab
|
||||
.command("up")
|
||||
.description("Start the default SSH env-lab fixture")
|
||||
.option("-i, --instance <id>", "Paperclip instance id (default: current/default)")
|
||||
.option("--json", "Print machine-readable fixture details")
|
||||
.action(envLabUpCommand);
|
||||
|
||||
envLab
|
||||
.command("status")
|
||||
.description("Show the current SSH env-lab fixture state")
|
||||
.option("-i, --instance <id>", "Paperclip instance id (default: current/default)")
|
||||
.option("--json", "Print machine-readable fixture details")
|
||||
.action(envLabStatusCommand);
|
||||
|
||||
envLab
|
||||
.command("down")
|
||||
.description("Stop the default SSH env-lab fixture")
|
||||
.option("-i, --instance <id>", "Paperclip instance id (default: current/default)")
|
||||
.option("--json", "Print machine-readable stop details")
|
||||
.action(envLabDownCommand);
|
||||
|
||||
envLab
|
||||
.command("doctor")
|
||||
.description("Check SSH fixture prerequisites and current status")
|
||||
.option("-i, --instance <id>", "Paperclip instance id (default: current/default)")
|
||||
.option("--json", "Print machine-readable diagnostic details")
|
||||
.action(envLabDoctorCommand);
|
||||
}
|
||||
@@ -75,11 +75,6 @@ function nonEmpty(value: string | null | undefined): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
function isLoopbackHost(hostname: string): boolean {
|
||||
const value = hostname.trim().toLowerCase();
|
||||
return value === "127.0.0.1" || value === "localhost" || value === "::1";
|
||||
}
|
||||
|
||||
export function sanitizeWorktreeInstanceId(rawValue: string): string {
|
||||
const trimmed = rawValue.trim().toLowerCase();
|
||||
const normalized = trimmed
|
||||
@@ -168,7 +163,8 @@ export function rewriteLocalUrlPort(rawUrl: string | undefined, port: number): s
|
||||
if (!rawUrl) return undefined;
|
||||
try {
|
||||
const parsed = new URL(rawUrl);
|
||||
if (!isLoopbackHost(parsed.hostname)) return rawUrl;
|
||||
// The URL API normalizes default ports like :80/:443 to "", so treat them as stable URLs.
|
||||
if (!parsed.port) return rawUrl;
|
||||
parsed.port = String(port);
|
||||
return parsed.toString();
|
||||
} catch {
|
||||
|
||||
@@ -93,6 +93,7 @@ type WorktreeInitOptions = {
|
||||
dbPort?: number;
|
||||
seed?: boolean;
|
||||
seedMode?: string;
|
||||
preserveLiveWork?: boolean;
|
||||
force?: boolean;
|
||||
};
|
||||
|
||||
@@ -126,6 +127,7 @@ type WorktreeReseedOptions = {
|
||||
fromDataDir?: string;
|
||||
fromInstance?: string;
|
||||
seedMode?: string;
|
||||
preserveLiveWork?: boolean;
|
||||
yes?: boolean;
|
||||
allowLiveTarget?: boolean;
|
||||
};
|
||||
@@ -137,6 +139,7 @@ type WorktreeRepairOptions = {
|
||||
fromDataDir?: string;
|
||||
fromInstance?: string;
|
||||
seedMode?: string;
|
||||
preserveLiveWork?: boolean;
|
||||
noSeed?: boolean;
|
||||
allowLiveTarget?: boolean;
|
||||
};
|
||||
@@ -179,6 +182,8 @@ type CopiedGitHooksResult = {
|
||||
|
||||
type SeedWorktreeDatabaseResult = {
|
||||
backupSummary: string;
|
||||
pausedScheduledRoutines: number;
|
||||
executionQuarantine: SeededWorktreeExecutionQuarantineSummary;
|
||||
reboundWorkspaces: Array<{
|
||||
name: string;
|
||||
fromCwd: string;
|
||||
@@ -186,6 +191,14 @@ type SeedWorktreeDatabaseResult = {
|
||||
}>;
|
||||
};
|
||||
|
||||
export type SeededWorktreeExecutionQuarantineSummary = {
|
||||
disabledTimerHeartbeats: number;
|
||||
resetRunningAgents: number;
|
||||
quarantinedInProgressIssues: number;
|
||||
unassignedTodoIssues: number;
|
||||
unassignedReviewIssues: number;
|
||||
};
|
||||
|
||||
function nonEmpty(value: string | null | undefined): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
@@ -198,6 +211,18 @@ function isCurrentSourceConfigPath(sourceConfigPath: string): boolean {
|
||||
return path.resolve(currentConfigPath) === path.resolve(sourceConfigPath);
|
||||
}
|
||||
|
||||
function formatSeededWorktreeExecutionQuarantineSummary(
|
||||
summary: SeededWorktreeExecutionQuarantineSummary,
|
||||
): string {
|
||||
return [
|
||||
`disabled timer heartbeats: ${summary.disabledTimerHeartbeats}`,
|
||||
`reset running agents: ${summary.resetRunningAgents}`,
|
||||
`quarantined in-progress issues: ${summary.quarantinedInProgressIssues}`,
|
||||
`unassigned todo issues: ${summary.unassignedTodoIssues}`,
|
||||
`unassigned review issues: ${summary.unassignedReviewIssues}`,
|
||||
].join(", ");
|
||||
}
|
||||
|
||||
const WORKTREE_NAME_PREFIX = "paperclip-";
|
||||
|
||||
function resolveWorktreeMakeName(name: string): string {
|
||||
@@ -1119,6 +1144,133 @@ export async function pauseSeededScheduledRoutines(connectionString: string): Pr
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY_SEEDED_WORKTREE_EXECUTION_QUARANTINE_SUMMARY: SeededWorktreeExecutionQuarantineSummary = {
|
||||
disabledTimerHeartbeats: 0,
|
||||
resetRunningAgents: 0,
|
||||
quarantinedInProgressIssues: 0,
|
||||
unassignedTodoIssues: 0,
|
||||
unassignedReviewIssues: 0,
|
||||
};
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return Boolean(value) && typeof value === "object" && !Array.isArray(value);
|
||||
}
|
||||
|
||||
function isEnabledValue(value: unknown): boolean {
|
||||
return value === true || value === "true" || value === 1 || value === "1";
|
||||
}
|
||||
|
||||
function normalizeWorktreeRuntimeConfig(runtimeConfig: unknown): {
|
||||
runtimeConfig: Record<string, unknown>;
|
||||
disabledTimerHeartbeat: boolean;
|
||||
changed: boolean;
|
||||
} {
|
||||
const nextRuntimeConfig = isRecord(runtimeConfig) ? { ...runtimeConfig } : {};
|
||||
const heartbeat = isRecord(nextRuntimeConfig.heartbeat) ? { ...nextRuntimeConfig.heartbeat } : null;
|
||||
if (!heartbeat) {
|
||||
return { runtimeConfig: nextRuntimeConfig, disabledTimerHeartbeat: false, changed: false };
|
||||
}
|
||||
|
||||
const disabledTimerHeartbeat = isEnabledValue(heartbeat.enabled);
|
||||
if (heartbeat.enabled !== false) {
|
||||
heartbeat.enabled = false;
|
||||
nextRuntimeConfig.heartbeat = heartbeat;
|
||||
return { runtimeConfig: nextRuntimeConfig, disabledTimerHeartbeat, changed: true };
|
||||
}
|
||||
|
||||
return { runtimeConfig: nextRuntimeConfig, disabledTimerHeartbeat: false, changed: false };
|
||||
}
|
||||
|
||||
export async function quarantineSeededWorktreeExecutionState(
|
||||
connectionString: string,
|
||||
): Promise<SeededWorktreeExecutionQuarantineSummary> {
|
||||
const db = createDb(connectionString);
|
||||
const summary = { ...EMPTY_SEEDED_WORKTREE_EXECUTION_QUARANTINE_SUMMARY };
|
||||
try {
|
||||
await db.transaction(async (tx) => {
|
||||
const seededAgents = await tx
|
||||
.select({
|
||||
id: agents.id,
|
||||
status: agents.status,
|
||||
runtimeConfig: agents.runtimeConfig,
|
||||
})
|
||||
.from(agents);
|
||||
|
||||
for (const agent of seededAgents) {
|
||||
const normalized = normalizeWorktreeRuntimeConfig(agent.runtimeConfig);
|
||||
const nextStatus = agent.status === "running" ? "idle" : agent.status;
|
||||
if (normalized.disabledTimerHeartbeat) {
|
||||
summary.disabledTimerHeartbeats += 1;
|
||||
}
|
||||
if (agent.status === "running") {
|
||||
summary.resetRunningAgents += 1;
|
||||
}
|
||||
if (normalized.changed || nextStatus !== agent.status) {
|
||||
await tx
|
||||
.update(agents)
|
||||
.set({
|
||||
runtimeConfig: normalized.runtimeConfig,
|
||||
status: nextStatus,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(agents.id, agent.id));
|
||||
}
|
||||
}
|
||||
|
||||
const affectedIssues = await tx
|
||||
.select({
|
||||
id: issues.id,
|
||||
companyId: issues.companyId,
|
||||
status: issues.status,
|
||||
})
|
||||
.from(issues)
|
||||
.where(
|
||||
and(
|
||||
sql`${issues.assigneeAgentId} is not null`,
|
||||
sql`${issues.assigneeUserId} is null`,
|
||||
inArray(issues.status, ["todo", "in_progress", "in_review"]),
|
||||
),
|
||||
);
|
||||
|
||||
for (const issue of affectedIssues) {
|
||||
const nextStatus = issue.status === "in_progress" ? "blocked" : issue.status;
|
||||
await tx
|
||||
.update(issues)
|
||||
.set({
|
||||
status: nextStatus,
|
||||
assigneeAgentId: null,
|
||||
checkoutRunId: null,
|
||||
executionRunId: null,
|
||||
executionAgentNameKey: null,
|
||||
executionLockedAt: null,
|
||||
executionWorkspaceId: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(issues.id, issue.id));
|
||||
|
||||
if (issue.status === "in_progress") {
|
||||
summary.quarantinedInProgressIssues += 1;
|
||||
await tx.insert(issueComments).values({
|
||||
companyId: issue.companyId,
|
||||
issueId: issue.id,
|
||||
body:
|
||||
"Quarantined during worktree seed so copied in-flight work does not auto-run in this isolated instance. " +
|
||||
"Reassign or unblock here only if you intentionally want the worktree instance to own this task.",
|
||||
});
|
||||
} else if (issue.status === "todo") {
|
||||
summary.unassignedTodoIssues += 1;
|
||||
} else if (issue.status === "in_review") {
|
||||
summary.unassignedReviewIssues += 1;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return summary;
|
||||
} finally {
|
||||
await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
async function seedWorktreeDatabase(input: {
|
||||
sourceConfigPath: string;
|
||||
sourceConfig: PaperclipConfig;
|
||||
@@ -1126,6 +1278,7 @@ async function seedWorktreeDatabase(input: {
|
||||
targetPaths: WorktreeLocalPaths;
|
||||
instanceId: string;
|
||||
seedMode: WorktreeSeedMode;
|
||||
preserveLiveWork?: boolean;
|
||||
}): Promise<SeedWorktreeDatabaseResult> {
|
||||
const seedPlan = resolveWorktreeSeedPlan(input.seedMode);
|
||||
const sourceEnvFile = resolvePaperclipEnvFile(input.sourceConfigPath);
|
||||
@@ -1158,6 +1311,7 @@ async function seedWorktreeDatabase(input: {
|
||||
backupDir: path.resolve(input.targetPaths.backupDir, "seed"),
|
||||
retention: { dailyDays: 7, weeklyWeeks: 4, monthlyMonths: 1 },
|
||||
filenamePrefix: `${input.instanceId}-seed`,
|
||||
backupEngine: "javascript",
|
||||
includeMigrationJournal: true,
|
||||
excludeTables: seedPlan.excludedTables,
|
||||
nullifyColumns: seedPlan.nullifyColumns,
|
||||
@@ -1176,7 +1330,10 @@ async function seedWorktreeDatabase(input: {
|
||||
backupFile: backup.backupFile,
|
||||
});
|
||||
await applyPendingMigrations(targetConnectionString);
|
||||
await pauseSeededScheduledRoutines(targetConnectionString);
|
||||
const executionQuarantine = input.preserveLiveWork
|
||||
? { ...EMPTY_SEEDED_WORKTREE_EXECUTION_QUARANTINE_SUMMARY }
|
||||
: await quarantineSeededWorktreeExecutionState(targetConnectionString);
|
||||
const pausedScheduledRoutines = await pauseSeededScheduledRoutines(targetConnectionString);
|
||||
const reboundWorkspaces = await rebindSeededProjectWorkspaces({
|
||||
targetConnectionString,
|
||||
currentCwd: input.targetPaths.cwd,
|
||||
@@ -1184,6 +1341,8 @@ async function seedWorktreeDatabase(input: {
|
||||
|
||||
return {
|
||||
backupSummary: formatDatabaseBackupResult(backup),
|
||||
pausedScheduledRoutines,
|
||||
executionQuarantine,
|
||||
reboundWorkspaces,
|
||||
};
|
||||
} finally {
|
||||
@@ -1262,6 +1421,8 @@ async function runWorktreeInit(opts: WorktreeInitOptions): Promise<void> {
|
||||
const copiedGitHooks = copyGitHooksToWorktreeGitDir(cwd);
|
||||
|
||||
let seedSummary: string | null = null;
|
||||
let seedExecutionQuarantineSummary: SeededWorktreeExecutionQuarantineSummary | null = null;
|
||||
let pausedScheduledRoutineCount: number | null = null;
|
||||
let reboundWorkspaceSummary: SeedWorktreeDatabaseResult["reboundWorkspaces"] = [];
|
||||
if (opts.seed !== false) {
|
||||
if (!sourceConfig) {
|
||||
@@ -1279,8 +1440,11 @@ async function runWorktreeInit(opts: WorktreeInitOptions): Promise<void> {
|
||||
targetPaths: paths,
|
||||
instanceId,
|
||||
seedMode,
|
||||
preserveLiveWork: opts.preserveLiveWork,
|
||||
});
|
||||
seedSummary = seeded.backupSummary;
|
||||
seedExecutionQuarantineSummary = seeded.executionQuarantine;
|
||||
pausedScheduledRoutineCount = seeded.pausedScheduledRoutines;
|
||||
reboundWorkspaceSummary = seeded.reboundWorkspaces;
|
||||
spinner.stop(`Seeded isolated worktree database (${seedMode}).`);
|
||||
} catch (error) {
|
||||
@@ -1303,6 +1467,16 @@ async function runWorktreeInit(opts: WorktreeInitOptions): Promise<void> {
|
||||
if (seedSummary) {
|
||||
p.log.message(pc.dim(`Seed mode: ${seedMode}`));
|
||||
p.log.message(pc.dim(`Seed snapshot: ${seedSummary}`));
|
||||
if (opts.preserveLiveWork) {
|
||||
p.log.warning("Preserved copied live work; this worktree instance may auto-run source-instance assignments.");
|
||||
} else if (seedExecutionQuarantineSummary) {
|
||||
p.log.message(
|
||||
pc.dim(`Seed execution quarantine: ${formatSeededWorktreeExecutionQuarantineSummary(seedExecutionQuarantineSummary)}`),
|
||||
);
|
||||
}
|
||||
if (pausedScheduledRoutineCount != null) {
|
||||
p.log.message(pc.dim(`Paused scheduled routines: ${pausedScheduledRoutineCount}`));
|
||||
}
|
||||
for (const rebound of reboundWorkspaceSummary) {
|
||||
p.log.message(
|
||||
pc.dim(`Rebound workspace ${rebound.name}: ${rebound.fromCwd} -> ${rebound.toCwd}`),
|
||||
@@ -2947,11 +3121,20 @@ async function runWorktreeReseed(opts: WorktreeReseedOptions): Promise<void> {
|
||||
targetPaths,
|
||||
instanceId: targetPaths.instanceId,
|
||||
seedMode,
|
||||
preserveLiveWork: opts.preserveLiveWork,
|
||||
});
|
||||
spinner.stop(`Reseeded ${targetEndpoint.label} (${seedMode}).`);
|
||||
p.log.message(pc.dim(`Source: ${source.configPath}`));
|
||||
p.log.message(pc.dim(`Target: ${targetEndpoint.configPath}`));
|
||||
p.log.message(pc.dim(`Seed snapshot: ${seeded.backupSummary}`));
|
||||
if (opts.preserveLiveWork) {
|
||||
p.log.warning("Preserved copied live work; this worktree instance may auto-run source-instance assignments.");
|
||||
} else {
|
||||
p.log.message(
|
||||
pc.dim(`Seed execution quarantine: ${formatSeededWorktreeExecutionQuarantineSummary(seeded.executionQuarantine)}`),
|
||||
);
|
||||
}
|
||||
p.log.message(pc.dim(`Paused scheduled routines: ${seeded.pausedScheduledRoutines}`));
|
||||
for (const rebound of seeded.reboundWorkspaces) {
|
||||
p.log.message(
|
||||
pc.dim(`Rebound workspace ${rebound.name}: ${rebound.fromCwd} -> ${rebound.toCwd}`),
|
||||
@@ -3015,6 +3198,7 @@ export async function worktreeRepairCommand(opts: WorktreeRepairOptions): Promis
|
||||
fromConfig: source.configPath,
|
||||
to: target.rootPath,
|
||||
seedMode,
|
||||
preserveLiveWork: opts.preserveLiveWork,
|
||||
yes: true,
|
||||
allowLiveTarget: opts.allowLiveTarget,
|
||||
});
|
||||
@@ -3047,6 +3231,7 @@ export async function worktreeRepairCommand(opts: WorktreeRepairOptions): Promis
|
||||
fromInstance: opts.fromInstance,
|
||||
seed: opts.noSeed ? false : true,
|
||||
seedMode,
|
||||
preserveLiveWork: opts.preserveLiveWork,
|
||||
force: true,
|
||||
});
|
||||
} finally {
|
||||
@@ -3070,6 +3255,7 @@ export function registerWorktreeCommands(program: Command): void {
|
||||
.option("--server-port <port>", "Preferred server port", (value) => Number(value))
|
||||
.option("--db-port <port>", "Preferred embedded Postgres port", (value) => Number(value))
|
||||
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: minimal)", "minimal")
|
||||
.option("--preserve-live-work", "Do not quarantine copied agent timers or assigned open issues in the seeded worktree", false)
|
||||
.option("--no-seed", "Skip database seeding from the source instance")
|
||||
.option("--force", "Replace existing repo-local config and isolated instance data", false)
|
||||
.action(worktreeMakeCommand);
|
||||
@@ -3086,6 +3272,7 @@ export function registerWorktreeCommands(program: Command): void {
|
||||
.option("--server-port <port>", "Preferred server port", (value) => Number(value))
|
||||
.option("--db-port <port>", "Preferred embedded Postgres port", (value) => Number(value))
|
||||
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: minimal)", "minimal")
|
||||
.option("--preserve-live-work", "Do not quarantine copied agent timers or assigned open issues in the seeded worktree", false)
|
||||
.option("--no-seed", "Skip database seeding from the source instance")
|
||||
.option("--force", "Replace existing repo-local config and isolated instance data", false)
|
||||
.action(worktreeInitCommand);
|
||||
@@ -3125,6 +3312,7 @@ export function registerWorktreeCommands(program: Command): void {
|
||||
.option("--from-data-dir <path>", "Source PAPERCLIP_HOME used when deriving the source config")
|
||||
.option("--from-instance <id>", "Source instance id when deriving the source config")
|
||||
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: full)", "full")
|
||||
.option("--preserve-live-work", "Do not quarantine copied agent timers or assigned open issues in the seeded worktree", false)
|
||||
.option("--yes", "Skip the destructive confirmation prompt", false)
|
||||
.option("--allow-live-target", "Override the guard that requires the target worktree DB to be stopped first", false)
|
||||
.action(worktreeReseedCommand);
|
||||
@@ -3138,6 +3326,7 @@ export function registerWorktreeCommands(program: Command): void {
|
||||
.option("--from-data-dir <path>", "Source PAPERCLIP_HOME used when deriving the source config")
|
||||
.option("--from-instance <id>", "Source instance id when deriving the source config (default: default)")
|
||||
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: minimal)", "minimal")
|
||||
.option("--preserve-live-work", "Do not quarantine copied agent timers or assigned open issues in the seeded worktree", false)
|
||||
.option("--no-seed", "Repair metadata only and skip reseeding when bootstrapping a missing worktree config", false)
|
||||
.option("--allow-live-target", "Override the guard that requires the target worktree DB to be stopped first", false)
|
||||
.action(worktreeRepairCommand);
|
||||
|
||||
@@ -8,6 +8,7 @@ import { heartbeatRun } from "./commands/heartbeat-run.js";
|
||||
import { runCommand } from "./commands/run.js";
|
||||
import { bootstrapCeoInvite } from "./commands/auth-bootstrap-ceo.js";
|
||||
import { dbBackupCommand } from "./commands/db-backup.js";
|
||||
import { registerEnvLabCommands } from "./commands/env-lab.js";
|
||||
import { registerContextCommands } from "./commands/client/context.js";
|
||||
import { registerCompanyCommands } from "./commands/client/company.js";
|
||||
import { registerIssueCommands } from "./commands/client/issue.js";
|
||||
@@ -147,6 +148,7 @@ registerDashboardCommands(program);
|
||||
registerRoutineCommands(program);
|
||||
registerFeedbackCommands(program);
|
||||
registerWorktreeCommands(program);
|
||||
registerEnvLabCommands(program);
|
||||
registerPluginCommands(program);
|
||||
|
||||
const auth = program.command("auth").description("Authentication and bootstrap utilities");
|
||||
|
||||
11
doc/CLI.md
11
doc/CLI.md
@@ -2,7 +2,7 @@
|
||||
|
||||
Paperclip CLI now supports both:
|
||||
|
||||
- instance setup/diagnostics (`onboard`, `doctor`, `configure`, `env`, `allowed-hostname`)
|
||||
- instance setup/diagnostics (`onboard`, `doctor`, `configure`, `env`, `allowed-hostname`, `env-lab`)
|
||||
- control-plane client operations (issues, approvals, agents, activity, dashboard)
|
||||
|
||||
## Base Usage
|
||||
@@ -45,6 +45,15 @@ Allow an authenticated/private hostname (for example custom Tailscale DNS):
|
||||
pnpm paperclipai allowed-hostname dotta-macbook-pro
|
||||
```
|
||||
|
||||
Bring up the default local SSH fixture for environment testing:
|
||||
|
||||
```sh
|
||||
pnpm paperclipai env-lab up
|
||||
pnpm paperclipai env-lab doctor
|
||||
pnpm paperclipai env-lab status --json
|
||||
pnpm paperclipai env-lab down
|
||||
```
|
||||
|
||||
All client commands support:
|
||||
|
||||
- `--data-dir <path>`
|
||||
|
||||
@@ -27,6 +27,18 @@ pnpm db:migrate
|
||||
|
||||
When `DATABASE_URL` is unset, this command targets the current embedded PostgreSQL instance for your active Paperclip config/instance.
|
||||
|
||||
Issue reference mentions follow the normal migration path: the schema migration creates the tracking table, but it does not backfill historical issue titles, descriptions, comments, or documents automatically.
|
||||
|
||||
To backfill existing content manually after migrating, run:
|
||||
|
||||
```sh
|
||||
pnpm issue-references:backfill
|
||||
# optional: limit to one company
|
||||
pnpm issue-references:backfill -- --company <company-id>
|
||||
```
|
||||
|
||||
Future issue, comment, and document writes sync references automatically without running the backfill command.
|
||||
|
||||
This mode is ideal for local development and one-command installs.
|
||||
|
||||
Docker note: the Docker quickstart image also uses embedded PostgreSQL by default. Persist `/paperclip` to keep DB state across container restarts (see `doc/DOCKER.md`).
|
||||
@@ -94,6 +106,16 @@ Set `DATABASE_URL` in your `.env`:
|
||||
DATABASE_URL=postgres://postgres.[PROJECT-REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres
|
||||
```
|
||||
|
||||
For hosted deployments that use a pooled runtime URL, set
|
||||
`DATABASE_MIGRATION_URL` to the direct connection URL. Paperclip uses it for
|
||||
startup schema checks/migrations and plugin namespace migrations, while the app
|
||||
continues to use `DATABASE_URL` for runtime queries:
|
||||
|
||||
```sh
|
||||
DATABASE_URL=postgres://postgres.[PROJECT-REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres
|
||||
DATABASE_MIGRATION_URL=postgres://postgres.[PROJECT-REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:5432/postgres
|
||||
```
|
||||
|
||||
If using connection pooling (port 6543), the `postgres` client must disable prepared statements. Update `packages/db/src/client.ts`:
|
||||
|
||||
```ts
|
||||
|
||||
@@ -142,3 +142,4 @@ This prevents lockout when a user migrates from long-running local trusted usage
|
||||
- implementation plan: `doc/plans/deployment-auth-mode-consolidation.md`
|
||||
- V1 contract: `doc/SPEC-implementation.md`
|
||||
- operator workflows: `doc/DEVELOPING.md` and `doc/CLI.md`
|
||||
- invite/join state map: `doc/spec/invite-flow.md`
|
||||
|
||||
@@ -43,6 +43,17 @@ This starts:
|
||||
|
||||
`pnpm dev` and `pnpm dev:once` are now idempotent for the current repo and instance: if the matching Paperclip dev runner is already alive, Paperclip reports the existing process instead of starting a duplicate.
|
||||
|
||||
## Storybook
|
||||
|
||||
The board UI Storybook keeps stories and Storybook config under `ui/storybook/` so component review files stay out of the app source routes.
|
||||
|
||||
```sh
|
||||
pnpm storybook
|
||||
pnpm build-storybook
|
||||
```
|
||||
|
||||
These run the `@paperclipai/ui` Storybook on port `6006` and build the static output to `ui/storybook-static/`.
|
||||
|
||||
Inspect or stop the current repo's managed dev runner:
|
||||
|
||||
```sh
|
||||
@@ -209,6 +220,8 @@ Seed modes:
|
||||
- `full` makes a full logical clone of the source instance
|
||||
- `--no-seed` creates an empty isolated instance
|
||||
|
||||
Seeded worktree instances quarantine copied live execution by default for both `minimal` and `full` seeds. During restore, Paperclip disables copied agent timer heartbeats, resets copied `running` agents to `idle`, blocks and unassigns copied agent-owned `in_progress` issues, and unassigns copied agent-owned `todo`/`in_review` issues. This keeps a freshly booted worktree from starting agents for work already owned by the source instance. Pass `--preserve-live-work` only when you intentionally want the isolated worktree to resume copied assignments.
|
||||
|
||||
After `worktree init`, both the server and the CLI auto-load the repo-local `.paperclip/.env` when run inside that worktree, so normal commands like `pnpm dev`, `paperclipai doctor`, and `paperclipai db:backup` stay scoped to the worktree instance.
|
||||
|
||||
`pnpm dev` now fails fast in a linked git worktree when `.paperclip/.env` is missing, instead of silently booting against the default instance/port. If that happens, run `paperclipai worktree init` in the worktree first.
|
||||
@@ -222,6 +235,8 @@ That repo-local env also sets:
|
||||
- `PAPERCLIP_WORKTREE_COLOR=<hex-color>`
|
||||
|
||||
The server/UI use those values for worktree-specific branding such as the top banner and dynamically colored favicon.
|
||||
Authenticated worktree servers also use the `PAPERCLIP_INSTANCE_ID` value to scope Better Auth cookie names.
|
||||
Browser cookies are shared by host rather than port, so this prevents logging into one `127.0.0.1:<port>` worktree from replacing another worktree server's session cookie.
|
||||
|
||||
Print shell exports explicitly when needed:
|
||||
|
||||
|
||||
@@ -115,38 +115,6 @@ If the first real publish returns npm `E404`, check npm-side prerequisites befor
|
||||
- The initial publish must include `--access public` for a public scoped package.
|
||||
- npm also requires either account 2FA for publishing or a granular token that is allowed to bypass 2FA.
|
||||
|
||||
### Manual first publish for `@paperclipai/mcp-server`
|
||||
|
||||
If you need to publish only the MCP server package once by hand, use:
|
||||
|
||||
- `@paperclipai/mcp-server`
|
||||
|
||||
Recommended flow from the repo root:
|
||||
|
||||
```bash
|
||||
# optional sanity check: this 404s until the first publish exists
|
||||
npm view @paperclipai/mcp-server version
|
||||
|
||||
# make sure the build output is fresh
|
||||
pnpm --filter @paperclipai/mcp-server build
|
||||
|
||||
# confirm your local npm auth before the real publish
|
||||
npm whoami
|
||||
|
||||
# safe preview of the exact publish payload
|
||||
cd packages/mcp-server
|
||||
pnpm publish --dry-run --no-git-checks --access public
|
||||
|
||||
# real publish
|
||||
pnpm publish --no-git-checks --access public
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
- Publish from `packages/mcp-server/`, not the repo root.
|
||||
- If `npm view @paperclipai/mcp-server version` already returns the same version that is in [`packages/mcp-server/package.json`](../packages/mcp-server/package.json), do not republish. Bump the version or use the normal repo-wide release flow in [`scripts/release.sh`](../scripts/release.sh).
|
||||
- The same npm-side prerequisites apply as above: valid npm auth, permission to publish to the `@paperclipai` scope, `--access public`, and the required publish auth/2FA policy.
|
||||
|
||||
## Version formats
|
||||
|
||||
Paperclip uses calendar versions:
|
||||
|
||||
@@ -37,7 +37,7 @@ These decisions close open questions from `SPEC.md` for V1.
|
||||
| Visibility | Full visibility to board and all agents in same company |
|
||||
| Communication | Tasks + comments only (no separate chat system) |
|
||||
| Task ownership | Single assignee; atomic checkout required for `in_progress` transition |
|
||||
| Recovery | No automatic reassignment; work recovery stays manual/explicit |
|
||||
| Recovery | No automatic reassignment; control-plane recovery may retry lost execution continuity once, then uses explicit recovery issues or human escalation |
|
||||
| Agent adapters | Built-in `process` and `http` adapters |
|
||||
| Auth | Mode-dependent human auth (`local_trusted` implicit board in current code; authenticated mode uses sessions), API keys for agents |
|
||||
| Budget period | Monthly UTC calendar window |
|
||||
@@ -395,7 +395,14 @@ Side effects:
|
||||
- entering `done` sets `completed_at`
|
||||
- entering `cancelled` sets `cancelled_at`
|
||||
|
||||
Detailed ownership, execution, blocker, and crash-recovery semantics are documented in `doc/execution-semantics.md`.
|
||||
V1 non-terminal liveness rule:
|
||||
|
||||
- agent-owned `todo`, `in_progress`, `in_review`, and `blocked` issues must have a live execution path, an explicit waiting path, or an explicit recovery path
|
||||
- `in_review` is healthy only when a typed execution participant, pending issue-thread interaction or approval, user owner, active run, queued wake, or explicit recovery issue owns the next action
|
||||
- a blocked chain is covered only when each unresolved leaf issue is live or explicitly waiting
|
||||
- when Paperclip cannot safely infer the next action, it surfaces the problem through visible blocked/recovery work instead of silently completing or reassigning work
|
||||
|
||||
Detailed ownership, execution, blocker, active-run watchdog, crash-recovery, and non-terminal liveness semantics are documented in `doc/execution-semantics.md`.
|
||||
|
||||
## 8.3 Approval Status
|
||||
|
||||
@@ -484,6 +491,7 @@ All endpoints are under `/api` and return JSON.
|
||||
- `DELETE /issues/:issueId/documents/:key`
|
||||
- `POST /issues/:issueId/checkout`
|
||||
- `POST /issues/:issueId/release`
|
||||
- `POST /issues/:issueId/admin/force-release` (board-only lock recovery)
|
||||
- `POST /issues/:issueId/comments`
|
||||
- `GET /issues/:issueId/comments`
|
||||
- `POST /companies/:companyId/issues/:issueId/attachments` (multipart upload)
|
||||
@@ -508,6 +516,8 @@ Server behavior:
|
||||
2. if updated row count is 0, return `409` with current owner/status
|
||||
3. successful checkout sets `assignee_agent_id`, `status = in_progress`, and `started_at`
|
||||
|
||||
`POST /issues/:issueId/admin/force-release` is an operator recovery endpoint for stale harness locks. It requires board access to the issue company, clears checkout and execution run lock fields, and may clear the agent assignee when `clearAssignee=true` is passed. The route must write an `issue.admin_force_release` activity log entry containing the previous checkout and execution run IDs.
|
||||
|
||||
## 10.5 Projects
|
||||
|
||||
- `GET /companies/:companyId/projects`
|
||||
@@ -619,7 +629,7 @@ Per-agent schedule fields in `adapter_config`:
|
||||
|
||||
- `enabled` boolean
|
||||
- `intervalSec` integer (minimum 30)
|
||||
- `maxConcurrentRuns` fixed at `1` for V1
|
||||
- `maxConcurrentRuns` integer; new agents default to `5`
|
||||
|
||||
Scheduler must skip invocation when:
|
||||
|
||||
|
||||
BIN
doc/assets/pap-2189/desktop-1440x900-dark.png
Normal file
BIN
doc/assets/pap-2189/desktop-1440x900-dark.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 174 KiB |
BIN
doc/assets/pap-2189/desktop-1440x900-light.png
Normal file
BIN
doc/assets/pap-2189/desktop-1440x900-light.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 174 KiB |
BIN
doc/assets/pap-2189/mobile-390x844-dark.png
Normal file
BIN
doc/assets/pap-2189/mobile-390x844-dark.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 177 KiB |
BIN
doc/assets/pap-2189/mobile-390x844-light.png
Normal file
BIN
doc/assets/pap-2189/mobile-390x844-light.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 177 KiB |
@@ -1,7 +1,7 @@
|
||||
# Execution Semantics
|
||||
|
||||
Status: Current implementation guide
|
||||
Date: 2026-04-13
|
||||
Date: 2026-04-26
|
||||
Audience: Product and engineering
|
||||
|
||||
This document explains how Paperclip interprets issue assignment, issue status, execution runs, wakeups, parent/sub-issue structure, and blocker relationships.
|
||||
@@ -146,13 +146,27 @@ Use it for:
|
||||
- explicit waiting relationships
|
||||
- automatic wakeups when all blockers resolve
|
||||
|
||||
Blocked issues should stay idle while blockers remain unresolved. Paperclip should not create a queued heartbeat run for that issue until the final blocker is done and the `issue_blockers_resolved` wake can start real work.
|
||||
|
||||
If a parent is truly waiting on a child, model that with blockers. Do not rely on the parent/child relationship alone.
|
||||
|
||||
## 7. Consistent Execution Path Rules
|
||||
## 7. Non-Terminal Issue Liveness Contract
|
||||
|
||||
For agent-assigned, non-terminal, actionable issues, Paperclip should not leave work in a state where nobody is working it and nothing will wake it.
|
||||
For agent-owned, non-terminal issues, Paperclip should never leave work in a state where nobody is responsible for the next move and nothing will wake or surface it.
|
||||
|
||||
The relevant execution path depends on status.
|
||||
This is a visibility contract, not an auto-completion contract. If Paperclip cannot safely infer the next action, it should surface the ambiguity with a blocked state, a visible comment, or an explicit recovery issue. It must not silently mark work done from prose comments or guess that a dependency is complete.
|
||||
|
||||
An issue is healthy when the product can answer "what moves this forward next?" without requiring a human to reconstruct intent from the whole thread. An issue is stalled when it is non-terminal but has no live execution path, no explicit waiting path, and no recovery path.
|
||||
|
||||
The valid action-path primitives are:
|
||||
|
||||
- an active run linked to the issue
|
||||
- a queued wake or continuation that can be delivered to the responsible agent
|
||||
- a typed execution-policy participant, such as `executionState.currentParticipant`
|
||||
- a pending issue-thread interaction or linked approval that is waiting for a specific responder
|
||||
- a human owner via `assigneeUserId`
|
||||
- a first-class blocker chain whose unresolved leaf issues are themselves healthy
|
||||
- an open explicit recovery issue that names the owner and action needed to restore liveness
|
||||
|
||||
### Agent-assigned `todo`
|
||||
|
||||
@@ -160,9 +174,11 @@ This is dispatch state: ready to start, not yet actively claimed.
|
||||
|
||||
A healthy dispatch state means at least one of these is true:
|
||||
|
||||
- the issue already has a queued/running wake path
|
||||
- the issue is intentionally resting in `todo` after a successful agent heartbeat, not after an interrupted dispatch
|
||||
- the issue has been explicitly surfaced as stranded
|
||||
- the issue already has a queued wake path
|
||||
- the issue is intentionally resting in `todo` after a completed agent heartbeat, with no interrupted dispatch evidence
|
||||
- the issue has been explicitly surfaced as stranded through a visible blocked/recovery path
|
||||
|
||||
An assigned `todo` issue is stalled when dispatch was interrupted, no wake remains queued or running, and no recovery path has been opened.
|
||||
|
||||
### Agent-assigned `in_progress`
|
||||
|
||||
@@ -172,7 +188,39 @@ A healthy active-work state means at least one of these is true:
|
||||
|
||||
- there is an active run for the issue
|
||||
- there is already a queued continuation wake
|
||||
- the issue has been explicitly surfaced as stranded
|
||||
- there is an open explicit recovery issue for the lost execution path
|
||||
|
||||
An agent-owned `in_progress` issue is stalled when it has no active run, no queued continuation, and no explicit recovery surface. A still-running but silent process is not automatically stalled; it is handled by the active-run watchdog contract.
|
||||
|
||||
### `in_review`
|
||||
|
||||
This is review/approval state: execution is paused because the next move belongs to a reviewer, approver, board user, or recovery owner.
|
||||
|
||||
A healthy `in_review` issue has at least one valid action path:
|
||||
|
||||
- a typed execution-policy participant who can approve or request changes
|
||||
- a pending issue-thread interaction or linked approval waiting for a named responder
|
||||
- a human owner via `assigneeUserId`
|
||||
- an active run or queued wake that is expected to process the review state
|
||||
- an open explicit recovery issue for an ambiguous review handoff
|
||||
|
||||
Agent-assigned `in_review` with no typed participant is only healthy when one of the other paths exists. Assignment to the same agent that produced the handoff is not, by itself, a review path.
|
||||
|
||||
An `in_review` issue is stalled when it has no typed participant, no pending interaction or approval, no user owner, no active run, no queued wake, and no explicit recovery issue. Paperclip should surface that state as recovery work rather than silently completing the issue or leaving blocker chains parked indefinitely.
|
||||
|
||||
### `blocked`
|
||||
|
||||
This is explicit waiting state.
|
||||
|
||||
A healthy `blocked` issue has an explicit waiting path:
|
||||
|
||||
- first-class blockers exist, and each unresolved leaf has a valid action path under this contract
|
||||
- the issue is blocked on an explicit recovery issue that itself has a live or waiting path
|
||||
- the issue is waiting on a pending interaction, linked approval, human owner, or clearly named external owner/action
|
||||
|
||||
A blocker chain is covered only when its unresolved leaf is live or explicitly waiting. An intermediate `blocked` issue does not make the chain healthy by itself.
|
||||
|
||||
A `blocked` issue is stalled when the unresolved blocker leaf has no active run, queued wake, typed participant, pending interaction or approval, user owner, external owner/action, or recovery issue. In that case the parent should show the first stalled leaf instead of presenting the dependency as calmly covered.
|
||||
|
||||
## 8. Crash and Restart Recovery
|
||||
|
||||
@@ -216,15 +264,81 @@ This is an active-work continuity recovery.
|
||||
|
||||
Startup recovery and periodic recovery are different from normal wakeup delivery.
|
||||
|
||||
On startup and on the periodic recovery loop, Paperclip now does three things in sequence:
|
||||
On startup and on the periodic recovery loop, Paperclip now does four things in sequence:
|
||||
|
||||
1. reap orphaned `running` runs
|
||||
2. resume persisted `queued` runs
|
||||
3. reconcile stranded assigned work
|
||||
4. scan silent active runs and create or update explicit watchdog review issues
|
||||
|
||||
That last step is what closes the gap where issue state survives a crash but the wake/run path does not.
|
||||
The stranded-work pass closes the gap where issue state survives a crash but the wake/run path does not. The silent-run scan covers the separate case where a live process exists but has stopped producing observable output.
|
||||
|
||||
## 10. What This Does Not Mean
|
||||
## 10. Silent Active-Run Watchdog
|
||||
|
||||
An active run can still be unhealthy even when its process is `running`. Paperclip treats prolonged output silence as a watchdog signal, not as proof that the run is failed.
|
||||
|
||||
The recovery service owns this contract:
|
||||
|
||||
- classify active-run output silence as `ok`, `suspicious`, `critical`, `snoozed`, or `not_applicable`
|
||||
- collect bounded evidence from run logs, recent run events, child issues, and blockers
|
||||
- preserve redaction and truncation before evidence is written to issue descriptions
|
||||
- create at most one open `stale_active_run_evaluation` issue per run
|
||||
- honor active snooze decisions before creating more review work
|
||||
- build the `outputSilence` summary shown by live-run and active-run API responses
|
||||
|
||||
Suspicious silence creates a medium-priority review issue for the selected recovery owner. Critical silence raises that review issue to high priority and blocks the source issue on the explicit evaluation task without cancelling the active process.
|
||||
|
||||
Watchdog decisions are explicit operator/recovery-owner decisions:
|
||||
|
||||
- `snooze` records an operator-chosen future quiet-until time and suppresses scan-created review work during that window
|
||||
- `continue` records that the current evidence is acceptable, does not cancel or mutate the active run, and sets a 30-minute default re-arm window before the watchdog evaluates the still-silent run again
|
||||
- `dismissed_false_positive` records why the review was not actionable
|
||||
|
||||
Operators should prefer `snooze` for known time-bounded quiet periods. `continue` is only a short acknowledgement of the current evidence; if the run remains silent after the re-arm window, the periodic watchdog scan can create or update review work again.
|
||||
|
||||
The board can record watchdog decisions. The assigned owner of the watchdog evaluation issue can also record them. Other agents cannot.
|
||||
|
||||
## 11. Auto-Recover vs Explicit Recovery vs Human Escalation
|
||||
|
||||
Paperclip uses three different recovery outcomes, depending on how much it can safely infer.
|
||||
|
||||
### Auto-Recover
|
||||
|
||||
Auto-recovery is allowed when ownership is clear and the control plane only lost execution continuity.
|
||||
|
||||
Examples:
|
||||
|
||||
- requeue one dispatch wake for an assigned `todo` issue whose latest run failed, timed out, or was cancelled
|
||||
- requeue one continuation wake for an assigned `in_progress` issue whose live execution path disappeared
|
||||
- assign an orphan blocker back to its creator when that blocker is already preventing other work
|
||||
|
||||
Auto-recovery preserves the existing owner. It does not choose a replacement agent.
|
||||
|
||||
### Explicit Recovery Issue
|
||||
|
||||
Paperclip creates an explicit recovery issue when the system can identify a problem but cannot safely complete the work itself.
|
||||
|
||||
Examples:
|
||||
|
||||
- automatic stranded-work retry was already exhausted
|
||||
- a dependency graph has an invalid/uninvokable owner, unassigned blocker, or invalid review participant
|
||||
- an active run is silent past the watchdog threshold
|
||||
|
||||
The source issue remains visible and blocked on the recovery issue when blocking is necessary for correctness. The recovery owner must restore a live path, resolve the source issue manually, or record the reason it is a false positive.
|
||||
|
||||
### Human Escalation
|
||||
|
||||
Human escalation is required when the next safe action depends on board judgment, budget/approval policy, or information unavailable to the control plane.
|
||||
|
||||
Examples:
|
||||
|
||||
- all candidate recovery owners are paused, terminated, pending approval, or budget-blocked
|
||||
- the issue is human-owned rather than agent-owned
|
||||
- the run is intentionally quiet but needs an operator decision before cancellation or continuation
|
||||
|
||||
In these cases Paperclip should leave a visible issue/comment trail instead of silently retrying.
|
||||
|
||||
## 12. What This Does Not Mean
|
||||
|
||||
These semantics do not change V1 into an auto-reassignment system.
|
||||
|
||||
@@ -238,9 +352,10 @@ The recovery model is intentionally conservative:
|
||||
|
||||
- preserve ownership
|
||||
- retry once when the control plane lost execution continuity
|
||||
- create explicit recovery work when the system can identify a bounded recovery owner/action
|
||||
- escalate visibly when the system cannot safely keep going
|
||||
|
||||
## 11. Practical Interpretation
|
||||
## 13. Practical Interpretation
|
||||
|
||||
For a board operator, the intended meaning is:
|
||||
|
||||
|
||||
@@ -10,6 +10,9 @@ It is intentionally narrower than [PLUGIN_SPEC.md](./PLUGIN_SPEC.md). The spec i
|
||||
- Plugin UI runs as same-origin JavaScript inside the main Paperclip app.
|
||||
- Worker-side host APIs are capability-gated.
|
||||
- Plugin UI is not sandboxed by manifest capabilities.
|
||||
- Plugin database migrations are restricted to a host-derived plugin namespace.
|
||||
- Plugin-owned JSON API routes must be declared in the manifest and are mounted
|
||||
only under `/api/plugins/:pluginId/api/*`.
|
||||
- There is no host-provided shared React component kit for plugins yet.
|
||||
- `ctx.assets` is not supported in the current runtime.
|
||||
|
||||
@@ -77,10 +80,12 @@ Worker:
|
||||
- secrets
|
||||
- activity
|
||||
- state
|
||||
- database namespace via `ctx.db`
|
||||
- scoped JSON API routes declared with `apiRoutes`
|
||||
- entities
|
||||
- projects and project workspaces
|
||||
- companies
|
||||
- issues and comments
|
||||
- issues, comments, namespaced `plugin:<pluginKey>` origins, blocker relations, checkout assertions, assignment wakeups, and orchestration summaries
|
||||
- agents and agent sessions
|
||||
- goals
|
||||
- data/actions
|
||||
@@ -89,6 +94,55 @@ Worker:
|
||||
- metrics
|
||||
- logger
|
||||
|
||||
### Plugin database declarations
|
||||
|
||||
First-party or otherwise trusted orchestration plugins can declare:
|
||||
|
||||
```ts
|
||||
database: {
|
||||
migrationsDir: "migrations",
|
||||
coreReadTables: ["issues"],
|
||||
}
|
||||
```
|
||||
|
||||
Required capabilities are `database.namespace.migrate` and
|
||||
`database.namespace.read`; add `database.namespace.write` for runtime mutations.
|
||||
The host derives `ctx.db.namespace`, runs SQL files in filename order before the
|
||||
worker starts, records checksums in `plugin_migrations`, and rejects changed
|
||||
already-applied migrations.
|
||||
|
||||
Migration SQL may create or alter objects only inside `ctx.db.namespace`. It may
|
||||
reference whitelisted `public` core tables for foreign keys or read-only views,
|
||||
but may not mutate/alter/drop/truncate public tables, create extensions,
|
||||
triggers, untrusted languages, or runtime multi-statement SQL. Runtime
|
||||
`ctx.db.query()` is restricted to `SELECT`; runtime `ctx.db.execute()` is
|
||||
restricted to namespace-local `INSERT`, `UPDATE`, and `DELETE`.
|
||||
|
||||
### Scoped plugin API routes
|
||||
|
||||
Plugins can expose JSON-only routes under their own namespace:
|
||||
|
||||
```ts
|
||||
apiRoutes: [
|
||||
{
|
||||
routeKey: "initialize",
|
||||
method: "POST",
|
||||
path: "/issues/:issueId/smoke",
|
||||
auth: "board-or-agent",
|
||||
capability: "api.routes.register",
|
||||
checkoutPolicy: "required-for-agent-in-progress",
|
||||
companyResolution: { from: "issue", param: "issueId" },
|
||||
},
|
||||
]
|
||||
```
|
||||
|
||||
The host resolves the plugin, checks that it is ready, enforces
|
||||
`api.routes.register`, matches the declared method/path, resolves company access,
|
||||
and applies checkout policy before dispatching to the worker's `onApiRequest`
|
||||
handler. The worker receives sanitized headers, route params, query, parsed JSON
|
||||
body, actor context, and company id. Do not use plugin routes to claim core
|
||||
paths; they always remain under `/api/plugins/:pluginId/api/*`.
|
||||
|
||||
UI:
|
||||
|
||||
- `usePluginData`
|
||||
|
||||
@@ -28,6 +28,9 @@ Current limitations to keep in mind:
|
||||
- The repo example plugins under `packages/plugins/examples/` are development conveniences. They work from a source checkout and should not be assumed to exist in a generic published build unless they are explicitly shipped with that build.
|
||||
- Dynamic plugin install is not yet cloud-ready for horizontally scaled or ephemeral deployments. There is no shared artifact store, install coordination, or cross-node distribution layer yet.
|
||||
- The current runtime does not yet ship a real host-provided plugin UI component kit, and it does not support plugin asset uploads/reads. Treat those as future-scope ideas in this spec, not current implementation promises.
|
||||
- Scoped plugin API routes are JSON-only and must be declared in `apiRoutes`.
|
||||
They mount under `/api/plugins/:pluginId/api/*`; plugins cannot shadow core
|
||||
API routes.
|
||||
|
||||
In practice, that means the current implementation is a good fit for local development and self-hosted persistent deployments, but not yet for multi-instance cloud plugin distribution.
|
||||
|
||||
@@ -624,7 +627,46 @@ Required SDK clients:
|
||||
|
||||
Plugins that need filesystem, git, terminal, or process operations handle those directly using standard Node APIs or libraries. The host provides project workspace metadata through `ctx.projects` so plugins can resolve workspace paths, but the host does not proxy low-level OS operations.
|
||||
|
||||
## 14.1 Example SDK Shape
|
||||
## 14.1 Issue Orchestration APIs
|
||||
|
||||
Trusted orchestration plugins can create and update Paperclip issues through `ctx.issues` instead of importing server internals. The public issue contract includes parent/project/goal links, board or agent assignees, blocker IDs, labels, billing code, request depth, execution workspace inheritance, and plugin origin metadata.
|
||||
|
||||
Origin rules:
|
||||
|
||||
- Built-in core issues keep built-in origins such as `manual` and `routine_execution`.
|
||||
- Plugin-managed issues use `plugin:<pluginKey>` or a sub-kind such as `plugin:<pluginKey>:feature`.
|
||||
- The host derives the default plugin origin from the installed plugin key and rejects attempts to set `plugin:<otherPluginKey>` origins.
|
||||
- `originId` is plugin-defined and should be stable for idempotent generated work.
|
||||
|
||||
Relation and read helpers:
|
||||
|
||||
- `ctx.issues.relations.get(issueId, companyId)`
|
||||
- `ctx.issues.relations.setBlockedBy(issueId, blockerIssueIds, companyId)`
|
||||
- `ctx.issues.relations.addBlockers(issueId, blockerIssueIds, companyId)`
|
||||
- `ctx.issues.relations.removeBlockers(issueId, blockerIssueIds, companyId)`
|
||||
- `ctx.issues.getSubtree(issueId, companyId, options)`
|
||||
- `ctx.issues.summaries.getOrchestration({ issueId, companyId, includeSubtree, billingCode })`
|
||||
|
||||
Governance helpers:
|
||||
|
||||
- `ctx.issues.assertCheckoutOwner({ issueId, companyId, actorAgentId, actorRunId })` lets plugin actions preserve agent-run checkout ownership.
|
||||
- `ctx.issues.requestWakeup(issueId, companyId, options)` requests assignment wakeups through host heartbeat semantics, including terminal-status, blocker, assignee, and budget hard-stop checks.
|
||||
- `ctx.issues.requestWakeups(issueIds, companyId, options)` applies the same host-owned wakeup semantics to a batch and may use an idempotency key prefix for stable coordinator retries.
|
||||
|
||||
Plugin-originated issue, relation, document, comment, and wakeup mutations must write activity entries with `actorType: "plugin"` and details fields for `sourcePluginId`, `sourcePluginKey`, `initiatingActorType`, `initiatingActorId`, and `initiatingRunId` when a user or agent run initiated the plugin work.
|
||||
|
||||
Scoped API routes:
|
||||
|
||||
- `apiRoutes[]` declares `routeKey`, `method`, plugin-local `path`, `auth`,
|
||||
`capability`, optional checkout policy, and company resolution.
|
||||
- The host enforces auth, company access, `api.routes.register`, route matching,
|
||||
and checkout policy before worker dispatch.
|
||||
- The worker implements `onApiRequest(input)` and returns a JSON response shape
|
||||
`{ status?, headers?, body? }`.
|
||||
- Only safe request headers are forwarded; auth/cookie headers are never passed
|
||||
to the worker.
|
||||
|
||||
## 14.2 Example SDK Shape
|
||||
|
||||
```ts
|
||||
/** Top-level helper for defining a plugin with type checking */
|
||||
@@ -696,16 +738,24 @@ The host enforces capabilities in the SDK layer and refuses calls outside the gr
|
||||
- `project.workspaces.read`
|
||||
- `issues.read`
|
||||
- `issue.comments.read`
|
||||
- `issue.documents.read`
|
||||
- `issue.relations.read`
|
||||
- `issue.subtree.read`
|
||||
- `agents.read`
|
||||
- `goals.read`
|
||||
- `activity.read`
|
||||
- `costs.read`
|
||||
- `issues.orchestration.read`
|
||||
|
||||
### Data Write
|
||||
|
||||
- `issues.create`
|
||||
- `issues.update`
|
||||
- `issue.comments.create`
|
||||
- `issue.documents.write`
|
||||
- `issue.relations.write`
|
||||
- `issues.checkout`
|
||||
- `issues.wakeup`
|
||||
- `assets.write`
|
||||
- `assets.read`
|
||||
- `activity.log.write`
|
||||
@@ -772,6 +822,13 @@ Minimum event set:
|
||||
- `issue.created`
|
||||
- `issue.updated`
|
||||
- `issue.comment.created`
|
||||
- `issue.document.created`
|
||||
- `issue.document.updated`
|
||||
- `issue.document.deleted`
|
||||
- `issue.relations.updated`
|
||||
- `issue.checked_out`
|
||||
- `issue.released`
|
||||
- `issue.assignment_wakeup_requested`
|
||||
- `agent.created`
|
||||
- `agent.updated`
|
||||
- `agent.status_changed`
|
||||
@@ -781,6 +838,8 @@ Minimum event set:
|
||||
- `agent.run.cancelled`
|
||||
- `approval.created`
|
||||
- `approval.decided`
|
||||
- `budget.incident.opened`
|
||||
- `budget.incident.resolved`
|
||||
- `cost_event.created`
|
||||
- `activity.logged`
|
||||
|
||||
@@ -1238,6 +1297,8 @@ Plugin-originated mutations should write:
|
||||
|
||||
- `actor_type = plugin`
|
||||
- `actor_id = <plugin-id>`
|
||||
- details include `sourcePluginId` and `sourcePluginKey`
|
||||
- details include `initiatingActorType`, `initiatingActorId`, and `initiatingRunId` when a user or agent run triggered the plugin work
|
||||
|
||||
## 21.5 Plugin Migrations
|
||||
|
||||
|
||||
@@ -114,14 +114,14 @@ If the connection drops, the UI reconnects automatically.
|
||||
|
||||
1. Enable timer wakeups (for example every 300s)
|
||||
2. Keep assignment wakeups on
|
||||
3. Use a focused prompt template
|
||||
3. Use a focused prompt template that tells agents to act in the same heartbeat, leave durable progress, and mark blocked work with an owner/action
|
||||
4. Watch run logs and adjust prompt/config over time
|
||||
|
||||
## 7.2 Event-driven loop (less constant polling)
|
||||
|
||||
1. Disable timer or set a long interval
|
||||
2. Keep wake-on-assignment enabled
|
||||
3. Use on-demand wakeups for manual nudges
|
||||
3. Use child issues, comments, and on-demand wakeups for handoffs instead of loops that poll agents, sessions, or processes
|
||||
|
||||
## 7.3 Safety-first loop
|
||||
|
||||
|
||||
299
doc/spec/invite-flow.md
Normal file
299
doc/spec/invite-flow.md
Normal file
@@ -0,0 +1,299 @@
|
||||
# Invite Flow State Map
|
||||
|
||||
Status: Current implementation map
|
||||
Date: 2026-04-13
|
||||
|
||||
This document maps the current invite creation and acceptance states implemented in:
|
||||
|
||||
- `ui/src/pages/CompanyInvites.tsx`
|
||||
- `ui/src/pages/CompanySettings.tsx`
|
||||
- `ui/src/pages/InviteLanding.tsx`
|
||||
- `server/src/routes/access.ts`
|
||||
- `server/src/lib/join-request-dedupe.ts`
|
||||
|
||||
## State Legend
|
||||
|
||||
- Invite state: `active`, `revoked`, `accepted`, `expired`
|
||||
- Join request status: `pending_approval`, `approved`, `rejected`
|
||||
- Claim secret state for agent joins: `available`, `consumed`, `expired`
|
||||
- Invite type: `company_join` or `bootstrap_ceo`
|
||||
- Join type: `human`, `agent`, or `both`
|
||||
|
||||
## Entity Lifecycle
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
Board[Board user on invite screen]
|
||||
HumanInvite[Create human company invite]
|
||||
OpenClawInvite[Generate OpenClaw invite prompt]
|
||||
Active[Invite state: active]
|
||||
Revoked[Invite state: revoked]
|
||||
Expired[Invite state: expired]
|
||||
Accepted[Invite state: accepted]
|
||||
BootstrapDone[Bootstrap accepted<br/>no join request]
|
||||
HumanReuse{Matching human join request<br/>already exists for same user/email?}
|
||||
HumanPending[Join request<br/>pending_approval]
|
||||
HumanApproved[Join request<br/>approved]
|
||||
HumanRejected[Join request<br/>rejected]
|
||||
AgentPending[Agent join request<br/>pending_approval<br/>+ optional claim secret]
|
||||
AgentApproved[Agent join request<br/>approved]
|
||||
AgentRejected[Agent join request<br/>rejected]
|
||||
ClaimAvailable[Claim secret available]
|
||||
ClaimConsumed[Claim secret consumed]
|
||||
ClaimExpired[Claim secret expired]
|
||||
OpenClawReplay[Special replay path:<br/>accepted invite can be POSTed again<br/>for openclaw_gateway only]
|
||||
|
||||
Board --> HumanInvite --> Active
|
||||
Board --> OpenClawInvite --> Active
|
||||
Active -->|revoke| Revoked
|
||||
Active -->|expiresAt passes| Expired
|
||||
|
||||
Active -->|bootstrap_ceo accept| BootstrapDone
|
||||
BootstrapDone --> Accepted
|
||||
|
||||
Active -->|human accept| HumanReuse
|
||||
HumanReuse -->|reuse existing pending request| HumanPending
|
||||
HumanReuse -->|reuse existing approved request| HumanApproved
|
||||
HumanReuse -->|no reusable request<br/>create new request| HumanPending
|
||||
HumanPending -->|board approves| HumanApproved
|
||||
HumanPending -->|board rejects| HumanRejected
|
||||
HumanPending --> Accepted
|
||||
HumanApproved --> Accepted
|
||||
|
||||
Active -->|agent accept| AgentPending
|
||||
AgentPending --> Accepted
|
||||
AgentPending -->|board approves| AgentApproved
|
||||
AgentPending -->|board rejects| AgentRejected
|
||||
AgentApproved -->|createdAgentId + claimSecretHash| ClaimAvailable
|
||||
ClaimAvailable -->|POST claim-api-key succeeds| ClaimConsumed
|
||||
ClaimAvailable -->|secret expires| ClaimExpired
|
||||
|
||||
Accepted --> OpenClawReplay
|
||||
OpenClawReplay --> AgentPending
|
||||
OpenClawReplay --> AgentApproved
|
||||
```
|
||||
|
||||
## Board-Side Screen States
|
||||
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> CompanySelection
|
||||
|
||||
CompanySelection --> NoCompany: no company selected
|
||||
CompanySelection --> LoadingHistory: selectedCompanyId present
|
||||
LoadingHistory --> HistoryError: listInvites failed
|
||||
LoadingHistory --> Ready: listInvites succeeded
|
||||
|
||||
state Ready {
|
||||
[*] --> EmptyHistory
|
||||
EmptyHistory --> PopulatedHistory: invites exist
|
||||
PopulatedHistory --> LoadingMore: View more
|
||||
LoadingMore --> PopulatedHistory: next page loaded
|
||||
|
||||
PopulatedHistory --> RevokePending: Revoke active invite
|
||||
RevokePending --> PopulatedHistory: revoke succeeded
|
||||
RevokePending --> PopulatedHistory: revoke failed
|
||||
|
||||
EmptyHistory --> CreatePending: Create invite
|
||||
PopulatedHistory --> CreatePending: Create invite
|
||||
CreatePending --> LatestInviteVisible: create succeeded
|
||||
CreatePending --> Ready: create failed
|
||||
LatestInviteVisible --> CopiedToast: clipboard copy succeeded
|
||||
LatestInviteVisible --> Ready: navigate away or refresh
|
||||
}
|
||||
|
||||
CompanySelection --> OpenClawPromptReady: Company settings prompt generator
|
||||
OpenClawPromptReady --> OpenClawPromptPending: Generate OpenClaw Invite Prompt
|
||||
OpenClawPromptPending --> OpenClawSnippetVisible: prompt generated
|
||||
OpenClawPromptPending --> OpenClawPromptReady: generation failed
|
||||
```
|
||||
|
||||
## Invite Landing Screen States
|
||||
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> TokenGate
|
||||
|
||||
TokenGate --> InvalidToken: token missing
|
||||
TokenGate --> Loading: token present
|
||||
Loading --> InviteUnavailable: invite fetch failed or invite not returned
|
||||
Loading --> CheckingAccess: signed-in session and invite.companyId
|
||||
Loading --> InviteResolved: invite loaded without membership check
|
||||
Loading --> AcceptedInviteSummary: invite already consumed<br/>but linked join request still exists
|
||||
|
||||
CheckingAccess --> RedirectToBoard: current user already belongs to company
|
||||
CheckingAccess --> InviteResolved: membership check finished and no join-request summary state is active
|
||||
CheckingAccess --> AcceptedInviteSummary: membership check finished and invite has joinRequestStatus
|
||||
|
||||
state InviteResolved {
|
||||
[*] --> Branch
|
||||
Branch --> AgentForm: company_join + allowedJoinTypes=agent
|
||||
Branch --> InlineAuth: authenticated mode + no session + join is not agent-only
|
||||
Branch --> AcceptReady: bootstrap invite or human-ready session/local_trusted
|
||||
|
||||
InlineAuth --> InlineAuth: toggle sign-up/sign-in
|
||||
InlineAuth --> InlineAuth: auth validation or auth error message
|
||||
InlineAuth --> RedirectToBoard: auth succeeded and company membership already exists
|
||||
InlineAuth --> AcceptPending: auth succeeded and invite still needs acceptance
|
||||
|
||||
AgentForm --> AcceptPending: submit request
|
||||
AgentForm --> AgentForm: validation or accept error
|
||||
|
||||
AcceptReady --> AcceptPending: Accept invite
|
||||
AcceptReady --> AcceptReady: accept error
|
||||
}
|
||||
|
||||
AcceptPending --> BootstrapComplete: bootstrapAccepted=true
|
||||
AcceptPending --> RedirectToBoard: join status=approved
|
||||
AcceptPending --> PendingApprovalResult: join status=pending_approval
|
||||
AcceptPending --> RejectedResult: join status=rejected
|
||||
|
||||
state AcceptedInviteSummary {
|
||||
[*] --> SummaryBranch
|
||||
SummaryBranch --> PendingApprovalReload: joinRequestStatus=pending_approval
|
||||
SummaryBranch --> OpeningCompany: joinRequestStatus=approved<br/>and human invite user is now a member
|
||||
SummaryBranch --> RejectedReload: joinRequestStatus=rejected
|
||||
SummaryBranch --> ConsumedReload: approved agent invite or other consumed state
|
||||
}
|
||||
|
||||
PendingApprovalResult --> PendingApprovalReload: reload after submit
|
||||
RejectedResult --> RejectedReload: reload after board rejects
|
||||
RedirectToBoard --> OpeningCompany: brief pre-navigation render when approved membership is detected
|
||||
OpeningCompany --> RedirectToBoard: navigate to board
|
||||
```
|
||||
|
||||
## Sequence Diagrams
|
||||
|
||||
### Human Invite Creation And First Acceptance
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
autonumber
|
||||
actor Board as Board user
|
||||
participant Settings as Company Invites UI
|
||||
participant API as Access routes
|
||||
participant Invites as invites table
|
||||
actor Invitee as Invite recipient
|
||||
participant Landing as Invite landing UI
|
||||
participant Auth as Auth session
|
||||
participant Join as join_requests table
|
||||
|
||||
Board->>Settings: Choose role and click Create invite
|
||||
Settings->>API: POST /api/companies/:companyId/invites
|
||||
API->>Invites: Insert active invite
|
||||
API-->>Settings: inviteUrl + metadata
|
||||
|
||||
Invitee->>Landing: Open invite URL
|
||||
Landing->>API: GET /api/invites/:token
|
||||
API->>Invites: Load active invite
|
||||
API-->>Landing: Invite summary
|
||||
|
||||
alt Authenticated mode and no session
|
||||
Landing->>Auth: Sign up or sign in
|
||||
Auth-->>Landing: Session established
|
||||
end
|
||||
|
||||
Landing->>API: POST /api/invites/:token/accept (requestType=human)
|
||||
API->>Join: Look for reusable human join request
|
||||
alt Reusable pending or approved request exists
|
||||
API->>Invites: Mark invite accepted
|
||||
API-->>Landing: Existing join request status
|
||||
else No reusable request exists
|
||||
API->>Invites: Mark invite accepted
|
||||
API->>Join: Insert pending_approval join request
|
||||
API-->>Landing: New pending_approval join request
|
||||
end
|
||||
```
|
||||
|
||||
### Human Approval And Reload Path
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
autonumber
|
||||
actor Invitee as Invite recipient
|
||||
participant Landing as Invite landing UI
|
||||
participant API as Access routes
|
||||
participant Join as join_requests table
|
||||
actor Approver as Company admin
|
||||
participant Queue as Access queue UI
|
||||
participant Membership as company_memberships + grants
|
||||
|
||||
Invitee->>Landing: Reload consumed invite URL
|
||||
Landing->>API: GET /api/invites/:token
|
||||
API->>Join: Load join request by inviteId
|
||||
API-->>Landing: joinRequestStatus + joinRequestType
|
||||
|
||||
alt joinRequestStatus = pending_approval
|
||||
Landing-->>Invitee: Show waiting-for-approval panel
|
||||
Approver->>Queue: Review request in Company Settings -> Access
|
||||
Queue->>API: POST /companies/:companyId/join-requests/:requestId/approve
|
||||
API->>Membership: Ensure membership and grants
|
||||
API->>Join: Mark join request approved
|
||||
Invitee->>Landing: Refresh after approval
|
||||
Landing->>API: GET /api/invites/:token
|
||||
API->>Join: Reload approved join request
|
||||
API-->>Landing: approved status
|
||||
Landing-->>Invitee: Opening company and redirect
|
||||
else joinRequestStatus = rejected
|
||||
Landing-->>Invitee: Show rejected error panel
|
||||
else joinRequestStatus = approved but membership missing
|
||||
Landing-->>Invitee: Fall through to consumed/unavailable state
|
||||
end
|
||||
```
|
||||
|
||||
### Agent Invite Approval, Claim, And Replay
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
autonumber
|
||||
actor Board as Board user
|
||||
participant Settings as Company Settings UI
|
||||
participant API as Access routes
|
||||
participant Invites as invites table
|
||||
actor Gateway as OpenClaw gateway agent
|
||||
participant Join as join_requests table
|
||||
actor Approver as Company admin
|
||||
participant Agents as agents table
|
||||
participant Keys as agent_api_keys table
|
||||
|
||||
Board->>Settings: Generate OpenClaw invite prompt
|
||||
Settings->>API: POST /api/companies/:companyId/openclaw-invite-prompt
|
||||
API->>Invites: Insert active agent invite
|
||||
API-->>Settings: Prompt text + invite token
|
||||
|
||||
Gateway->>API: POST /api/invites/:token/accept (agent, openclaw_gateway)
|
||||
API->>Invites: Mark invite accepted
|
||||
API->>Join: Insert pending_approval join request + claimSecretHash
|
||||
API-->>Gateway: requestId + claimSecret + claimApiKeyPath
|
||||
|
||||
Approver->>API: POST /companies/:companyId/join-requests/:requestId/approve
|
||||
API->>Agents: Create agent + membership + grants
|
||||
API->>Join: Mark request approved and store createdAgentId
|
||||
|
||||
Gateway->>API: POST /api/join-requests/:requestId/claim-api-key (claimSecret)
|
||||
API->>Keys: Create initial API key
|
||||
API->>Join: Mark claim secret consumed
|
||||
API-->>Gateway: Plaintext Paperclip API key
|
||||
|
||||
opt Replay accepted invite for updated gateway defaults
|
||||
Gateway->>API: POST /api/invites/:token/accept again
|
||||
API->>Join: Reuse existing approved or pending request
|
||||
API->>Agents: Update approved agent adapter config when applicable
|
||||
API-->>Gateway: Updated join request payload
|
||||
end
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- `GET /api/invites/:token` treats `revoked` and `expired` invites as unavailable. Accepted invites remain resolvable when they already have a linked join request, and the summary now includes `joinRequestStatus` plus `joinRequestType`.
|
||||
- Human acceptance consumes the invite immediately and then either creates a new join request or reuses an existing `pending_approval` or `approved` human join request for the same user/email.
|
||||
- The landing page has two layers of post-accept UI:
|
||||
- immediate mutation-result UI from `POST /api/invites/:token/accept`
|
||||
- reload-time summary UI from `GET /api/invites/:token` once the invite has already been consumed
|
||||
- Reload behavior for accepted company invites is now status-sensitive:
|
||||
- `pending_approval` re-renders the waiting-for-approval panel
|
||||
- `rejected` renders the "This join request was not approved." error panel
|
||||
- `approved` only becomes a success path for human invites after membership is visible to the current session; otherwise the page falls through to the generic consumed/unavailable state
|
||||
- `GET /api/invites/:token/logo` still rejects accepted invites, so accepted-invite reload states may fall back to the generated company icon even though the summary payload still carries `companyLogoUrl`.
|
||||
- The only accepted-invite replay path in the current implementation is `POST /api/invites/:token/accept` for `agent` requests with `adapterType=openclaw_gateway`, and only when the existing join request is still `pending_approval` or already `approved`.
|
||||
- `bootstrap_ceo` invites are one-time and do not create join requests.
|
||||
@@ -124,14 +124,14 @@ If the connection drops, the UI reconnects automatically.
|
||||
|
||||
1. Enable timer wakeups (for example every 300s)
|
||||
2. Keep assignment wakeups on
|
||||
3. Use a focused prompt template
|
||||
3. Use a focused prompt template that tells agents to act in the same heartbeat, leave durable progress, and mark blocked work with an owner/action
|
||||
4. Watch run logs and adjust prompt/config over time
|
||||
|
||||
## 7.2 Event-driven loop (less constant polling)
|
||||
|
||||
1. Disable timer or set a long interval
|
||||
2. Keep wake-on-assignment enabled
|
||||
3. Use on-demand wakeups for manual nudges
|
||||
3. Use child issues, comments, and on-demand wakeups for handoffs instead of loops that poll agents, sessions, or processes
|
||||
|
||||
## 7.3 Safety-first loop
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: Issues
|
||||
summary: Issue CRUD, checkout/release, comments, documents, and attachments
|
||||
summary: Issue CRUD, checkout/release, comments, documents, interactions, and attachments
|
||||
---
|
||||
|
||||
Issues are the unit of work in Paperclip. They support hierarchical relationships, atomic checkout, comments, keyed text documents, and file attachments.
|
||||
Issues are the unit of work in Paperclip. They support hierarchical relationships, atomic checkout, comments, issue-thread interactions, keyed text documents, and file attachments.
|
||||
|
||||
## List Issues
|
||||
|
||||
@@ -121,6 +121,65 @@ POST /api/issues/{issueId}/comments
|
||||
|
||||
@-mentions (`@AgentName`) in comments trigger heartbeats for the mentioned agent.
|
||||
|
||||
## Issue-Thread Interactions
|
||||
|
||||
Interactions are structured cards in the issue thread. Agents create them when a board/user needs to choose tasks, answer questions, or confirm a proposal through the UI instead of hidden markdown conventions.
|
||||
|
||||
### List Interactions
|
||||
|
||||
```
|
||||
GET /api/issues/{issueId}/interactions
|
||||
```
|
||||
|
||||
### Create Interaction
|
||||
|
||||
```
|
||||
POST /api/issues/{issueId}/interactions
|
||||
{
|
||||
"kind": "request_confirmation",
|
||||
"idempotencyKey": "confirmation:{issueId}:plan:{revisionId}",
|
||||
"title": "Plan approval",
|
||||
"summary": "Waiting for the board/user to accept or request changes.",
|
||||
"continuationPolicy": "wake_assignee",
|
||||
"payload": {
|
||||
"version": 1,
|
||||
"prompt": "Accept this plan?",
|
||||
"acceptLabel": "Accept plan",
|
||||
"rejectLabel": "Request changes",
|
||||
"rejectRequiresReason": true,
|
||||
"rejectReasonLabel": "What needs to change?",
|
||||
"detailsMarkdown": "Review the latest plan document before accepting.",
|
||||
"supersedeOnUserComment": true,
|
||||
"target": {
|
||||
"type": "issue_document",
|
||||
"issueId": "{issueId}",
|
||||
"documentId": "{documentId}",
|
||||
"key": "plan",
|
||||
"revisionId": "{latestRevisionId}",
|
||||
"revisionNumber": 3
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Supported `kind` values:
|
||||
|
||||
- `suggest_tasks`: propose child issues for the board/user to accept or reject
|
||||
- `ask_user_questions`: ask structured questions and store selected answers
|
||||
- `request_confirmation`: ask the board/user to accept or reject a proposal
|
||||
|
||||
For `request_confirmation`, `continuationPolicy: "wake_assignee"` wakes the assignee only after acceptance. Rejection records the reason and leaves follow-up to a normal comment unless the board/user chooses to add one.
|
||||
|
||||
### Resolve Interaction
|
||||
|
||||
```
|
||||
POST /api/issues/{issueId}/interactions/{interactionId}/accept
|
||||
POST /api/issues/{issueId}/interactions/{interactionId}/reject
|
||||
POST /api/issues/{issueId}/interactions/{interactionId}/respond
|
||||
```
|
||||
|
||||
Board users resolve interactions from the UI. Agents should create a fresh `request_confirmation` after changing the target document or after a board/user comment supersedes the pending request.
|
||||
|
||||
## Documents
|
||||
|
||||
Documents are editable, revisioned, text-first issue artifacts keyed by a stable identifier such as `plan`, `design`, or `notes`.
|
||||
|
||||
@@ -48,6 +48,8 @@
|
||||
"guides/board-operator/managing-tasks",
|
||||
"guides/board-operator/execution-workspaces-and-runtime-services",
|
||||
"guides/board-operator/delegation",
|
||||
"guides/board-operator/execution-workspaces-and-runtime-services",
|
||||
"guides/board-operator/delegation",
|
||||
"guides/board-operator/approvals",
|
||||
"guides/board-operator/costs-and-budgets",
|
||||
"guides/board-operator/activity-log",
|
||||
|
||||
@@ -55,3 +55,15 @@ The name must match the agent's `name` field exactly (case-insensitive). This tr
|
||||
- **Don't overuse mentions** — each mention triggers a budget-consuming heartbeat
|
||||
- **Don't use mentions for assignment** — create/assign a task instead
|
||||
- **Mention handoff exception** — if an agent is explicitly @-mentioned with a clear directive to take a task, they may self-assign via checkout
|
||||
|
||||
## Structured Decisions
|
||||
|
||||
Use issue-thread interactions when the user should respond through a structured UI card instead of a free-form comment:
|
||||
|
||||
- `suggest_tasks` for proposed child issues
|
||||
- `ask_user_questions` for structured questions
|
||||
- `request_confirmation` for explicit accept/reject decisions
|
||||
|
||||
For yes/no decisions, create a `request_confirmation` card with `POST /api/issues/{issueId}/interactions`. Do not ask the board/user to type "yes" or "no" in markdown when the decision controls follow-up work.
|
||||
|
||||
Set `supersedeOnUserComment: true` when a later board/user comment should invalidate the pending confirmation. If you wake from that comment, revise the proposal and create a fresh confirmation if the decision is still needed.
|
||||
|
||||
@@ -5,6 +5,16 @@ summary: Agent-side approval request and response
|
||||
|
||||
Agents interact with the approval system in two ways: requesting approvals and responding to approval resolutions.
|
||||
|
||||
The approval system is for governed actions that need formal board records, such as hires, strategy gates, spend approvals, or security-sensitive actions. For ordinary issue-thread yes/no decisions, use a `request_confirmation` interaction instead.
|
||||
|
||||
Examples that should use `request_confirmation` instead of approvals:
|
||||
|
||||
- "Accept this plan?"
|
||||
- "Proceed with this issue breakdown?"
|
||||
- "Use option A or reject and request changes?"
|
||||
|
||||
Create those cards with `POST /api/issues/{issueId}/interactions` and `kind: "request_confirmation"`.
|
||||
|
||||
## Requesting a Hire
|
||||
|
||||
Managers and CEOs can request to hire new agents:
|
||||
@@ -37,6 +47,16 @@ POST /api/companies/{companyId}/approvals
|
||||
}
|
||||
```
|
||||
|
||||
## Plan Approval Cards
|
||||
|
||||
For normal issue implementation plans, use the issue-thread confirmation surface:
|
||||
|
||||
1. Update the `plan` issue document.
|
||||
2. Create `request_confirmation` bound to the latest `plan` revision.
|
||||
3. Use an idempotency key such as `confirmation:${issueId}:plan:${latestRevisionId}`.
|
||||
4. Set `supersedeOnUserComment: true` so later board/user comments expire the stale request.
|
||||
5. Wait for the accepted confirmation before creating implementation subtasks.
|
||||
|
||||
## Responding to Approval Resolutions
|
||||
|
||||
When an approval you requested is resolved, you may be woken with:
|
||||
|
||||
@@ -66,7 +66,11 @@ Read ancestors to understand why this task exists. If woken by a specific commen
|
||||
|
||||
### Step 7: Do the Work
|
||||
|
||||
Use your tools and capabilities to complete the task.
|
||||
Use your tools and capabilities to complete the task. If the issue is actionable, take a concrete action in the same heartbeat. Do not stop at a plan unless the issue asked for planning.
|
||||
|
||||
Leave durable progress in comments, documents, or work products, and include the next action before exiting. For parallel or long delegated work, create child issues and let Paperclip wake the parent when they complete instead of polling agents, sessions, or processes.
|
||||
|
||||
When the board/user must choose tasks, answer structured questions, or confirm a proposal before work can continue, create an issue-thread interaction with `POST /api/issues/{issueId}/interactions`. Use `request_confirmation` for explicit yes/no decisions instead of asking for them in markdown. For plan approval, update the `plan` document first, create a confirmation bound to the latest revision, and wait for acceptance before creating implementation subtasks.
|
||||
|
||||
### Step 8: Update Status
|
||||
|
||||
@@ -102,6 +106,23 @@ Always set `parentId` and `goalId` on subtasks.
|
||||
- **Always checkout** before working — never PATCH to `in_progress` manually
|
||||
- **Never retry a 409** — the task belongs to someone else
|
||||
- **Always comment** on in-progress work before exiting a heartbeat
|
||||
- **Start actionable work** in the same heartbeat; planning-only exits are for planning tasks
|
||||
- **Leave a clear next action** in durable issue context
|
||||
- **Use child issues instead of polling** for long or parallel delegated work
|
||||
- **Use `request_confirmation`** for issue-scoped yes/no decisions and plan approval cards
|
||||
- **Always set parentId** on subtasks
|
||||
- **Never cancel cross-team tasks** — reassign to your manager
|
||||
- **Escalate when stuck** — use your chain of command
|
||||
|
||||
## Run Liveness
|
||||
|
||||
Paperclip records run liveness as metadata on heartbeat runs. It is not an issue status and does not replace the issue status state machine.
|
||||
|
||||
- Issue status remains authoritative for workflow: `todo`, `in_progress`, `blocked`, `in_review`, `done`, and related states.
|
||||
- Run liveness describes the latest run outcome: for example `completed`, `advanced`, `plan_only`, `empty_response`, `blocked`, `failed`, or `needs_followup`.
|
||||
- Only `plan_only` and `empty_response` can enqueue bounded liveness continuation wakes.
|
||||
- Continuations re-wake the same assigned agent on the same issue when the issue is still active and budget/execution policy allow it.
|
||||
- `continuationAttempt` counts semantic liveness continuations for a source run chain. It is separate from process recovery, queued wake delivery, adapter session resume, and other operational retries.
|
||||
- Liveness continuation wake prompts include the attempt, source run, liveness state, liveness reason, and the instruction for the next heartbeat.
|
||||
- Continuations do not mark the issue `blocked` or `done`. If automatic continuations are exhausted, Paperclip leaves an audit comment so a human or manager can clarify, block, or assign follow-up work.
|
||||
- Workspace provisioning alone is not treated as concrete task progress. Durable progress should appear as tool/action events, issue comments, document or work-product revisions, activity log entries, commits, or tests.
|
||||
|
||||
@@ -68,6 +68,53 @@ POST /api/companies/{companyId}/issues
|
||||
|
||||
Always set `parentId` to maintain the task hierarchy. Set `goalId` when applicable.
|
||||
|
||||
## Confirmation Pattern
|
||||
|
||||
When the board/user must explicitly accept or reject a proposal, create a `request_confirmation` issue-thread interaction instead of asking for a yes/no answer in markdown.
|
||||
|
||||
```
|
||||
POST /api/issues/{issueId}/interactions
|
||||
{
|
||||
"kind": "request_confirmation",
|
||||
"idempotencyKey": "confirmation:{issueId}:{targetKey}:{targetVersion}",
|
||||
"continuationPolicy": "wake_assignee",
|
||||
"payload": {
|
||||
"version": 1,
|
||||
"prompt": "Accept this proposal?",
|
||||
"acceptLabel": "Accept",
|
||||
"rejectLabel": "Request changes",
|
||||
"rejectRequiresReason": true,
|
||||
"supersedeOnUserComment": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Use `continuationPolicy: "wake_assignee"` when acceptance should wake you to continue. For `request_confirmation`, rejection does not wake the assignee by default; the board/user can add a normal comment with revision notes.
|
||||
|
||||
## Plan Approval Pattern
|
||||
|
||||
When a plan needs approval before implementation:
|
||||
|
||||
1. Create or update the issue document with key `plan`.
|
||||
2. Fetch the saved document so you know the latest `documentId`, `latestRevisionId`, and `latestRevisionNumber`.
|
||||
3. Create a `request_confirmation` targeting that exact `plan` revision.
|
||||
4. Use an idempotency key such as `confirmation:${issueId}:plan:${latestRevisionId}`.
|
||||
5. Wait for acceptance before creating implementation subtasks.
|
||||
6. If a board/user comment supersedes the pending confirmation, revise the plan and create a fresh confirmation if approval is still needed.
|
||||
|
||||
Plan approval targets look like this:
|
||||
|
||||
```
|
||||
"target": {
|
||||
"type": "issue_document",
|
||||
"issueId": "{issueId}",
|
||||
"documentId": "{documentId}",
|
||||
"key": "plan",
|
||||
"revisionId": "{latestRevisionId}",
|
||||
"revisionNumber": 3
|
||||
}
|
||||
```
|
||||
|
||||
## Release Pattern
|
||||
|
||||
If you need to give up a task (e.g. you realize it should go to someone else):
|
||||
|
||||
@@ -20,6 +20,13 @@ The Heartbeat Procedure:
|
||||
8. Update status: PATCH /api/issues/{issueId} with status and comment
|
||||
9. Delegate if needed: POST /api/companies/{companyId}/issues
|
||||
|
||||
Execution Contract:
|
||||
- If the issue is actionable, start concrete work in this heartbeat. Do not stop at a plan unless the issue asks for planning.
|
||||
- Leave durable progress in comments, documents, or work products, with a clear next action.
|
||||
- Use child issues for parallel or long delegated work instead of polling agents, sessions, or processes.
|
||||
- If blocked, PATCH the issue to blocked and name the unblock owner and action.
|
||||
- Respect budget, pause/cancel, approval gates, and company boundaries.
|
||||
|
||||
Critical Rules:
|
||||
- Always checkout before working. Never PATCH to in_progress manually.
|
||||
- Never retry a 409. The task belongs to someone else.
|
||||
|
||||
@@ -11,13 +11,16 @@
|
||||
"dev:stop": "pnpm --filter @paperclipai/server exec tsx ../scripts/dev-service.ts stop",
|
||||
"dev:server": "pnpm --filter @paperclipai/server dev",
|
||||
"dev:ui": "pnpm --filter @paperclipai/ui dev",
|
||||
"storybook": "pnpm --filter @paperclipai/ui storybook",
|
||||
"build-storybook": "pnpm --filter @paperclipai/ui build-storybook",
|
||||
"build": "pnpm run preflight:workspace-links && pnpm -r build",
|
||||
"typecheck": "pnpm run preflight:workspace-links && pnpm -r typecheck",
|
||||
"test": "pnpm run test:run",
|
||||
"test:watch": "pnpm run preflight:workspace-links && vitest",
|
||||
"test:run": "pnpm run preflight:workspace-links && vitest run",
|
||||
"test:run": "pnpm run preflight:workspace-links && node scripts/run-vitest-stable.mjs",
|
||||
"db:generate": "pnpm --filter @paperclipai/db generate",
|
||||
"db:migrate": "pnpm --filter @paperclipai/db migrate",
|
||||
"issue-references:backfill": "pnpm run preflight:workspace-links && tsx scripts/backfill-issue-reference-mentions.ts",
|
||||
"secrets:migrate-inline-env": "tsx scripts/migrate-inline-env-secrets.ts",
|
||||
"db:backup": "./scripts/backup-db.sh",
|
||||
"paperclipai": "node cli/node_modules/tsx/dist/cli.mjs cli/src/index.ts",
|
||||
@@ -34,6 +37,7 @@
|
||||
"smoke:openclaw-sse-standalone": "./scripts/smoke/openclaw-sse-standalone.sh",
|
||||
"test:e2e": "npx playwright test --config tests/e2e/playwright.config.ts",
|
||||
"test:e2e:headed": "npx playwright test --config tests/e2e/playwright.config.ts --headed",
|
||||
"test:e2e:multiuser-authenticated": "npx playwright test --config tests/e2e/playwright-multiuser-authenticated.config.ts",
|
||||
"evals:smoke": "cd evals/promptfoo && npx promptfoo@0.103.3 eval",
|
||||
"test:release-smoke": "npx playwright test --config tests/release-smoke/playwright.config.ts",
|
||||
"test:release-smoke:headed": "npx playwright test --config tests/release-smoke/playwright.config.ts --headed",
|
||||
|
||||
152
packages/adapter-utils/src/command-managed-runtime.ts
Normal file
152
packages/adapter-utils/src/command-managed-runtime.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
import path from "node:path";
|
||||
import {
|
||||
prepareSandboxManagedRuntime,
|
||||
type PreparedSandboxManagedRuntime,
|
||||
type SandboxManagedRuntimeAsset,
|
||||
type SandboxManagedRuntimeClient,
|
||||
type SandboxRemoteExecutionSpec,
|
||||
} from "./sandbox-managed-runtime.js";
|
||||
import type { RunProcessResult } from "./server-utils.js";
|
||||
|
||||
export interface CommandManagedRuntimeRunner {
|
||||
execute(input: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string>;
|
||||
stdin?: string;
|
||||
timeoutMs?: number;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
onSpawn?: (meta: { pid: number; startedAt: string }) => Promise<void>;
|
||||
}): Promise<RunProcessResult>;
|
||||
}
|
||||
|
||||
export interface CommandManagedRuntimeSpec {
|
||||
providerKey?: string | null;
|
||||
leaseId?: string | null;
|
||||
remoteCwd: string;
|
||||
timeoutMs?: number | null;
|
||||
paperclipApiUrl?: string | null;
|
||||
}
|
||||
|
||||
export type CommandManagedRuntimeAsset = SandboxManagedRuntimeAsset;
|
||||
|
||||
function shellQuote(value: string) {
|
||||
return `'${value.replace(/'/g, `'"'"'`)}'`;
|
||||
}
|
||||
|
||||
function toBuffer(bytes: Buffer | Uint8Array | ArrayBuffer): Buffer {
|
||||
if (Buffer.isBuffer(bytes)) return bytes;
|
||||
if (bytes instanceof ArrayBuffer) return Buffer.from(bytes);
|
||||
return Buffer.from(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
||||
}
|
||||
|
||||
function requireSuccessfulResult(result: RunProcessResult, action: string): void {
|
||||
if (result.exitCode === 0 && !result.timedOut) return;
|
||||
const stderr = result.stderr.trim();
|
||||
const detail = stderr.length > 0 ? `: ${stderr}` : "";
|
||||
throw new Error(`${action} failed with exit code ${result.exitCode ?? "null"}${detail}`);
|
||||
}
|
||||
|
||||
function createCommandManagedRuntimeClient(input: {
|
||||
runner: CommandManagedRuntimeRunner;
|
||||
remoteCwd: string;
|
||||
timeoutMs: number;
|
||||
}): SandboxManagedRuntimeClient {
|
||||
const runShell = async (script: string, opts: { stdin?: string; timeoutMs?: number } = {}) => {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
args: ["-lc", script],
|
||||
cwd: input.remoteCwd,
|
||||
stdin: opts.stdin,
|
||||
timeoutMs: opts.timeoutMs ?? input.timeoutMs,
|
||||
});
|
||||
requireSuccessfulResult(result, script);
|
||||
return result;
|
||||
};
|
||||
|
||||
return {
|
||||
makeDir: async (remotePath) => {
|
||||
await runShell(`mkdir -p ${shellQuote(remotePath)}`);
|
||||
},
|
||||
writeFile: async (remotePath, bytes) => {
|
||||
const body = toBuffer(bytes).toString("base64");
|
||||
await runShell(
|
||||
`mkdir -p ${shellQuote(path.posix.dirname(remotePath))} && base64 -d > ${shellQuote(remotePath)}`,
|
||||
{ stdin: body },
|
||||
);
|
||||
},
|
||||
readFile: async (remotePath) => {
|
||||
const result = await runShell(`base64 < ${shellQuote(remotePath)}`);
|
||||
return Buffer.from(result.stdout.replace(/\s+/g, ""), "base64");
|
||||
},
|
||||
remove: async (remotePath) => {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
args: ["-lc", `rm -rf ${shellQuote(remotePath)}`],
|
||||
cwd: input.remoteCwd,
|
||||
timeoutMs: input.timeoutMs,
|
||||
});
|
||||
requireSuccessfulResult(result, `remove ${remotePath}`);
|
||||
},
|
||||
run: async (command, options) => {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
args: ["-lc", command],
|
||||
cwd: input.remoteCwd,
|
||||
timeoutMs: options.timeoutMs,
|
||||
});
|
||||
requireSuccessfulResult(result, command);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export async function prepareCommandManagedRuntime(input: {
|
||||
runner: CommandManagedRuntimeRunner;
|
||||
spec: CommandManagedRuntimeSpec;
|
||||
adapterKey: string;
|
||||
workspaceLocalDir: string;
|
||||
workspaceRemoteDir?: string;
|
||||
workspaceExclude?: string[];
|
||||
preserveAbsentOnRestore?: string[];
|
||||
assets?: CommandManagedRuntimeAsset[];
|
||||
installCommand?: string | null;
|
||||
}): Promise<PreparedSandboxManagedRuntime> {
|
||||
const timeoutMs = input.spec.timeoutMs && input.spec.timeoutMs > 0 ? input.spec.timeoutMs : 300_000;
|
||||
const workspaceRemoteDir = input.workspaceRemoteDir ?? input.spec.remoteCwd;
|
||||
const runtimeSpec: SandboxRemoteExecutionSpec = {
|
||||
transport: "sandbox",
|
||||
provider: input.spec.providerKey ?? "sandbox",
|
||||
sandboxId: input.spec.leaseId ?? "managed",
|
||||
remoteCwd: workspaceRemoteDir,
|
||||
timeoutMs,
|
||||
apiKey: null,
|
||||
paperclipApiUrl: input.spec.paperclipApiUrl ?? null,
|
||||
};
|
||||
const client = createCommandManagedRuntimeClient({
|
||||
runner: input.runner,
|
||||
remoteCwd: workspaceRemoteDir,
|
||||
timeoutMs,
|
||||
});
|
||||
|
||||
if (input.installCommand?.trim()) {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
args: ["-lc", input.installCommand.trim()],
|
||||
cwd: workspaceRemoteDir,
|
||||
timeoutMs,
|
||||
});
|
||||
requireSuccessfulResult(result, input.installCommand.trim());
|
||||
}
|
||||
|
||||
return await prepareSandboxManagedRuntime({
|
||||
spec: runtimeSpec,
|
||||
client,
|
||||
adapterKey: input.adapterKey,
|
||||
workspaceLocalDir: input.workspaceLocalDir,
|
||||
workspaceRemoteDir,
|
||||
workspaceExclude: input.workspaceExclude,
|
||||
preserveAbsentOnRestore: input.preserveAbsentOnRestore,
|
||||
assets: input.assets,
|
||||
});
|
||||
}
|
||||
96
packages/adapter-utils/src/execution-target-sandbox.test.ts
Normal file
96
packages/adapter-utils/src/execution-target-sandbox.test.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
import {
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetToRemoteSpec,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
type AdapterSandboxExecutionTarget,
|
||||
} from "./execution-target.js";
|
||||
|
||||
describe("sandbox adapter execution targets", () => {
|
||||
it("executes through the provider-neutral runner without a remote spec", async () => {
|
||||
const runner = {
|
||||
execute: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: "ok\n",
|
||||
stderr: "",
|
||||
pid: null,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
};
|
||||
const target: AdapterSandboxExecutionTarget = {
|
||||
kind: "remote",
|
||||
transport: "sandbox",
|
||||
providerKey: "acme-sandbox",
|
||||
environmentId: "env-1",
|
||||
leaseId: "lease-1",
|
||||
remoteCwd: "/workspace",
|
||||
timeoutMs: 30_000,
|
||||
runner,
|
||||
};
|
||||
|
||||
expect(adapterExecutionTargetToRemoteSpec(target)).toBeNull();
|
||||
|
||||
const result = await runAdapterExecutionTargetProcess("run-1", target, "agent-cli", ["--json"], {
|
||||
cwd: "/local/workspace",
|
||||
env: { TOKEN: "token" },
|
||||
stdin: "prompt",
|
||||
timeoutSec: 5,
|
||||
graceSec: 1,
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.stdout).toBe("ok\n");
|
||||
expect(runner.execute).toHaveBeenCalledWith(expect.objectContaining({
|
||||
command: "agent-cli",
|
||||
args: ["--json"],
|
||||
cwd: "/workspace",
|
||||
env: { TOKEN: "token" },
|
||||
stdin: "prompt",
|
||||
timeoutMs: 5000,
|
||||
}));
|
||||
expect(adapterExecutionTargetSessionIdentity(target)).toEqual({
|
||||
transport: "sandbox",
|
||||
providerKey: "acme-sandbox",
|
||||
environmentId: "env-1",
|
||||
leaseId: "lease-1",
|
||||
remoteCwd: "/workspace",
|
||||
});
|
||||
});
|
||||
|
||||
it("runs shell commands through the same runner", async () => {
|
||||
const runner = {
|
||||
execute: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: "/home/sandbox",
|
||||
stderr: "",
|
||||
pid: null,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
};
|
||||
const target: AdapterSandboxExecutionTarget = {
|
||||
kind: "remote",
|
||||
transport: "sandbox",
|
||||
remoteCwd: "/workspace",
|
||||
runner,
|
||||
};
|
||||
|
||||
await runAdapterExecutionTargetShellCommand("run-2", target, 'printf %s "$HOME"', {
|
||||
cwd: "/local/workspace",
|
||||
env: {},
|
||||
timeoutSec: 7,
|
||||
});
|
||||
|
||||
expect(runner.execute).toHaveBeenCalledWith(expect.objectContaining({
|
||||
command: "sh",
|
||||
args: ["-lc", 'printf %s "$HOME"'],
|
||||
cwd: "/workspace",
|
||||
timeoutMs: 7000,
|
||||
}));
|
||||
});
|
||||
});
|
||||
161
packages/adapter-utils/src/execution-target.test.ts
Normal file
161
packages/adapter-utils/src/execution-target.test.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import * as ssh from "./ssh.js";
|
||||
import {
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
} from "./execution-target.js";
|
||||
|
||||
describe("runAdapterExecutionTargetShellCommand", () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("quotes remote shell commands with the shared SSH quoting helper", async () => {
|
||||
const runSshCommandSpy = vi.spyOn(ssh, "runSshCommand").mockResolvedValue({
|
||||
stdout: "",
|
||||
stderr: "",
|
||||
});
|
||||
|
||||
await runAdapterExecutionTargetShellCommand(
|
||||
"run-1",
|
||||
{
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
`printf '%s\\n' "$HOME" && echo "it's ok"`,
|
||||
{
|
||||
cwd: "/tmp/local",
|
||||
env: {},
|
||||
},
|
||||
);
|
||||
|
||||
expect(runSshCommandSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
host: "ssh.example.test",
|
||||
username: "ssh-user",
|
||||
}),
|
||||
`sh -lc ${ssh.shellQuote(`printf '%s\\n' "$HOME" && echo "it's ok"`)}`,
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
|
||||
it("returns a timedOut result when the SSH shell command times out", async () => {
|
||||
vi.spyOn(ssh, "runSshCommand").mockRejectedValue(Object.assign(new Error("timed out"), {
|
||||
code: "ETIMEDOUT",
|
||||
stdout: "partial stdout",
|
||||
stderr: "partial stderr",
|
||||
signal: "SIGTERM",
|
||||
}));
|
||||
const onLog = vi.fn(async () => {});
|
||||
|
||||
const result = await runAdapterExecutionTargetShellCommand(
|
||||
"run-2",
|
||||
{
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
"sleep 10",
|
||||
{
|
||||
cwd: "/tmp/local",
|
||||
env: {},
|
||||
onLog,
|
||||
},
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
exitCode: null,
|
||||
signal: "SIGTERM",
|
||||
timedOut: true,
|
||||
stdout: "partial stdout",
|
||||
stderr: "partial stderr",
|
||||
});
|
||||
expect(onLog).toHaveBeenCalledWith("stdout", "partial stdout");
|
||||
expect(onLog).toHaveBeenCalledWith("stderr", "partial stderr");
|
||||
});
|
||||
|
||||
it("returns the SSH process exit code for non-zero remote command failures", async () => {
|
||||
vi.spyOn(ssh, "runSshCommand").mockRejectedValue(Object.assign(new Error("non-zero exit"), {
|
||||
code: 17,
|
||||
stdout: "partial stdout",
|
||||
stderr: "partial stderr",
|
||||
signal: null,
|
||||
}));
|
||||
const onLog = vi.fn(async () => {});
|
||||
|
||||
const result = await runAdapterExecutionTargetShellCommand(
|
||||
"run-3",
|
||||
{
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
"false",
|
||||
{
|
||||
cwd: "/tmp/local",
|
||||
env: {},
|
||||
onLog,
|
||||
},
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
exitCode: 17,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: "partial stdout",
|
||||
stderr: "partial stderr",
|
||||
});
|
||||
expect(onLog).toHaveBeenCalledWith("stdout", "partial stdout");
|
||||
expect(onLog).toHaveBeenCalledWith("stderr", "partial stderr");
|
||||
});
|
||||
|
||||
it("keeps managed homes disabled for both local and SSH targets", () => {
|
||||
expect(adapterExecutionTargetUsesManagedHome(null)).toBe(false);
|
||||
expect(adapterExecutionTargetUsesManagedHome({
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
})).toBe(false);
|
||||
});
|
||||
});
|
||||
516
packages/adapter-utils/src/execution-target.ts
Normal file
516
packages/adapter-utils/src/execution-target.ts
Normal file
@@ -0,0 +1,516 @@
|
||||
import path from "node:path";
|
||||
import type { SshRemoteExecutionSpec } from "./ssh.js";
|
||||
import {
|
||||
prepareCommandManagedRuntime,
|
||||
type CommandManagedRuntimeRunner,
|
||||
} from "./command-managed-runtime.js";
|
||||
import {
|
||||
buildRemoteExecutionSessionIdentity,
|
||||
prepareRemoteManagedRuntime,
|
||||
remoteExecutionSessionMatches,
|
||||
type RemoteManagedRuntimeAsset,
|
||||
} from "./remote-managed-runtime.js";
|
||||
import { parseSshRemoteExecutionSpec, runSshCommand, shellQuote } from "./ssh.js";
|
||||
import {
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
type RunProcessResult,
|
||||
type TerminalResultCleanupOptions,
|
||||
} from "./server-utils.js";
|
||||
|
||||
export interface AdapterLocalExecutionTarget {
|
||||
kind: "local";
|
||||
environmentId?: string | null;
|
||||
leaseId?: string | null;
|
||||
}
|
||||
|
||||
export interface AdapterSshExecutionTarget {
|
||||
kind: "remote";
|
||||
transport: "ssh";
|
||||
environmentId?: string | null;
|
||||
leaseId?: string | null;
|
||||
remoteCwd: string;
|
||||
paperclipApiUrl?: string | null;
|
||||
spec: SshRemoteExecutionSpec;
|
||||
}
|
||||
|
||||
export interface AdapterSandboxExecutionTarget {
|
||||
kind: "remote";
|
||||
transport: "sandbox";
|
||||
providerKey?: string | null;
|
||||
environmentId?: string | null;
|
||||
leaseId?: string | null;
|
||||
remoteCwd: string;
|
||||
paperclipApiUrl?: string | null;
|
||||
timeoutMs?: number | null;
|
||||
runner?: CommandManagedRuntimeRunner;
|
||||
}
|
||||
|
||||
export type AdapterExecutionTarget =
|
||||
| AdapterLocalExecutionTarget
|
||||
| AdapterSshExecutionTarget
|
||||
| AdapterSandboxExecutionTarget;
|
||||
|
||||
export type AdapterRemoteExecutionSpec = SshRemoteExecutionSpec;
|
||||
|
||||
export type AdapterManagedRuntimeAsset = RemoteManagedRuntimeAsset;
|
||||
|
||||
export interface PreparedAdapterExecutionTargetRuntime {
|
||||
target: AdapterExecutionTarget;
|
||||
runtimeRootDir: string | null;
|
||||
assetDirs: Record<string, string>;
|
||||
restoreWorkspace(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface AdapterExecutionTargetProcessOptions {
|
||||
cwd: string;
|
||||
env: Record<string, string>;
|
||||
stdin?: string;
|
||||
timeoutSec: number;
|
||||
graceSec: number;
|
||||
onLog: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
onSpawn?: (meta: { pid: number; processGroupId: number | null; startedAt: string }) => Promise<void>;
|
||||
terminalResultCleanup?: TerminalResultCleanupOptions;
|
||||
}
|
||||
|
||||
export interface AdapterExecutionTargetShellOptions {
|
||||
cwd: string;
|
||||
env: Record<string, string>;
|
||||
timeoutSec?: number;
|
||||
graceSec?: number;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
}
|
||||
|
||||
function parseObject(value: unknown): Record<string, unknown> {
|
||||
return value && typeof value === "object" && !Array.isArray(value)
|
||||
? (value as Record<string, unknown>)
|
||||
: {};
|
||||
}
|
||||
|
||||
function readString(value: unknown): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
function readStringMeta(parsed: Record<string, unknown>, key: string): string | null {
|
||||
return readString(parsed[key]);
|
||||
}
|
||||
|
||||
function isAdapterExecutionTargetInstance(value: unknown): value is AdapterExecutionTarget {
|
||||
const parsed = parseObject(value);
|
||||
if (parsed.kind === "local") return true;
|
||||
if (parsed.kind !== "remote") return false;
|
||||
if (parsed.transport === "ssh") return parseSshRemoteExecutionSpec(parseObject(parsed.spec)) !== null;
|
||||
if (parsed.transport !== "sandbox") return false;
|
||||
return readStringMeta(parsed, "remoteCwd") !== null;
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetToRemoteSpec(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): AdapterRemoteExecutionSpec | null {
|
||||
return target?.kind === "remote" && target.transport === "ssh" ? target.spec : null;
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetIsRemote(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): boolean {
|
||||
return target?.kind === "remote";
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetUsesManagedHome(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): boolean {
|
||||
return target?.kind === "remote" && target.transport === "sandbox";
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetRemoteCwd(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
localCwd: string,
|
||||
): string {
|
||||
return target?.kind === "remote" ? target.remoteCwd : localCwd;
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetPaperclipApiUrl(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): string | null {
|
||||
if (target?.kind !== "remote") return null;
|
||||
if (target.transport === "ssh") return target.paperclipApiUrl ?? target.spec.paperclipApiUrl ?? null;
|
||||
return target.paperclipApiUrl ?? null;
|
||||
}
|
||||
|
||||
export function describeAdapterExecutionTarget(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): string {
|
||||
if (!target || target.kind === "local") return "local environment";
|
||||
if (target.transport === "ssh") {
|
||||
return `SSH environment ${target.spec.username}@${target.spec.host}:${target.spec.port}`;
|
||||
}
|
||||
return `sandbox environment${target.providerKey ? ` (${target.providerKey})` : ""}`;
|
||||
}
|
||||
|
||||
function requireSandboxRunner(target: AdapterSandboxExecutionTarget): CommandManagedRuntimeRunner {
|
||||
if (target.runner) return target.runner;
|
||||
throw new Error(
|
||||
"Sandbox execution target is missing its provider runtime runner. Sandbox commands must execute through the environment runtime.",
|
||||
);
|
||||
}
|
||||
|
||||
export async function ensureAdapterExecutionTargetCommandResolvable(
|
||||
command: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
cwd: string,
|
||||
env: NodeJS.ProcessEnv,
|
||||
) {
|
||||
if (target?.kind === "remote" && target.transport === "sandbox") {
|
||||
return;
|
||||
}
|
||||
await ensureCommandResolvable(command, cwd, env, {
|
||||
remoteExecution: adapterExecutionTargetToRemoteSpec(target),
|
||||
});
|
||||
}
|
||||
|
||||
export async function resolveAdapterExecutionTargetCommandForLogs(
|
||||
command: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
cwd: string,
|
||||
env: NodeJS.ProcessEnv,
|
||||
): Promise<string> {
|
||||
if (target?.kind === "remote" && target.transport === "sandbox") {
|
||||
return `sandbox://${target.providerKey ?? "provider"}/${target.leaseId ?? "lease"}/${target.remoteCwd} :: ${command}`;
|
||||
}
|
||||
return await resolveCommandForLogs(command, cwd, env, {
|
||||
remoteExecution: adapterExecutionTargetToRemoteSpec(target),
|
||||
});
|
||||
}
|
||||
|
||||
export async function runAdapterExecutionTargetProcess(
|
||||
runId: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
command: string,
|
||||
args: string[],
|
||||
options: AdapterExecutionTargetProcessOptions,
|
||||
): Promise<RunProcessResult> {
|
||||
if (target?.kind === "remote" && target.transport === "sandbox") {
|
||||
const runner = requireSandboxRunner(target);
|
||||
return await runner.execute({
|
||||
command,
|
||||
args,
|
||||
cwd: target.remoteCwd,
|
||||
env: options.env,
|
||||
stdin: options.stdin,
|
||||
timeoutMs: options.timeoutSec > 0 ? options.timeoutSec * 1000 : target.timeoutMs ?? undefined,
|
||||
onLog: options.onLog,
|
||||
onSpawn: options.onSpawn
|
||||
? async (meta) => options.onSpawn?.({ ...meta, processGroupId: null })
|
||||
: undefined,
|
||||
});
|
||||
}
|
||||
|
||||
return await runChildProcess(runId, command, args, {
|
||||
cwd: options.cwd,
|
||||
env: options.env,
|
||||
stdin: options.stdin,
|
||||
timeoutSec: options.timeoutSec,
|
||||
graceSec: options.graceSec,
|
||||
onLog: options.onLog,
|
||||
onSpawn: options.onSpawn,
|
||||
terminalResultCleanup: options.terminalResultCleanup,
|
||||
remoteExecution: adapterExecutionTargetToRemoteSpec(target),
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Run a one-shot `sh -lc` command on the execution target and return a
 * RunProcessResult-shaped outcome.
 *
 * - SSH: runs via runSshCommand; errors carrying a numeric `code` are mapped
 *   to a non-zero exit, ETIMEDOUT maps to `timedOut: true`, anything else
 *   rethrows.
 * - Sandbox: delegates to the provider runtime runner.
 * - Local: delegates to runAdapterExecutionTargetProcess.
 */
export async function runAdapterExecutionTargetShellCommand(
  runId: string,
  target: AdapterExecutionTarget | null | undefined,
  command: string,
  options: AdapterExecutionTargetShellOptions,
): Promise<RunProcessResult> {
  // Logging is optional; substitute a no-op sink.
  const onLog = options.onLog ?? (async () => {});
  if (target?.kind === "remote") {
    const startedAt = new Date().toISOString();
    if (target.transport === "ssh") {
      try {
        const result = await runSshCommand(target.spec, `sh -lc ${shellQuote(command)}`, {
          timeoutMs: (options.timeoutSec ?? 15) * 1000,
        });
        if (result.stdout) await onLog("stdout", result.stdout);
        if (result.stderr) await onLog("stderr", result.stderr);
        return {
          exitCode: 0,
          signal: null,
          timedOut: false,
          stdout: result.stdout,
          stderr: result.stderr,
          pid: null,
          startedAt,
        };
      } catch (error) {
        // runSshCommand errors may carry captured output and an exit code
        // (numeric) or an errno string such as "ETIMEDOUT".
        const timedOutError = error as NodeJS.ErrnoException & {
          stdout?: string;
          stderr?: string;
          signal?: string | null;
        };
        const stdout = timedOutError.stdout ?? "";
        const stderr = timedOutError.stderr ?? "";
        if (typeof timedOutError.code === "number") {
          // Non-zero exit: surface the captured output and exit code.
          if (stdout) await onLog("stdout", stdout);
          if (stderr) await onLog("stderr", stderr);
          return {
            exitCode: timedOutError.code,
            signal: timedOutError.signal ?? null,
            timedOut: false,
            stdout,
            stderr,
            pid: null,
            startedAt,
          };
        }
        if (timedOutError.code !== "ETIMEDOUT") {
          // Unexpected failure (connection error etc.): propagate.
          throw error;
        }
        // Timeout: report timedOut with whatever output was captured.
        if (stdout) await onLog("stdout", stdout);
        if (stderr) await onLog("stderr", stderr);
        return {
          exitCode: null,
          signal: timedOutError.signal ?? null,
          timedOut: true,
          stdout,
          stderr,
          pid: null,
          startedAt,
        };
      }
    }

    // Sandbox: run through the provider runtime runner.
    return await requireSandboxRunner(target).execute({
      command: "sh",
      args: ["-lc", command],
      cwd: target.remoteCwd,
      env: options.env,
      timeoutMs: (options.timeoutSec ?? 15) * 1000,
      onLog,
    });
  }

  // Local: fall through to the generic process runner with shell defaults.
  return await runAdapterExecutionTargetProcess(
    runId,
    target,
    "sh",
    ["-lc", command],
    {
      cwd: options.cwd,
      env: options.env,
      timeoutSec: options.timeoutSec ?? 15,
      graceSec: options.graceSec ?? 5,
      onLog,
    },
  );
}
|
||||
|
||||
export async function readAdapterExecutionTargetHomeDir(
|
||||
runId: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
options: AdapterExecutionTargetShellOptions,
|
||||
): Promise<string | null> {
|
||||
const result = await runAdapterExecutionTargetShellCommand(
|
||||
runId,
|
||||
target,
|
||||
'printf %s "$HOME"',
|
||||
options,
|
||||
);
|
||||
const homeDir = result.stdout.trim();
|
||||
return homeDir.length > 0 ? homeDir : null;
|
||||
}
|
||||
|
||||
export async function ensureAdapterExecutionTargetFile(
|
||||
runId: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
filePath: string,
|
||||
options: AdapterExecutionTargetShellOptions,
|
||||
): Promise<void> {
|
||||
await runAdapterExecutionTargetShellCommand(
|
||||
runId,
|
||||
target,
|
||||
`mkdir -p ${shellQuote(path.posix.dirname(filePath))} && : > ${shellQuote(filePath)}`,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetSessionIdentity(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): Record<string, unknown> | null {
|
||||
if (!target || target.kind === "local") return null;
|
||||
if (target.transport === "ssh") return buildRemoteExecutionSessionIdentity(target.spec);
|
||||
return {
|
||||
transport: "sandbox",
|
||||
providerKey: target.providerKey ?? null,
|
||||
environmentId: target.environmentId ?? null,
|
||||
leaseId: target.leaseId ?? null,
|
||||
remoteCwd: target.remoteCwd,
|
||||
...(target.paperclipApiUrl ? { paperclipApiUrl: target.paperclipApiUrl } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetSessionMatches(
|
||||
saved: unknown,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): boolean {
|
||||
if (!target || target.kind === "local") {
|
||||
return Object.keys(parseObject(saved)).length === 0;
|
||||
}
|
||||
if (target.transport === "ssh") return remoteExecutionSessionMatches(saved, target.spec);
|
||||
const current = adapterExecutionTargetSessionIdentity(target);
|
||||
const parsedSaved = parseObject(saved);
|
||||
return (
|
||||
readStringMeta(parsedSaved, "transport") === current?.transport &&
|
||||
readStringMeta(parsedSaved, "providerKey") === current?.providerKey &&
|
||||
readStringMeta(parsedSaved, "environmentId") === current?.environmentId &&
|
||||
readStringMeta(parsedSaved, "leaseId") === current?.leaseId &&
|
||||
readStringMeta(parsedSaved, "remoteCwd") === current?.remoteCwd &&
|
||||
readStringMeta(parsedSaved, "paperclipApiUrl") === (current?.paperclipApiUrl ?? null)
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Parse a serialized/untrusted value into an AdapterExecutionTarget.
 * Returns null when the shape is unrecognized or required fields are missing.
 * Note the sandbox `runner` is not part of the serialized form and is never
 * produced here.
 */
export function parseAdapterExecutionTarget(value: unknown): AdapterExecutionTarget | null {
  const parsed = parseObject(value);
  const kind = readStringMeta(parsed, "kind");

  if (kind === "local") {
    return {
      kind: "local",
      environmentId: readStringMeta(parsed, "environmentId"),
      leaseId: readStringMeta(parsed, "leaseId"),
    };
  }

  // SSH remote targets require a fully parseable SSH spec.
  if (kind === "remote" && readStringMeta(parsed, "transport") === "ssh") {
    const spec = parseSshRemoteExecutionSpec(parseObject(parsed.spec));
    if (!spec) return null;
    return {
      kind: "remote",
      transport: "ssh",
      environmentId: readStringMeta(parsed, "environmentId"),
      leaseId: readStringMeta(parsed, "leaseId"),
      remoteCwd: spec.remoteCwd,
      // The top-level URL takes precedence over the spec's embedded URL.
      paperclipApiUrl: readStringMeta(parsed, "paperclipApiUrl") ?? spec.paperclipApiUrl ?? null,
      spec,
    };
  }

  // Sandbox targets only require a non-empty remote working directory.
  if (kind === "remote" && readStringMeta(parsed, "transport") === "sandbox") {
    const remoteCwd = readStringMeta(parsed, "remoteCwd");
    if (!remoteCwd) return null;
    return {
      kind: "remote",
      transport: "sandbox",
      providerKey: readStringMeta(parsed, "providerKey"),
      environmentId: readStringMeta(parsed, "environmentId"),
      leaseId: readStringMeta(parsed, "leaseId"),
      remoteCwd,
      paperclipApiUrl: readStringMeta(parsed, "paperclipApiUrl"),
      timeoutMs: typeof parsed.timeoutMs === "number" ? parsed.timeoutMs : null,
    };
  }

  return null;
}
|
||||
|
||||
export function adapterExecutionTargetFromRemoteExecution(
|
||||
remoteExecution: unknown,
|
||||
metadata: Pick<AdapterLocalExecutionTarget, "environmentId" | "leaseId"> = {},
|
||||
): AdapterExecutionTarget | null {
|
||||
const parsed = parseObject(remoteExecution);
|
||||
const ssh = parseSshRemoteExecutionSpec(parsed);
|
||||
if (ssh) {
|
||||
return {
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
environmentId: metadata.environmentId ?? null,
|
||||
leaseId: metadata.leaseId ?? null,
|
||||
remoteCwd: ssh.remoteCwd,
|
||||
paperclipApiUrl: ssh.paperclipApiUrl ?? null,
|
||||
spec: ssh,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function readAdapterExecutionTarget(input: {
|
||||
executionTarget?: unknown;
|
||||
legacyRemoteExecution?: unknown;
|
||||
}): AdapterExecutionTarget | null {
|
||||
if (isAdapterExecutionTargetInstance(input.executionTarget)) {
|
||||
return input.executionTarget;
|
||||
}
|
||||
return (
|
||||
parseAdapterExecutionTarget(input.executionTarget) ??
|
||||
adapterExecutionTargetFromRemoteExecution(input.legacyRemoteExecution)
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Prepare the managed runtime for an execution target: sync the workspace
 * and assets onto the target and return handles for restoring the workspace
 * afterwards. Local targets need no preparation and get no-op handles.
 */
export async function prepareAdapterExecutionTargetRuntime(input: {
  target: AdapterExecutionTarget | null | undefined;
  adapterKey: string;
  workspaceLocalDir: string;
  workspaceExclude?: string[];
  preserveAbsentOnRestore?: string[];
  assets?: AdapterManagedRuntimeAsset[];
  installCommand?: string | null;
}): Promise<PreparedAdapterExecutionTargetRuntime> {
  // An absent target means local execution.
  const target = input.target ?? { kind: "local" as const };
  if (target.kind === "local") {
    return {
      target,
      runtimeRootDir: null,
      assetDirs: {},
      restoreWorkspace: async () => {},
    };
  }

  if (target.transport === "ssh") {
    // NOTE(review): workspaceExclude / preserveAbsentOnRestore / installCommand
    // are not forwarded on the SSH path (unlike sandbox) — confirm intended.
    const prepared = await prepareRemoteManagedRuntime({
      spec: target.spec,
      adapterKey: input.adapterKey,
      workspaceLocalDir: input.workspaceLocalDir,
      assets: input.assets,
    });
    return {
      target,
      runtimeRootDir: prepared.runtimeRootDir,
      assetDirs: prepared.assetDirs,
      restoreWorkspace: prepared.restoreWorkspace,
    };
  }

  // Sandbox: preparation runs through the provider runtime runner.
  const prepared = await prepareCommandManagedRuntime({
    runner: requireSandboxRunner(target),
    spec: {
      providerKey: target.providerKey,
      leaseId: target.leaseId,
      remoteCwd: target.remoteCwd,
      timeoutMs: target.timeoutMs,
      paperclipApiUrl: target.paperclipApiUrl,
    },
    adapterKey: input.adapterKey,
    workspaceLocalDir: input.workspaceLocalDir,
    workspaceExclude: input.workspaceExclude,
    preserveAbsentOnRestore: input.preserveAbsentOnRestore,
    assets: input.assets,
    installCommand: input.installCommand,
  });
  return {
    target,
    runtimeRootDir: prepared.runtimeRootDir,
    assetDirs: prepared.assetDirs,
    restoreWorkspace: prepared.restoreWorkspace,
  };
}
|
||||
|
||||
export function runtimeAssetDir(
|
||||
prepared: Pick<PreparedAdapterExecutionTargetRuntime, "assetDirs">,
|
||||
key: string,
|
||||
fallbackRemoteCwd: string,
|
||||
): string {
|
||||
return prepared.assetDirs[key] ?? path.posix.join(fallbackRemoteCwd, ".paperclip-runtime", key);
|
||||
}
|
||||
118
packages/adapter-utils/src/remote-managed-runtime.ts
Normal file
118
packages/adapter-utils/src/remote-managed-runtime.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import path from "node:path";
|
||||
import {
|
||||
type SshRemoteExecutionSpec,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
} from "./ssh.js";
|
||||
|
||||
/** A local directory synced into the remote managed runtime under `key`. */
export interface RemoteManagedRuntimeAsset {
  key: string;
  localDir: string;
  // Dereference symlinks while syncing instead of copying them as links.
  followSymlinks?: boolean;
  exclude?: string[];
}
|
||||
|
||||
/** Result of pushing a workspace + assets to an SSH host. */
export interface PreparedRemoteManagedRuntime {
  spec: SshRemoteExecutionSpec;
  workspaceLocalDir: string;
  workspaceRemoteDir: string;
  // `<workspaceRemoteDir>/.paperclip-runtime/<adapterKey>`.
  runtimeRootDir: string;
  // Asset key -> remote directory it was synced to.
  assetDirs: Record<string, string>;
  // Sync the remote workspace back to the local directory.
  restoreWorkspace(): Promise<void>;
}
|
||||
|
||||
function asObject(value: unknown): Record<string, unknown> {
|
||||
return value && typeof value === "object" && !Array.isArray(value)
|
||||
? (value as Record<string, unknown>)
|
||||
: {};
|
||||
}
|
||||
|
||||
function asString(value: unknown): string {
|
||||
return typeof value === "string" ? value : "";
|
||||
}
|
||||
|
||||
function asNumber(value: unknown): number {
|
||||
return typeof value === "number" ? value : Number(value);
|
||||
}
|
||||
|
||||
export function buildRemoteExecutionSessionIdentity(spec: SshRemoteExecutionSpec | null) {
|
||||
if (!spec) return null;
|
||||
return {
|
||||
transport: "ssh",
|
||||
host: spec.host,
|
||||
port: spec.port,
|
||||
username: spec.username,
|
||||
remoteCwd: spec.remoteCwd,
|
||||
...(spec.paperclipApiUrl ? { paperclipApiUrl: spec.paperclipApiUrl } : {}),
|
||||
} as const;
|
||||
}
|
||||
|
||||
export function remoteExecutionSessionMatches(saved: unknown, current: SshRemoteExecutionSpec | null): boolean {
|
||||
const currentIdentity = buildRemoteExecutionSessionIdentity(current);
|
||||
if (!currentIdentity) return false;
|
||||
|
||||
const parsedSaved = asObject(saved);
|
||||
return (
|
||||
asString(parsedSaved.transport) === currentIdentity.transport &&
|
||||
asString(parsedSaved.host) === currentIdentity.host &&
|
||||
asNumber(parsedSaved.port) === currentIdentity.port &&
|
||||
asString(parsedSaved.username) === currentIdentity.username &&
|
||||
asString(parsedSaved.remoteCwd) === currentIdentity.remoteCwd &&
|
||||
asString(parsedSaved.paperclipApiUrl) === asString(currentIdentity.paperclipApiUrl)
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Prepare a managed runtime on an SSH host: push the workspace, then sync
 * each asset into `<workspace>/.paperclip-runtime/<adapterKey>/<assetKey>`.
 * If any asset sync fails, the workspace is restored before rethrowing so
 * the remote host is not left half-prepared.
 */
export async function prepareRemoteManagedRuntime(input: {
  spec: SshRemoteExecutionSpec;
  adapterKey: string;
  workspaceLocalDir: string;
  workspaceRemoteDir?: string;
  assets?: RemoteManagedRuntimeAsset[];
}): Promise<PreparedRemoteManagedRuntime> {
  // Default the remote workspace to the spec's working directory.
  const workspaceRemoteDir = input.workspaceRemoteDir ?? input.spec.remoteCwd;
  const runtimeRootDir = path.posix.join(workspaceRemoteDir, ".paperclip-runtime", input.adapterKey);

  await prepareWorkspaceForSshExecution({
    spec: input.spec,
    localDir: input.workspaceLocalDir,
    remoteDir: workspaceRemoteDir,
  });

  const assetDirs: Record<string, string> = {};
  try {
    for (const asset of input.assets ?? []) {
      const remoteDir = path.posix.join(runtimeRootDir, asset.key);
      assetDirs[asset.key] = remoteDir;
      await syncDirectoryToSsh({
        spec: input.spec,
        localDir: asset.localDir,
        remoteDir,
        followSymlinks: asset.followSymlinks,
        exclude: asset.exclude,
      });
    }
  } catch (error) {
    // Roll the workspace back so a partial asset sync does not leave the
    // remote host in an inconsistent state, then propagate the failure.
    await restoreWorkspaceFromSshExecution({
      spec: input.spec,
      localDir: input.workspaceLocalDir,
      remoteDir: workspaceRemoteDir,
    });
    throw error;
  }

  return {
    spec: input.spec,
    workspaceLocalDir: input.workspaceLocalDir,
    workspaceRemoteDir,
    runtimeRootDir,
    assetDirs,
    restoreWorkspace: async () => {
      await restoreWorkspaceFromSshExecution({
        spec: input.spec,
        localDir: input.workspaceLocalDir,
        remoteDir: workspaceRemoteDir,
      });
    },
  };
}
|
||||
126
packages/adapter-utils/src/sandbox-managed-runtime.test.ts
Normal file
126
packages/adapter-utils/src/sandbox-managed-runtime.test.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import { lstat, mkdir, mkdtemp, readFile, rm, symlink, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execFile as execFileCallback } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
|
||||
import {
|
||||
mirrorDirectory,
|
||||
prepareSandboxManagedRuntime,
|
||||
type SandboxManagedRuntimeClient,
|
||||
} from "./sandbox-managed-runtime.js";
|
||||
|
||||
const execFile = promisify(execFileCallback);
|
||||
|
||||
describe("sandbox managed runtime", () => {
  // Temp dirs created during tests; removed after each test.
  const cleanupDirs: string[] = [];

  afterEach(async () => {
    while (cleanupDirs.length > 0) {
      const dir = cleanupDirs.pop();
      if (!dir) continue;
      await rm(dir, { recursive: true, force: true }).catch(() => undefined);
    }
  });

  it("preserves excluded local workspace artifacts during restore mirroring", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-sandbox-restore-"));
    cleanupDirs.push(rootDir);
    const sourceDir = path.join(rootDir, "source");
    const targetDir = path.join(rootDir, "target");
    await mkdir(path.join(sourceDir, "src"), { recursive: true });
    await mkdir(path.join(targetDir, ".claude"), { recursive: true });
    await mkdir(path.join(targetDir, ".paperclip-runtime"), { recursive: true });
    await writeFile(path.join(sourceDir, "src", "app.ts"), "export const value = 2;\n", "utf8");
    await writeFile(path.join(targetDir, "stale.txt"), "remove me\n", "utf8");
    await writeFile(path.join(targetDir, ".claude", "settings.json"), "{\"keep\":true}\n", "utf8");
    await writeFile(path.join(targetDir, ".claude.json"), "{\"keep\":true}\n", "utf8");
    await writeFile(path.join(targetDir, ".paperclip-runtime", "state.json"), "{}\n", "utf8");

    // Mirror source over target while keeping entries listed in preserveAbsent.
    await mirrorDirectory(sourceDir, targetDir, {
      preserveAbsent: [".paperclip-runtime", ".claude", ".claude.json"],
    });

    // Source content is mirrored; preserved paths survive; other extras go away.
    await expect(readFile(path.join(targetDir, "src", "app.ts"), "utf8")).resolves.toBe("export const value = 2;\n");
    await expect(readFile(path.join(targetDir, ".claude", "settings.json"), "utf8")).resolves.toBe("{\"keep\":true}\n");
    await expect(readFile(path.join(targetDir, ".claude.json"), "utf8")).resolves.toBe("{\"keep\":true}\n");
    await expect(readFile(path.join(targetDir, ".paperclip-runtime", "state.json"), "utf8")).resolves.toBe("{}\n");
    await expect(readFile(path.join(targetDir, "stale.txt"), "utf8")).rejects.toMatchObject({ code: "ENOENT" });
  });

  it("syncs workspace and assets through a provider-neutral sandbox client", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-sandbox-managed-"));
    cleanupDirs.push(rootDir);
    const localWorkspaceDir = path.join(rootDir, "local-workspace");
    const remoteWorkspaceDir = path.join(rootDir, "remote-workspace");
    const localAssetsDir = path.join(rootDir, "local-assets");
    const linkedAssetPath = path.join(rootDir, "linked-skill.md");
    await mkdir(path.join(localWorkspaceDir, ".claude"), { recursive: true });
    await mkdir(localAssetsDir, { recursive: true });
    await writeFile(path.join(localWorkspaceDir, "README.md"), "local workspace\n", "utf8");
    // AppleDouble file: should be excluded from the synced workspace.
    await writeFile(path.join(localWorkspaceDir, "._README.md"), "appledouble\n", "utf8");
    await writeFile(path.join(localWorkspaceDir, ".claude", "settings.json"), "{\"local\":true}\n", "utf8");
    await writeFile(linkedAssetPath, "skill body\n", "utf8");
    await symlink(linkedAssetPath, path.join(localAssetsDir, "skill.md"));

    // Fake sandbox client backed by the local filesystem.
    const client: SandboxManagedRuntimeClient = {
      makeDir: async (remotePath) => {
        await mkdir(remotePath, { recursive: true });
      },
      writeFile: async (remotePath, bytes) => {
        await mkdir(path.dirname(remotePath), { recursive: true });
        await writeFile(remotePath, Buffer.from(bytes));
      },
      readFile: async (remotePath) => await readFile(remotePath),
      remove: async (remotePath) => {
        await rm(remotePath, { recursive: true, force: true });
      },
      run: async (command) => {
        await execFile("sh", ["-lc", command], {
          maxBuffer: 32 * 1024 * 1024,
        });
      },
    };

    const prepared = await prepareSandboxManagedRuntime({
      spec: {
        transport: "sandbox",
        provider: "test",
        sandboxId: "sandbox-1",
        remoteCwd: remoteWorkspaceDir,
        timeoutMs: 30_000,
        apiKey: null,
      },
      adapterKey: "test-adapter",
      client,
      workspaceLocalDir: localWorkspaceDir,
      workspaceExclude: [".claude"],
      preserveAbsentOnRestore: [".claude"],
      assets: [{
        key: "skills",
        localDir: localAssetsDir,
        followSymlinks: true,
      }],
    });

    // Excluded and AppleDouble entries never reach the remote workspace.
    await expect(readFile(path.join(remoteWorkspaceDir, "README.md"), "utf8")).resolves.toBe("local workspace\n");
    await expect(readFile(path.join(remoteWorkspaceDir, "._README.md"), "utf8")).rejects.toMatchObject({ code: "ENOENT" });
    await expect(readFile(path.join(remoteWorkspaceDir, ".claude", "settings.json"), "utf8")).rejects.toMatchObject({ code: "ENOENT" });
    // Symlinked assets are followed and materialized as regular files.
    await expect(readFile(path.join(prepared.assetDirs.skills, "skill.md"), "utf8")).resolves.toBe("skill body\n");
    expect((await lstat(path.join(prepared.assetDirs.skills, "skill.md"))).isFile()).toBe(true);

    // Mutate both sides, then restore: remote content wins, preserved paths survive.
    await writeFile(path.join(remoteWorkspaceDir, "README.md"), "remote workspace\n", "utf8");
    await writeFile(path.join(remoteWorkspaceDir, "remote-only.txt"), "sync back\n", "utf8");
    await mkdir(path.join(localWorkspaceDir, ".paperclip-runtime"), { recursive: true });
    await writeFile(path.join(localWorkspaceDir, ".paperclip-runtime", "state.json"), "{}\n", "utf8");
    await writeFile(path.join(localWorkspaceDir, "local-stale.txt"), "remove\n", "utf8");
    await prepared.restoreWorkspace();

    await expect(readFile(path.join(localWorkspaceDir, "README.md"), "utf8")).resolves.toBe("remote workspace\n");
    await expect(readFile(path.join(localWorkspaceDir, "remote-only.txt"), "utf8")).resolves.toBe("sync back\n");
    await expect(readFile(path.join(localWorkspaceDir, "local-stale.txt"), "utf8")).rejects.toMatchObject({ code: "ENOENT" });
    await expect(readFile(path.join(localWorkspaceDir, ".claude", "settings.json"), "utf8")).resolves.toBe("{\"local\":true}\n");
    await expect(readFile(path.join(localWorkspaceDir, ".paperclip-runtime", "state.json"), "utf8")).resolves.toBe("{}\n");
  });
});
|
||||
338
packages/adapter-utils/src/sandbox-managed-runtime.ts
Normal file
338
packages/adapter-utils/src/sandbox-managed-runtime.ts
Normal file
@@ -0,0 +1,338 @@
|
||||
import { execFile as execFileCallback } from "node:child_process";
|
||||
import { constants as fsConstants, promises as fs } from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFile = promisify(execFileCallback);
|
||||
|
||||
/** Serialized description of a sandbox execution session. */
export interface SandboxRemoteExecutionSpec {
  transport: "sandbox";
  provider: string;
  sandboxId: string;
  // Working directory inside the sandbox.
  remoteCwd: string;
  // Command timeout in milliseconds; validated as finite and positive.
  timeoutMs: number;
  apiKey: string | null;
  paperclipApiUrl?: string | null;
}
|
||||
|
||||
/** A local directory synced into the sandbox runtime under `key`. */
export interface SandboxManagedRuntimeAsset {
  key: string;
  localDir: string;
  // Dereference symlinks while archiving instead of copying them as links.
  followSymlinks?: boolean;
  exclude?: string[];
}
|
||||
|
||||
/**
 * Minimal provider-neutral file/command interface to a sandbox, used to
 * sync the workspace and assets in and out.
 */
export interface SandboxManagedRuntimeClient {
  makeDir(remotePath: string): Promise<void>;
  writeFile(remotePath: string, bytes: ArrayBuffer): Promise<void>;
  readFile(remotePath: string): Promise<Buffer | Uint8Array | ArrayBuffer>;
  remove(remotePath: string): Promise<void>;
  // Run a shell command inside the sandbox.
  run(command: string, options: { timeoutMs: number }): Promise<void>;
}
|
||||
|
||||
/** Result of pushing a workspace + assets into a sandbox. */
export interface PreparedSandboxManagedRuntime {
  spec: SandboxRemoteExecutionSpec;
  workspaceLocalDir: string;
  workspaceRemoteDir: string;
  // Root of the managed runtime inside the sandbox.
  runtimeRootDir: string;
  // Asset key -> sandbox directory it was synced to.
  assetDirs: Record<string, string>;
  // Sync the sandbox workspace back to the local directory.
  restoreWorkspace(): Promise<void>;
}
|
||||
|
||||
function asObject(value: unknown): Record<string, unknown> {
|
||||
return value && typeof value === "object" && !Array.isArray(value)
|
||||
? (value as Record<string, unknown>)
|
||||
: {};
|
||||
}
|
||||
|
||||
function asString(value: unknown): string {
|
||||
return typeof value === "string" ? value : "";
|
||||
}
|
||||
|
||||
function asNumber(value: unknown): number {
|
||||
return typeof value === "number" ? value : Number(value);
|
||||
}
|
||||
|
||||
function shellQuote(value: string) {
|
||||
return `'${value.replace(/'/g, `'\"'\"'`)}'`;
|
||||
}
|
||||
|
||||
export function parseSandboxRemoteExecutionSpec(value: unknown): SandboxRemoteExecutionSpec | null {
|
||||
const parsed = asObject(value);
|
||||
const transport = asString(parsed.transport).trim();
|
||||
const provider = asString(parsed.provider).trim();
|
||||
const sandboxId = asString(parsed.sandboxId).trim();
|
||||
const remoteCwd = asString(parsed.remoteCwd).trim();
|
||||
const timeoutMs = asNumber(parsed.timeoutMs);
|
||||
|
||||
if (
|
||||
transport !== "sandbox" ||
|
||||
provider.length === 0 ||
|
||||
sandboxId.length === 0 ||
|
||||
remoteCwd.length === 0 ||
|
||||
!Number.isFinite(timeoutMs) ||
|
||||
timeoutMs <= 0
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
transport: "sandbox",
|
||||
provider,
|
||||
sandboxId,
|
||||
remoteCwd,
|
||||
timeoutMs,
|
||||
apiKey: asString(parsed.apiKey).trim() || null,
|
||||
paperclipApiUrl: asString(parsed.paperclipApiUrl).trim() || null,
|
||||
};
|
||||
}
|
||||
|
||||
export function buildSandboxExecutionSessionIdentity(spec: SandboxRemoteExecutionSpec | null) {
|
||||
if (!spec) return null;
|
||||
return {
|
||||
transport: "sandbox",
|
||||
provider: spec.provider,
|
||||
sandboxId: spec.sandboxId,
|
||||
remoteCwd: spec.remoteCwd,
|
||||
...(spec.paperclipApiUrl ? { paperclipApiUrl: spec.paperclipApiUrl } : {}),
|
||||
} as const;
|
||||
}
|
||||
|
||||
export function sandboxExecutionSessionMatches(saved: unknown, current: SandboxRemoteExecutionSpec | null): boolean {
|
||||
const currentIdentity = buildSandboxExecutionSessionIdentity(current);
|
||||
if (!currentIdentity) return false;
|
||||
const parsedSaved = asObject(saved);
|
||||
return (
|
||||
asString(parsedSaved.transport) === currentIdentity.transport &&
|
||||
asString(parsedSaved.provider) === currentIdentity.provider &&
|
||||
asString(parsedSaved.sandboxId) === currentIdentity.sandboxId &&
|
||||
asString(parsedSaved.remoteCwd) === currentIdentity.remoteCwd &&
|
||||
asString(parsedSaved.paperclipApiUrl) === asString(currentIdentity.paperclipApiUrl)
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Create a temp directory with the given prefix, run `fn` with its path,
 * and always remove the directory afterwards (removal errors are swallowed).
 */
async function withTempDir<T>(prefix: string, fn: (dir: string) => Promise<T>): Promise<T> {
  const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix));
  try {
    return await fn(dir);
  } finally {
    await fs.rm(dir, { recursive: true, force: true }).catch(() => undefined);
  }
}
|
||||
|
||||
/**
 * Run `tar` with the given args. COPYFILE_DISABLE suppresses macOS
 * AppleDouble ("._*") entries; output is capped at 32 MiB.
 */
async function execTar(args: string[]): Promise<void> {
  await execFile("tar", args, {
    env: {
      ...process.env,
      COPYFILE_DISABLE: "1",
    },
    maxBuffer: 32 * 1024 * 1024,
  });
}
|
||||
|
||||
async function createTarballFromDirectory(input: {
|
||||
localDir: string;
|
||||
archivePath: string;
|
||||
exclude?: string[];
|
||||
followSymlinks?: boolean;
|
||||
}): Promise<void> {
|
||||
const excludeArgs = ["._*", ...(input.exclude ?? [])].flatMap((entry) => ["--exclude", entry]);
|
||||
await execTar([
|
||||
"-c",
|
||||
...(input.followSymlinks ? ["-h"] : []),
|
||||
"-f",
|
||||
input.archivePath,
|
||||
"-C",
|
||||
input.localDir,
|
||||
...excludeArgs,
|
||||
".",
|
||||
]);
|
||||
}
|
||||
|
||||
/** Extract an archive into `localDir`, creating the directory if needed. */
async function extractTarballToDirectory(input: {
  archivePath: string;
  localDir: string;
}): Promise<void> {
  await fs.mkdir(input.localDir, { recursive: true });
  await execTar(["-xf", input.archivePath, "-C", input.localDir]);
}
|
||||
|
||||
async function walkDirectory(root: string, relative = ""): Promise<string[]> {
|
||||
const current = path.join(root, relative);
|
||||
const entries = await fs.readdir(current, { withFileTypes: true }).catch(() => []);
|
||||
const out: string[] = [];
|
||||
for (const entry of entries) {
|
||||
const nextRelative = relative ? path.posix.join(relative, entry.name) : entry.name;
|
||||
out.push(nextRelative);
|
||||
if (entry.isDirectory()) {
|
||||
out.push(...(await walkDirectory(root, nextRelative)));
|
||||
}
|
||||
}
|
||||
return out.sort((left, right) => right.length - left.length);
|
||||
}
|
||||
|
||||
function isRelativePathOrDescendant(relative: string, candidate: string): boolean {
|
||||
return relative === candidate || relative.startsWith(`${candidate}/`);
|
||||
}
|
||||
|
||||
export async function mirrorDirectory(
|
||||
sourceDir: string,
|
||||
targetDir: string,
|
||||
options: { preserveAbsent?: string[] } = {},
|
||||
): Promise<void> {
|
||||
await fs.mkdir(targetDir, { recursive: true });
|
||||
const preserveAbsent = new Set(options.preserveAbsent ?? []);
|
||||
const shouldPreserveAbsent = (relative: string) =>
|
||||
[...preserveAbsent].some((candidate) => isRelativePathOrDescendant(relative, candidate));
|
||||
|
||||
const sourceEntries = new Set(await walkDirectory(sourceDir));
|
||||
const targetEntries = await walkDirectory(targetDir);
|
||||
for (const relative of targetEntries) {
|
||||
if (shouldPreserveAbsent(relative)) continue;
|
||||
if (!sourceEntries.has(relative)) {
|
||||
await fs.rm(path.join(targetDir, relative), { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
const copyEntry = async (relative: string) => {
|
||||
const sourcePath = path.join(sourceDir, relative);
|
||||
const targetPath = path.join(targetDir, relative);
|
||||
const stats = await fs.lstat(sourcePath);
|
||||
|
||||
if (stats.isDirectory()) {
|
||||
await fs.mkdir(targetPath, { recursive: true });
|
||||
return;
|
||||
}
|
||||
|
||||
await fs.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fs.rm(targetPath, { recursive: true, force: true }).catch(() => undefined);
|
||||
if (stats.isSymbolicLink()) {
|
||||
const linkTarget = await fs.readlink(sourcePath);
|
||||
await fs.symlink(linkTarget, targetPath);
|
||||
return;
|
||||
}
|
||||
|
||||
await fs.copyFile(sourcePath, targetPath, fsConstants.COPYFILE_FICLONE).catch(async () => {
|
||||
await fs.copyFile(sourcePath, targetPath);
|
||||
});
|
||||
await fs.chmod(targetPath, stats.mode);
|
||||
};
|
||||
|
||||
const entries = (await walkDirectory(sourceDir)).sort((left, right) => left.localeCompare(right));
|
||||
for (const relative of entries) {
|
||||
await copyEntry(relative);
|
||||
}
|
||||
}
|
||||
|
||||
function toArrayBuffer(bytes: Buffer): ArrayBuffer {
|
||||
return Uint8Array.from(bytes).buffer;
|
||||
}
|
||||
|
||||
function toBuffer(bytes: Buffer | Uint8Array | ArrayBuffer): Buffer {
|
||||
if (Buffer.isBuffer(bytes)) return bytes;
|
||||
if (bytes instanceof ArrayBuffer) return Buffer.from(bytes);
|
||||
return Buffer.from(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
||||
}
|
||||
|
||||
function tarExcludeFlags(exclude: string[] | undefined): string {
|
||||
return ["._*", ...(exclude ?? [])].map((entry) => `--exclude ${shellQuote(entry)}`).join(" ");
|
||||
}
|
||||
|
||||
/**
 * Uploads the local workspace (and optional assets) into a managed sandbox
 * and returns handles describing the remote layout plus a `restoreWorkspace`
 * callback that mirrors the remote workspace back to the local directory.
 *
 * Remote layout: `<workspaceRemoteDir>/.paperclip-runtime/<adapterKey>` holds
 * the runtime root, uploaded tarballs, and per-asset directories.
 */
export async function prepareSandboxManagedRuntime(input: {
  spec: SandboxRemoteExecutionSpec;
  adapterKey: string;
  client: SandboxManagedRuntimeClient;
  workspaceLocalDir: string;
  workspaceRemoteDir?: string;
  workspaceExclude?: string[];
  preserveAbsentOnRestore?: string[];
  assets?: SandboxManagedRuntimeAsset[];
}): Promise<PreparedSandboxManagedRuntime> {
  // Default the remote workspace to the spec's working directory.
  const workspaceRemoteDir = input.workspaceRemoteDir ?? input.spec.remoteCwd;
  const runtimeRootDir = path.posix.join(workspaceRemoteDir, ".paperclip-runtime", input.adapterKey);

  await withTempDir("paperclip-sandbox-sync-", async (tempDir) => {
    // 1. Tar the local workspace and upload the archive into the runtime root.
    const workspaceTarPath = path.join(tempDir, "workspace.tar");
    await createTarballFromDirectory({
      localDir: input.workspaceLocalDir,
      archivePath: workspaceTarPath,
      exclude: input.workspaceExclude,
    });
    const workspaceTarBytes = await fs.readFile(workspaceTarPath);
    const remoteWorkspaceTar = path.posix.join(runtimeRootDir, "workspace-upload.tar");
    await input.client.makeDir(runtimeRootDir);
    await input.client.writeFile(remoteWorkspaceTar, toArrayBuffer(workspaceTarBytes));
    // 2. Clear the remote workspace except preserved top-level names (the
    // runtime dir itself plus caller-specified entries), then extract.
    const preservedNames = new Set([".paperclip-runtime", ...(input.preserveAbsentOnRestore ?? [])]);
    const findPreserveArgs = [...preservedNames].map((entry) => `! -name ${shellQuote(entry)}`).join(" ");
    await input.client.run(
      `sh -lc ${shellQuote(
        `mkdir -p ${shellQuote(workspaceRemoteDir)} && ` +
          `find ${shellQuote(workspaceRemoteDir)} -mindepth 1 -maxdepth 1 ${findPreserveArgs} -exec rm -rf -- {} + && ` +
          `tar -xf ${shellQuote(remoteWorkspaceTar)} -C ${shellQuote(workspaceRemoteDir)} && ` +
          `rm -f ${shellQuote(remoteWorkspaceTar)}`,
      )}`,
      { timeoutMs: input.spec.timeoutMs },
    );

    // 3. Upload each asset the same way, into its own directory under the
    // runtime root (existing asset dirs are replaced wholesale).
    for (const asset of input.assets ?? []) {
      const assetTarPath = path.join(tempDir, `${asset.key}.tar`);
      await createTarballFromDirectory({
        localDir: asset.localDir,
        archivePath: assetTarPath,
        followSymlinks: asset.followSymlinks,
        exclude: asset.exclude,
      });
      const assetTarBytes = await fs.readFile(assetTarPath);
      const remoteAssetDir = path.posix.join(runtimeRootDir, asset.key);
      const remoteAssetTar = path.posix.join(runtimeRootDir, `${asset.key}-upload.tar`);
      await input.client.writeFile(remoteAssetTar, toArrayBuffer(assetTarBytes));
      await input.client.run(
        `sh -lc ${shellQuote(
          `rm -rf ${shellQuote(remoteAssetDir)} && ` +
            `mkdir -p ${shellQuote(remoteAssetDir)} && ` +
            `tar -xf ${shellQuote(remoteAssetTar)} -C ${shellQuote(remoteAssetDir)} && ` +
            `rm -f ${shellQuote(remoteAssetTar)}`,
        )}`,
        { timeoutMs: input.spec.timeoutMs },
      );
    }
  });

  // Map of asset key -> remote directory, for callers that reference assets.
  const assetDirs = Object.fromEntries(
    (input.assets ?? []).map((asset) => [asset.key, path.posix.join(runtimeRootDir, asset.key)]),
  );

  return {
    spec: input.spec,
    workspaceLocalDir: input.workspaceLocalDir,
    workspaceRemoteDir,
    runtimeRootDir,
    assetDirs,
    // Tars the remote workspace (honoring the same excludes), downloads it,
    // and mirrors it over the local workspace — preserving the runtime dir
    // and caller-specified entries that are absent remotely.
    restoreWorkspace: async () => {
      await withTempDir("paperclip-sandbox-restore-", async (tempDir) => {
        const remoteWorkspaceTar = path.posix.join(runtimeRootDir, "workspace-download.tar");
        await input.client.run(
          `sh -lc ${shellQuote(
            `mkdir -p ${shellQuote(runtimeRootDir)} && ` +
              `tar -cf ${shellQuote(remoteWorkspaceTar)} -C ${shellQuote(workspaceRemoteDir)} ` +
              `${tarExcludeFlags(input.workspaceExclude)} .`,
          )}`,
          { timeoutMs: input.spec.timeoutMs },
        );
        const archiveBytes = await input.client.readFile(remoteWorkspaceTar);
        // Best-effort removal of the remote archive after download.
        await input.client.remove(remoteWorkspaceTar).catch(() => undefined);
        const localArchivePath = path.join(tempDir, "workspace.tar");
        const extractedDir = path.join(tempDir, "workspace");
        await fs.writeFile(localArchivePath, toBuffer(archiveBytes));
        await extractTarballToDirectory({
          archivePath: localArchivePath,
          localDir: extractedDir,
        });
        await mirrorDirectory(extractedDir, input.workspaceLocalDir, {
          preserveAbsent: [".paperclip-runtime", ...(input.preserveAbsentOnRestore ?? [])],
        });
      });
    },
  };
}
|
||||
@@ -1,6 +1,14 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { runChildProcess } from "./server-utils.js";
|
||||
import {
|
||||
applyPaperclipWorkspaceEnv,
|
||||
appendWithByteCap,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
renderPaperclipWakePrompt,
|
||||
runningProcesses,
|
||||
runChildProcess,
|
||||
stringifyPaperclipWakePayload,
|
||||
} from "./server-utils.js";
|
||||
|
||||
function isPidAlive(pid: number) {
|
||||
try {
|
||||
@@ -20,7 +28,37 @@ async function waitForPidExit(pid: number, timeoutMs = 2_000) {
|
||||
return !isPidAlive(pid);
|
||||
}
|
||||
|
||||
async function waitForTextMatch(read: () => string, pattern: RegExp, timeoutMs = 1_000) {
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
while (Date.now() < deadline) {
|
||||
const value = read();
|
||||
const match = value.match(pattern);
|
||||
if (match) return match;
|
||||
await new Promise((resolve) => setTimeout(resolve, 25));
|
||||
}
|
||||
return read().match(pattern);
|
||||
}
|
||||
|
||||
describe("runChildProcess", () => {
|
||||
it("does not arm a timeout when timeoutSec is 0", async () => {
|
||||
const result = await runChildProcess(
|
||||
randomUUID(),
|
||||
process.execPath,
|
||||
["-e", "setTimeout(() => process.stdout.write('done'), 150);"],
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
env: {},
|
||||
timeoutSec: 0,
|
||||
graceSec: 1,
|
||||
onLog: async () => {},
|
||||
},
|
||||
);
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.timedOut).toBe(false);
|
||||
expect(result.stdout).toBe("done");
|
||||
});
|
||||
|
||||
it("waits for onSpawn before sending stdin to the child", async () => {
|
||||
const spawnDelayMs = 150;
|
||||
const startedAt = Date.now();
|
||||
@@ -85,4 +123,359 @@ describe("runChildProcess", () => {
|
||||
|
||||
expect(await waitForPidExit(descendantPid!, 2_000)).toBe(true);
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("cleans up a lingering process group after terminal output and child exit", async () => {
|
||||
const result = await runChildProcess(
|
||||
randomUUID(),
|
||||
process.execPath,
|
||||
[
|
||||
"-e",
|
||||
[
|
||||
"const { spawn } = require('node:child_process');",
|
||||
"const child = spawn(process.execPath, ['-e', 'setInterval(() => {}, 1000)'], { stdio: ['ignore', 'inherit', 'ignore'] });",
|
||||
"process.stdout.write(`descendant:${child.pid}\\n`);",
|
||||
"process.stdout.write(`${JSON.stringify({ type: 'result', result: 'done' })}\\n`);",
|
||||
"setTimeout(() => process.exit(0), 25);",
|
||||
].join(" "),
|
||||
],
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
env: {},
|
||||
timeoutSec: 0,
|
||||
graceSec: 1,
|
||||
onLog: async () => {},
|
||||
terminalResultCleanup: {
|
||||
graceMs: 100,
|
||||
hasTerminalResult: ({ stdout }) => stdout.includes('"type":"result"'),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const descendantPid = Number.parseInt(result.stdout.match(/descendant:(\d+)/)?.[1] ?? "", 10);
|
||||
expect(result.timedOut).toBe(false);
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(Number.isInteger(descendantPid) && descendantPid > 0).toBe(true);
|
||||
expect(await waitForPidExit(descendantPid, 2_000)).toBe(true);
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("cleans up a still-running child after terminal output", async () => {
|
||||
const result = await runChildProcess(
|
||||
randomUUID(),
|
||||
process.execPath,
|
||||
[
|
||||
"-e",
|
||||
[
|
||||
"process.stdout.write(`${JSON.stringify({ type: 'result', result: 'done' })}\\n`);",
|
||||
"setInterval(() => {}, 1000);",
|
||||
].join(" "),
|
||||
],
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
env: {},
|
||||
timeoutSec: 0,
|
||||
graceSec: 1,
|
||||
onLog: async () => {},
|
||||
terminalResultCleanup: {
|
||||
graceMs: 100,
|
||||
hasTerminalResult: ({ stdout }) => stdout.includes('"type":"result"'),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
expect(result.timedOut).toBe(false);
|
||||
expect(result.signal).toBe("SIGTERM");
|
||||
expect(result.stdout).toContain('"type":"result"');
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("does not clean up noisy runs that have no terminal output", async () => {
|
||||
const runId = randomUUID();
|
||||
let observed = "";
|
||||
const resultPromise = runChildProcess(
|
||||
runId,
|
||||
process.execPath,
|
||||
[
|
||||
"-e",
|
||||
[
|
||||
"const { spawn } = require('node:child_process');",
|
||||
"const child = spawn(process.execPath, ['-e', \"setInterval(() => process.stdout.write('noise\\\\n'), 50)\"], { stdio: ['ignore', 'inherit', 'ignore'] });",
|
||||
"process.stdout.write(`descendant:${child.pid}\\n`);",
|
||||
"setTimeout(() => process.exit(0), 25);",
|
||||
].join(" "),
|
||||
],
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
env: {},
|
||||
timeoutSec: 0,
|
||||
graceSec: 1,
|
||||
onLog: async (_stream, chunk) => {
|
||||
observed += chunk;
|
||||
},
|
||||
terminalResultCleanup: {
|
||||
graceMs: 50,
|
||||
hasTerminalResult: ({ stdout }) => stdout.includes('"type":"result"'),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const pidMatch = await waitForTextMatch(() => observed, /descendant:(\d+)/);
|
||||
const descendantPid = Number.parseInt(pidMatch?.[1] ?? "", 10);
|
||||
expect(Number.isInteger(descendantPid) && descendantPid > 0).toBe(true);
|
||||
|
||||
const race = await Promise.race([
|
||||
resultPromise.then(() => "settled" as const),
|
||||
new Promise<"pending">((resolve) => setTimeout(() => resolve("pending"), 300)),
|
||||
]);
|
||||
expect(race).toBe("pending");
|
||||
expect(isPidAlive(descendantPid)).toBe(true);
|
||||
|
||||
const running = runningProcesses.get(runId) as
|
||||
| { child: { kill(signal: NodeJS.Signals): boolean }; processGroupId: number | null }
|
||||
| undefined;
|
||||
try {
|
||||
if (running?.processGroupId) {
|
||||
process.kill(-running.processGroupId, "SIGKILL");
|
||||
} else {
|
||||
running?.child.kill("SIGKILL");
|
||||
}
|
||||
await resultPromise;
|
||||
} finally {
|
||||
runningProcesses.delete(runId);
|
||||
if (isPidAlive(descendantPid)) {
|
||||
try {
|
||||
process.kill(descendantPid, "SIGKILL");
|
||||
} catch {
|
||||
// Ignore cleanup races.
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("renderPaperclipWakePrompt", () => {
|
||||
it("keeps the default local-agent prompt action-oriented", () => {
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("Start actionable work in this heartbeat");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("do not stop at a plan");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("Prefer the smallest verification that proves the change");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("Use child issues");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("instead of polling agents, sessions, or processes");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("Create child issues directly when you know what needs to be done");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("POST /api/issues/{issueId}/interactions");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("kind suggest_tasks, ask_user_questions, or request_confirmation");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("confirmation:{issueId}:plan:{revisionId}");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain("Wait for acceptance before creating implementation subtasks");
|
||||
expect(DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE).toContain(
|
||||
"Respect budget, pause/cancel, approval gates, and company boundaries",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds the execution contract to scoped wake prompts", () => {
|
||||
const prompt = renderPaperclipWakePrompt({
|
||||
reason: "issue_assigned",
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-1580",
|
||||
title: "Update prompts",
|
||||
status: "in_progress",
|
||||
},
|
||||
commentWindow: {
|
||||
requestedCount: 0,
|
||||
includedCount: 0,
|
||||
missingCount: 0,
|
||||
},
|
||||
comments: [],
|
||||
fallbackFetchNeeded: false,
|
||||
});
|
||||
|
||||
expect(prompt).toContain("## Paperclip Wake Payload");
|
||||
expect(prompt).toContain("Execution contract: take concrete action in this heartbeat");
|
||||
expect(prompt).toContain("use child issues instead of polling");
|
||||
expect(prompt).toContain("mark blocked work with the unblock owner/action");
|
||||
});
|
||||
|
||||
it("renders dependency-blocked interaction guidance", () => {
|
||||
const prompt = renderPaperclipWakePrompt({
|
||||
reason: "issue_commented",
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-1703",
|
||||
title: "Blocked parent",
|
||||
status: "todo",
|
||||
},
|
||||
dependencyBlockedInteraction: true,
|
||||
unresolvedBlockerIssueIds: ["blocker-1"],
|
||||
unresolvedBlockerSummaries: [
|
||||
{
|
||||
id: "blocker-1",
|
||||
identifier: "PAP-1723",
|
||||
title: "Finish blocker",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
},
|
||||
],
|
||||
commentWindow: {
|
||||
requestedCount: 1,
|
||||
includedCount: 1,
|
||||
missingCount: 0,
|
||||
},
|
||||
commentIds: ["comment-1"],
|
||||
latestCommentId: "comment-1",
|
||||
comments: [{ id: "comment-1", body: "hello" }],
|
||||
fallbackFetchNeeded: false,
|
||||
});
|
||||
|
||||
expect(prompt).toContain("dependency-blocked interaction: yes");
|
||||
expect(prompt).toContain("respond or triage the human comment");
|
||||
expect(prompt).toContain("PAP-1723 Finish blocker (todo)");
|
||||
});
|
||||
|
||||
it("renders loose review request instructions for execution handoffs", () => {
|
||||
const prompt = renderPaperclipWakePrompt({
|
||||
reason: "execution_review_requested",
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-2011",
|
||||
title: "Review request handoff",
|
||||
status: "in_review",
|
||||
},
|
||||
executionStage: {
|
||||
wakeRole: "reviewer",
|
||||
stageId: "stage-1",
|
||||
stageType: "review",
|
||||
currentParticipant: { type: "agent", agentId: "agent-1" },
|
||||
returnAssignee: { type: "agent", agentId: "agent-2" },
|
||||
reviewRequest: {
|
||||
instructions: "Please focus on edge cases and leave a short risk summary.",
|
||||
},
|
||||
allowedActions: ["approve", "request_changes"],
|
||||
},
|
||||
fallbackFetchNeeded: false,
|
||||
});
|
||||
|
||||
expect(prompt).toContain("Review request instructions:");
|
||||
expect(prompt).toContain("Please focus on edge cases and leave a short risk summary.");
|
||||
expect(prompt).toContain("You are waking as the active reviewer for this issue.");
|
||||
});
|
||||
|
||||
it("includes continuation and child issue summaries in structured wake context", () => {
|
||||
const payload = {
|
||||
reason: "issue_children_completed",
|
||||
issue: {
|
||||
id: "parent-1",
|
||||
identifier: "PAP-100",
|
||||
title: "Integrate child work",
|
||||
status: "in_progress",
|
||||
priority: "medium",
|
||||
},
|
||||
continuationSummary: {
|
||||
key: "continuation-summary",
|
||||
title: "Continuation Summary",
|
||||
body: "# Continuation Summary\n\n## Next Action\n\n- Integrate child outputs.",
|
||||
updatedAt: "2026-04-18T12:00:00.000Z",
|
||||
},
|
||||
livenessContinuation: {
|
||||
attempt: 2,
|
||||
maxAttempts: 2,
|
||||
sourceRunId: "run-1",
|
||||
state: "plan_only",
|
||||
reason: "Run described future work without concrete action evidence",
|
||||
instruction: "Take the first concrete action now.",
|
||||
},
|
||||
childIssueSummaries: [
|
||||
{
|
||||
id: "child-1",
|
||||
identifier: "PAP-101",
|
||||
title: "Implement helper",
|
||||
status: "done",
|
||||
priority: "medium",
|
||||
summary: "Added the helper route and tests.",
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
expect(JSON.parse(stringifyPaperclipWakePayload(payload) ?? "{}")).toMatchObject({
|
||||
continuationSummary: {
|
||||
body: expect.stringContaining("Continuation Summary"),
|
||||
},
|
||||
livenessContinuation: {
|
||||
attempt: 2,
|
||||
maxAttempts: 2,
|
||||
sourceRunId: "run-1",
|
||||
state: "plan_only",
|
||||
instruction: "Take the first concrete action now.",
|
||||
},
|
||||
childIssueSummaries: [
|
||||
{
|
||||
identifier: "PAP-101",
|
||||
summary: "Added the helper route and tests.",
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const prompt = renderPaperclipWakePrompt(payload);
|
||||
expect(prompt).toContain("Issue continuation summary:");
|
||||
expect(prompt).toContain("Integrate child outputs.");
|
||||
expect(prompt).toContain("Run liveness continuation:");
|
||||
expect(prompt).toContain("- attempt: 2/2");
|
||||
expect(prompt).toContain("- source run: run-1");
|
||||
expect(prompt).toContain("- liveness state: plan_only");
|
||||
expect(prompt).toContain("- reason: Run described future work without concrete action evidence");
|
||||
expect(prompt).toContain("- instruction: Take the first concrete action now.");
|
||||
expect(prompt).toContain("Direct child issue summaries:");
|
||||
expect(prompt).toContain("PAP-101 Implement helper (done)");
|
||||
expect(prompt).toContain("Added the helper route and tests.");
|
||||
});
|
||||
});
|
||||
|
||||
// Verifies that workspace metadata is projected into PAPERCLIP_* / AGENT_HOME
// env vars, and that empty or null values are omitted entirely.
describe("applyPaperclipWorkspaceEnv", () => {
  it("adds shared workspace env vars including AGENT_HOME", () => {
    const env = applyPaperclipWorkspaceEnv(
      {},
      {
        workspaceCwd: "/tmp/workspace",
        workspaceSource: "project_primary",
        workspaceStrategy: "git_worktree",
        workspaceId: "workspace-1",
        workspaceRepoUrl: "https://github.com/paperclipai/paperclip.git",
        workspaceRepoRef: "main",
        workspaceBranch: "feature/test",
        workspaceWorktreePath: "/tmp/worktree",
        agentHome: "/tmp/agent-home",
      },
    );

    expect(env).toEqual({
      PAPERCLIP_WORKSPACE_CWD: "/tmp/workspace",
      PAPERCLIP_WORKSPACE_SOURCE: "project_primary",
      PAPERCLIP_WORKSPACE_STRATEGY: "git_worktree",
      PAPERCLIP_WORKSPACE_ID: "workspace-1",
      PAPERCLIP_WORKSPACE_REPO_URL: "https://github.com/paperclipai/paperclip.git",
      PAPERCLIP_WORKSPACE_REPO_REF: "main",
      PAPERCLIP_WORKSPACE_BRANCH: "feature/test",
      PAPERCLIP_WORKSPACE_WORKTREE_PATH: "/tmp/worktree",
      AGENT_HOME: "/tmp/agent-home",
    });
  });

  it("skips empty workspace env values", () => {
    // Empty strings and nulls must not produce env entries at all.
    const env = applyPaperclipWorkspaceEnv(
      {},
      {
        workspaceCwd: "",
        workspaceSource: null,
        agentHome: "",
      },
    );

    expect(env).toEqual({});
  });
});
|
||||
|
||||
// Verifies that trimming to a byte cap never splits a multibyte UTF-8
// sequence: the retained suffix must round-trip through a Buffer unchanged.
describe("appendWithByteCap", () => {
  it("keeps valid UTF-8 when trimming through multibyte text", () => {
    // Cap of 7 bytes forces the trim point into the middle of the em dash.
    const output = appendWithByteCap("prefix ", "hello — world", 7);

    expect(output).not.toContain("\uFFFD");
    expect(Buffer.from(output, "utf8").toString("utf8")).toBe(output);
    expect(Buffer.byteLength(output, "utf8")).toBeLessThanOrEqual(7);
  });
});
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { spawn, type ChildProcess } from "node:child_process";
|
||||
import { constants as fsConstants, promises as fs, type Dirent } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { buildSshSpawnTarget, type SshRemoteExecutionSpec } from "./ssh.js";
|
||||
import type {
|
||||
AdapterSkillEntry,
|
||||
AdapterSkillSnapshot,
|
||||
@@ -16,6 +17,11 @@ export interface RunProcessResult {
|
||||
startedAt: string | null;
|
||||
}
|
||||
|
||||
// Options controlling early cleanup of a child process once its captured
// output contains a terminal result marker.
export interface TerminalResultCleanupOptions {
  // Returns true when the captured output shows the run has produced its
  // final result and the process tree can be reaped.
  hasTerminalResult: (output: { stdout: string; stderr: string }) => boolean;
  // Delay applied before cleanup after a terminal result is observed —
  // presumably to let trailing output flush; TODO confirm at the call site.
  graceMs?: number;
}
||||
|
||||
interface RunningProcess {
|
||||
child: ChildProcess;
|
||||
graceSec: number;
|
||||
@@ -25,10 +31,18 @@ interface RunningProcess {
|
||||
// Resolved command invocation for spawning a run.
interface SpawnTarget {
  command: string;
  args: string[];
  cwd?: string;
  // Optional async teardown hook; NOTE(review): invocation timing is not
  // visible in this chunk — confirm it runs after the process finishes.
  cleanup?: () => Promise<void>;
}

// SSH is currently the only remote execution transport.
type RemoteExecutionSpec = SshRemoteExecutionSpec;
|
||||
|
||||
type ChildProcessWithEvents = ChildProcess & {
|
||||
on(event: "error", listener: (err: Error) => void): ChildProcess;
|
||||
on(
|
||||
event: "exit",
|
||||
listener: (code: number | null, signal: NodeJS.Signals | null) => void,
|
||||
): ChildProcess;
|
||||
on(
|
||||
event: "close",
|
||||
listener: (code: number | null, signal: NodeJS.Signals | null) => void,
|
||||
@@ -60,12 +74,30 @@ function signalRunningProcess(
|
||||
// Registry of currently running child processes, keyed by run id.
export const runningProcesses = new Map<string, RunningProcess>();
// Hard cap on captured stdout/stderr (4 MiB) to bound memory per run.
export const MAX_CAPTURE_BYTES = 4 * 1024 * 1024;
// Cap for output excerpts surfaced to callers (32 KiB).
export const MAX_EXCERPT_BYTES = 32 * 1024;
// Overlap window re-scanned for terminal-result markers across chunk joins.
const TERMINAL_RESULT_SCAN_OVERLAP_CHARS = 64 * 1024;
// Env var names matching this pattern are treated as sensitive.
// NOTE(review): exact redaction/filtering behavior is at call sites not
// visible in this chunk — confirm before relying on it.
const SENSITIVE_ENV_KEY = /(key|token|secret|password|passwd|authorization|cookie)/i;
// Candidate paths from this module to the repo's skills directory —
// presumably source-tree vs. bundled layouts; confirm against build output.
const PAPERCLIP_SKILL_ROOT_RELATIVE_CANDIDATES = [
  "../../skills",
  "../../../../../skills",
];
||||
|
||||
// Default wake prompt for local agents. {{agent.id}} / {{agent.name}} are
// template placeholders substituted at render time. These strings are part
// of runtime behavior (tests assert on exact substrings) — do not reword.
export const DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE = [
  "You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
  "",
  "Execution contract:",
  "- Start actionable work in this heartbeat; do not stop at a plan unless the issue asks for planning.",
  "- Leave durable progress in comments, documents, or work products with a clear next action.",
  "- Prefer the smallest verification that proves the change; do not default to full workspace typecheck/build/test on every heartbeat unless the task scope warrants it.",
  "- Use child issues for parallel or long delegated work instead of polling agents, sessions, or processes.",
  "- If woken by a human comment on a dependency-blocked issue, respond or triage the comment without treating the blocked deliverable work as unblocked.",
  "- Create child issues directly when you know what needs to be done; use issue-thread interactions when the board/user must choose suggested tasks, answer structured questions, or confirm a proposal.",
  "- To ask for that input, create an interaction on the current issue with POST /api/issues/{issueId}/interactions using kind suggest_tasks, ask_user_questions, or request_confirmation. Use continuationPolicy wake_assignee when you need to resume after a response; for request_confirmation this resumes only after acceptance.",
  "- When you intentionally restart follow-up work on a completed assigned issue, include structured `resume: true` with the POST /api/issues/{issueId}/comments or PATCH /api/issues/{issueId} comment payload. Generic agent comments on closed issues are inert by default.",
  "- For plan approval, update the plan document first, then create request_confirmation targeting the latest plan revision with idempotencyKey confirmation:{issueId}:plan:{revisionId}. Wait for acceptance before creating implementation subtasks, and create a fresh confirmation after superseding board/user comments if approval is still needed.",
  "- If blocked, mark the issue blocked and name the unblock owner and action.",
  "- Respect budget, pause/cancel, approval gates, and company boundaries.",
].join("\n");
||||
|
||||
export interface PaperclipSkillEntry {
|
||||
key: string;
|
||||
runtimeName: string;
|
||||
@@ -180,6 +212,22 @@ export function appendWithCap(prev: string, chunk: string, cap = MAX_CAPTURE_BYT
|
||||
return combined.length > cap ? combined.slice(combined.length - cap) : combined;
|
||||
}
|
||||
|
||||
export function appendWithByteCap(prev: string, chunk: string, cap = MAX_CAPTURE_BYTES) {
|
||||
const combined = prev + chunk;
|
||||
const bytes = Buffer.byteLength(combined, "utf8");
|
||||
if (bytes <= cap) return combined;
|
||||
|
||||
const buffer = Buffer.from(combined, "utf8");
|
||||
let start = Math.max(0, bytes - cap);
|
||||
while (start < buffer.length && (buffer[start]! & 0xc0) === 0x80) start += 1;
|
||||
return buffer.subarray(start).toString("utf8");
|
||||
}
|
||||
|
||||
function resumeReadable(readable: { resume: () => unknown; destroyed?: boolean } | null | undefined) {
|
||||
if (!readable || readable.destroyed) return;
|
||||
readable.resume();
|
||||
}
|
||||
|
||||
export function resolvePathValue(obj: Record<string, unknown>, dottedPath: string) {
|
||||
const parts = dottedPath.split(".");
|
||||
let cursor: unknown = obj;
|
||||
@@ -236,6 +284,9 @@ type PaperclipWakeExecutionStage = {
|
||||
stageType: string | null;
|
||||
currentParticipant: PaperclipWakeExecutionPrincipal | null;
|
||||
returnAssignee: PaperclipWakeExecutionPrincipal | null;
|
||||
reviewRequest: {
|
||||
instructions: string;
|
||||
} | null;
|
||||
lastDecisionOutcome: string | null;
|
||||
allowedActions: string[];
|
||||
};
|
||||
@@ -250,11 +301,61 @@ type PaperclipWakeComment = {
|
||||
authorId: string | null;
|
||||
};
|
||||
|
||||
// Continuation summary surfaced to a waking agent; `body` is required
// (normalization drops summaries with an empty body).
type PaperclipWakeContinuationSummary = {
  key: string | null;
  title: string | null;
  body: string;
  bodyTruncated: boolean;
  updatedAt: string | null;
};

// Liveness-continuation metadata attached when a run is re-woken.
type PaperclipWakeLivenessContinuation = {
  attempt: number | null;
  maxAttempts: number | null;
  sourceRunId: string | null;
  state: string | null;
  reason: string | null;
  instruction: string | null;
};

// Summary of a direct child issue included in the wake payload.
type PaperclipWakeChildIssueSummary = {
  id: string | null;
  identifier: string | null;
  title: string | null;
  status: string | null;
  priority: string | null;
  summary: string | null;
};

// Summary of an unresolved blocker issue.
type PaperclipWakeBlockerSummary = {
  id: string | null;
  identifier: string | null;
  title: string | null;
  status: string | null;
  priority: string | null;
};

// Active tree-hold information carried by a wake payload.
type PaperclipWakeTreeHoldSummary = {
  holdId: string | null;
  rootIssueId: string | null;
  mode: string | null;
  reason: string | null;
};
|
||||
|
||||
type PaperclipWakePayload = {
|
||||
reason: string | null;
|
||||
issue: PaperclipWakeIssue | null;
|
||||
checkedOutByHarness: boolean;
|
||||
dependencyBlockedInteraction: boolean;
|
||||
treeHoldInteraction: boolean;
|
||||
activeTreeHold: PaperclipWakeTreeHoldSummary | null;
|
||||
unresolvedBlockerIssueIds: string[];
|
||||
unresolvedBlockerSummaries: PaperclipWakeBlockerSummary[];
|
||||
executionStage: PaperclipWakeExecutionStage | null;
|
||||
continuationSummary: PaperclipWakeContinuationSummary | null;
|
||||
livenessContinuation: PaperclipWakeLivenessContinuation | null;
|
||||
childIssueSummaries: PaperclipWakeChildIssueSummary[];
|
||||
childIssueSummaryTruncated: boolean;
|
||||
commentIds: string[];
|
||||
latestCommentId: string | null;
|
||||
comments: PaperclipWakeComment[];
|
||||
@@ -298,6 +399,71 @@ function normalizePaperclipWakeComment(value: unknown): PaperclipWakeComment | n
|
||||
};
|
||||
}
|
||||
|
||||
function normalizePaperclipWakeContinuationSummary(value: unknown): PaperclipWakeContinuationSummary | null {
|
||||
const summary = parseObject(value);
|
||||
const body = asString(summary.body, "").trim();
|
||||
if (!body) return null;
|
||||
return {
|
||||
key: asString(summary.key, "").trim() || null,
|
||||
title: asString(summary.title, "").trim() || null,
|
||||
body,
|
||||
bodyTruncated: asBoolean(summary.bodyTruncated, false),
|
||||
updatedAt: asString(summary.updatedAt, "").trim() || null,
|
||||
};
|
||||
}
|
||||
|
||||
function normalizePaperclipWakeLivenessContinuation(value: unknown): PaperclipWakeLivenessContinuation | null {
|
||||
const continuation = parseObject(value);
|
||||
const attempt = asNumber(continuation.attempt, 0);
|
||||
const maxAttempts = asNumber(continuation.maxAttempts, 0);
|
||||
const sourceRunId = asString(continuation.sourceRunId, "").trim() || null;
|
||||
const state = asString(continuation.state, "").trim() || null;
|
||||
const reason = asString(continuation.reason, "").trim() || null;
|
||||
const instruction = asString(continuation.instruction, "").trim() || null;
|
||||
if (!attempt && !maxAttempts && !sourceRunId && !state && !reason && !instruction) return null;
|
||||
return {
|
||||
attempt: attempt > 0 ? attempt : null,
|
||||
maxAttempts: maxAttempts > 0 ? maxAttempts : null,
|
||||
sourceRunId,
|
||||
state,
|
||||
reason,
|
||||
instruction,
|
||||
};
|
||||
}
|
||||
|
||||
function normalizePaperclipWakeChildIssueSummary(value: unknown): PaperclipWakeChildIssueSummary | null {
|
||||
const child = parseObject(value);
|
||||
const id = asString(child.id, "").trim() || null;
|
||||
const identifier = asString(child.identifier, "").trim() || null;
|
||||
const title = asString(child.title, "").trim() || null;
|
||||
const status = asString(child.status, "").trim() || null;
|
||||
const priority = asString(child.priority, "").trim() || null;
|
||||
const summary = asString(child.summary, "").trim() || null;
|
||||
if (!id && !identifier && !title && !status && !summary) return null;
|
||||
return { id, identifier, title, status, priority, summary };
|
||||
}
|
||||
|
||||
function normalizePaperclipWakeBlockerSummary(value: unknown): PaperclipWakeBlockerSummary | null {
|
||||
const blocker = parseObject(value);
|
||||
const id = asString(blocker.id, "").trim() || null;
|
||||
const identifier = asString(blocker.identifier, "").trim() || null;
|
||||
const title = asString(blocker.title, "").trim() || null;
|
||||
const status = asString(blocker.status, "").trim() || null;
|
||||
const priority = asString(blocker.priority, "").trim() || null;
|
||||
if (!id && !identifier && !title && !status) return null;
|
||||
return { id, identifier, title, status, priority };
|
||||
}
|
||||
|
||||
function normalizePaperclipWakeTreeHoldSummary(value: unknown): PaperclipWakeTreeHoldSummary | null {
|
||||
const hold = parseObject(value);
|
||||
const holdId = asString(hold.holdId, "").trim() || null;
|
||||
const rootIssueId = asString(hold.rootIssueId, "").trim() || null;
|
||||
const mode = asString(hold.mode, "").trim() || null;
|
||||
const reason = asString(hold.reason, "").trim() || null;
|
||||
if (!holdId && !rootIssueId && !mode && !reason) return null;
|
||||
return { holdId, rootIssueId, mode, reason };
|
||||
}
|
||||
|
||||
function normalizePaperclipWakeExecutionPrincipal(value: unknown): PaperclipWakeExecutionPrincipal | null {
|
||||
const principal = parseObject(value);
|
||||
const typeRaw = asString(principal.type, "").trim().toLowerCase();
|
||||
@@ -323,11 +489,14 @@ function normalizePaperclipWakeExecutionStage(value: unknown): PaperclipWakeExec
|
||||
: [];
|
||||
const currentParticipant = normalizePaperclipWakeExecutionPrincipal(stage.currentParticipant);
|
||||
const returnAssignee = normalizePaperclipWakeExecutionPrincipal(stage.returnAssignee);
|
||||
const reviewRequestRaw = parseObject(stage.reviewRequest);
|
||||
const reviewInstructions = asString(reviewRequestRaw.instructions, "").trim();
|
||||
const reviewRequest = reviewInstructions ? { instructions: reviewInstructions } : null;
|
||||
const stageId = asString(stage.stageId, "").trim() || null;
|
||||
const stageType = asString(stage.stageType, "").trim() || null;
|
||||
const lastDecisionOutcome = asString(stage.lastDecisionOutcome, "").trim() || null;
|
||||
|
||||
if (!wakeRole && !stageId && !stageType && !currentParticipant && !returnAssignee && !lastDecisionOutcome && allowedActions.length === 0) {
|
||||
if (!wakeRole && !stageId && !stageType && !currentParticipant && !returnAssignee && !reviewRequest && !lastDecisionOutcome && allowedActions.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -337,6 +506,7 @@ function normalizePaperclipWakeExecutionStage(value: unknown): PaperclipWakeExec
|
||||
stageType,
|
||||
currentParticipant,
|
||||
returnAssignee,
|
||||
reviewRequest,
|
||||
lastDecisionOutcome,
|
||||
allowedActions,
|
||||
};
|
||||
@@ -356,8 +526,26 @@ export function normalizePaperclipWakePayload(value: unknown): PaperclipWakePayl
|
||||
.map((entry) => entry.trim())
|
||||
: [];
|
||||
const executionStage = normalizePaperclipWakeExecutionStage(payload.executionStage);
|
||||
const continuationSummary = normalizePaperclipWakeContinuationSummary(payload.continuationSummary);
|
||||
const livenessContinuation = normalizePaperclipWakeLivenessContinuation(payload.livenessContinuation);
|
||||
const childIssueSummaries = Array.isArray(payload.childIssueSummaries)
|
||||
? payload.childIssueSummaries
|
||||
.map((entry) => normalizePaperclipWakeChildIssueSummary(entry))
|
||||
.filter((entry): entry is PaperclipWakeChildIssueSummary => Boolean(entry))
|
||||
: [];
|
||||
const unresolvedBlockerIssueIds = Array.isArray(payload.unresolvedBlockerIssueIds)
|
||||
? payload.unresolvedBlockerIssueIds
|
||||
.map((entry) => asString(entry, "").trim())
|
||||
.filter(Boolean)
|
||||
: [];
|
||||
const unresolvedBlockerSummaries = Array.isArray(payload.unresolvedBlockerSummaries)
|
||||
? payload.unresolvedBlockerSummaries
|
||||
.map((entry) => normalizePaperclipWakeBlockerSummary(entry))
|
||||
.filter((entry): entry is PaperclipWakeBlockerSummary => Boolean(entry))
|
||||
: [];
|
||||
|
||||
if (comments.length === 0 && commentIds.length === 0 && !executionStage && !normalizePaperclipWakeIssue(payload.issue)) {
|
||||
const activeTreeHold = normalizePaperclipWakeTreeHoldSummary(payload.activeTreeHold);
|
||||
if (comments.length === 0 && commentIds.length === 0 && childIssueSummaries.length === 0 && unresolvedBlockerIssueIds.length === 0 && unresolvedBlockerSummaries.length === 0 && !activeTreeHold && !executionStage && !continuationSummary && !livenessContinuation && !normalizePaperclipWakeIssue(payload.issue)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -365,7 +553,16 @@ export function normalizePaperclipWakePayload(value: unknown): PaperclipWakePayl
|
||||
reason: asString(payload.reason, "").trim() || null,
|
||||
issue: normalizePaperclipWakeIssue(payload.issue),
|
||||
checkedOutByHarness: asBoolean(payload.checkedOutByHarness, false),
|
||||
dependencyBlockedInteraction: asBoolean(payload.dependencyBlockedInteraction, false),
|
||||
treeHoldInteraction: asBoolean(payload.treeHoldInteraction, false),
|
||||
activeTreeHold,
|
||||
unresolvedBlockerIssueIds,
|
||||
unresolvedBlockerSummaries,
|
||||
executionStage,
|
||||
continuationSummary,
|
||||
livenessContinuation,
|
||||
childIssueSummaries,
|
||||
childIssueSummaryTruncated: asBoolean(payload.childIssueSummaryTruncated, false),
|
||||
commentIds,
|
||||
latestCommentId: asString(payload.latestCommentId, "").trim() || null,
|
||||
comments,
|
||||
@@ -406,6 +603,8 @@ export function renderPaperclipWakePrompt(
|
||||
"Focus on the new wake delta below and continue the current task without restating the full heartbeat boilerplate.",
|
||||
"Fetch the API thread only when `fallbackFetchNeeded` is true or you need broader history than this batch.",
|
||||
"",
|
||||
"Execution contract: take concrete action in this heartbeat when the issue is actionable; do not stop at a plan unless planning was requested. Leave durable progress with a clear next action, use child issues instead of polling for long or parallel work, and mark blocked work with the unblock owner/action.",
|
||||
"",
|
||||
`- reason: ${normalized.reason ?? "unknown"}`,
|
||||
`- issue: ${normalized.issue?.identifier ?? normalized.issue?.id ?? "unknown"}${normalized.issue?.title ? ` ${normalized.issue.title}` : ""}`,
|
||||
`- pending comments: ${normalized.includedCount}/${normalized.requestedCount}`,
|
||||
@@ -421,6 +620,8 @@ export function renderPaperclipWakePrompt(
|
||||
"Use this inline wake data first before refetching the issue thread.",
|
||||
"Only fetch the API thread when `fallbackFetchNeeded` is true or you need broader history than this batch.",
|
||||
"",
|
||||
"Execution contract: take concrete action in this heartbeat when the issue is actionable; do not stop at a plan unless planning was requested. Leave durable progress with a clear next action, use child issues instead of polling for long or parallel work, and mark blocked work with the unblock owner/action.",
|
||||
"",
|
||||
`- reason: ${normalized.reason ?? "unknown"}`,
|
||||
`- issue: ${normalized.issue?.identifier ?? normalized.issue?.id ?? "unknown"}${normalized.issue?.title ? ` ${normalized.issue.title}` : ""}`,
|
||||
`- pending comments: ${normalized.includedCount}/${normalized.requestedCount}`,
|
||||
@@ -437,6 +638,26 @@ export function renderPaperclipWakePrompt(
|
||||
if (normalized.checkedOutByHarness) {
|
||||
lines.push("- checkout: already claimed by the harness for this run");
|
||||
}
|
||||
if (normalized.dependencyBlockedInteraction) {
|
||||
lines.push("- dependency-blocked interaction: yes");
|
||||
lines.push("- execution scope: respond or triage the human comment; do not treat blocker-dependent deliverable work as unblocked");
|
||||
if (normalized.unresolvedBlockerSummaries.length > 0) {
|
||||
const blockers = normalized.unresolvedBlockerSummaries
|
||||
.map((blocker) => `${blocker.identifier ?? blocker.id ?? "unknown"}${blocker.title ? ` ${blocker.title}` : ""}${blocker.status ? ` (${blocker.status})` : ""}`)
|
||||
.join("; ");
|
||||
lines.push(`- unresolved blockers: ${blockers}`);
|
||||
} else if (normalized.unresolvedBlockerIssueIds.length > 0) {
|
||||
lines.push(`- unresolved blocker issue ids: ${normalized.unresolvedBlockerIssueIds.join(", ")}`);
|
||||
}
|
||||
}
|
||||
if (normalized.treeHoldInteraction) {
|
||||
lines.push("- tree-hold interaction: yes");
|
||||
lines.push("- execution scope: respond or triage the human comment; the subtree remains paused until an explicit resume action");
|
||||
if (normalized.activeTreeHold) {
|
||||
const hold = normalized.activeTreeHold;
|
||||
lines.push(`- active tree hold: ${hold.holdId ?? "unknown"}${hold.rootIssueId ? ` rooted at ${hold.rootIssueId}` : ""}${hold.mode ? ` (${hold.mode})` : ""}`);
|
||||
}
|
||||
}
|
||||
if (normalized.missingCount > 0) {
|
||||
lines.push(`- omitted comments: ${normalized.missingCount}`);
|
||||
}
|
||||
@@ -452,6 +673,13 @@ export function renderPaperclipWakePrompt(
|
||||
if (executionStage.allowedActions.length > 0) {
|
||||
lines.push(`- allowed actions: ${executionStage.allowedActions.join(", ")}`);
|
||||
}
|
||||
if (executionStage.reviewRequest) {
|
||||
lines.push(
|
||||
"",
|
||||
"Review request instructions:",
|
||||
executionStage.reviewRequest.instructions,
|
||||
);
|
||||
}
|
||||
lines.push("");
|
||||
if (executionStage.wakeRole === "reviewer" || executionStage.wakeRole === "approver") {
|
||||
lines.push(
|
||||
@@ -470,6 +698,55 @@ export function renderPaperclipWakePrompt(
|
||||
}
|
||||
}
|
||||
|
||||
if (normalized.continuationSummary) {
|
||||
lines.push(
|
||||
"",
|
||||
"Issue continuation summary:",
|
||||
normalized.continuationSummary.body,
|
||||
);
|
||||
if (normalized.continuationSummary.bodyTruncated) {
|
||||
lines.push("[continuation summary truncated]");
|
||||
}
|
||||
}
|
||||
|
||||
if (normalized.livenessContinuation) {
|
||||
const continuation = normalized.livenessContinuation;
|
||||
lines.push("", "Run liveness continuation:");
|
||||
if (continuation.attempt) {
|
||||
lines.push(
|
||||
`- attempt: ${continuation.attempt}${continuation.maxAttempts ? `/${continuation.maxAttempts}` : ""}`,
|
||||
);
|
||||
}
|
||||
if (continuation.sourceRunId) {
|
||||
lines.push(`- source run: ${continuation.sourceRunId}`);
|
||||
}
|
||||
if (continuation.state) {
|
||||
lines.push(`- liveness state: ${continuation.state}`);
|
||||
}
|
||||
if (continuation.reason) {
|
||||
lines.push(`- reason: ${continuation.reason}`);
|
||||
}
|
||||
if (continuation.instruction) {
|
||||
lines.push(`- instruction: ${continuation.instruction}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (normalized.childIssueSummaries.length > 0) {
|
||||
lines.push("", "Direct child issue summaries:");
|
||||
for (const child of normalized.childIssueSummaries) {
|
||||
const label = child.identifier ?? child.id ?? "unknown";
|
||||
lines.push(
|
||||
`- ${label}${child.title ? ` ${child.title}` : ""}${child.status ? ` (${child.status})` : ""}`,
|
||||
);
|
||||
if (child.summary) {
|
||||
lines.push(` ${child.summary}`);
|
||||
}
|
||||
}
|
||||
if (normalized.childIssueSummaryTruncated) {
|
||||
lines.push("[child issue summaries truncated]");
|
||||
}
|
||||
}
|
||||
|
||||
if (normalized.checkedOutByHarness) {
|
||||
lines.push(
|
||||
"",
|
||||
@@ -550,11 +827,61 @@ export function buildPaperclipEnv(agent: { id: string; companyId: string }): Rec
|
||||
process.env.PAPERCLIP_LISTEN_HOST ?? process.env.HOST ?? "localhost",
|
||||
);
|
||||
const runtimePort = process.env.PAPERCLIP_LISTEN_PORT ?? process.env.PORT ?? "3100";
|
||||
const apiUrl = process.env.PAPERCLIP_API_URL ?? `http://${runtimeHost}:${runtimePort}`;
|
||||
const apiUrl =
|
||||
process.env.PAPERCLIP_RUNTIME_API_URL ??
|
||||
process.env.PAPERCLIP_API_URL ??
|
||||
`http://${runtimeHost}:${runtimePort}`;
|
||||
vars.PAPERCLIP_API_URL = apiUrl;
|
||||
return vars;
|
||||
}
|
||||
|
||||
export function applyPaperclipWorkspaceEnv(
|
||||
env: Record<string, string>,
|
||||
input: {
|
||||
workspaceCwd?: string | null;
|
||||
workspaceSource?: string | null;
|
||||
workspaceStrategy?: string | null;
|
||||
workspaceId?: string | null;
|
||||
workspaceRepoUrl?: string | null;
|
||||
workspaceRepoRef?: string | null;
|
||||
workspaceBranch?: string | null;
|
||||
workspaceWorktreePath?: string | null;
|
||||
agentHome?: string | null;
|
||||
},
|
||||
): Record<string, string> {
|
||||
const mappings = [
|
||||
["PAPERCLIP_WORKSPACE_CWD", input.workspaceCwd],
|
||||
["PAPERCLIP_WORKSPACE_SOURCE", input.workspaceSource],
|
||||
["PAPERCLIP_WORKSPACE_STRATEGY", input.workspaceStrategy],
|
||||
["PAPERCLIP_WORKSPACE_ID", input.workspaceId],
|
||||
["PAPERCLIP_WORKSPACE_REPO_URL", input.workspaceRepoUrl],
|
||||
["PAPERCLIP_WORKSPACE_REPO_REF", input.workspaceRepoRef],
|
||||
["PAPERCLIP_WORKSPACE_BRANCH", input.workspaceBranch],
|
||||
["PAPERCLIP_WORKSPACE_WORKTREE_PATH", input.workspaceWorktreePath],
|
||||
["AGENT_HOME", input.agentHome],
|
||||
] as const;
|
||||
|
||||
for (const [key, value] of mappings) {
|
||||
if (typeof value === "string" && value.length > 0) {
|
||||
env[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return env;
|
||||
}
|
||||
|
||||
export function sanitizeInheritedPaperclipEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
||||
const env: NodeJS.ProcessEnv = { ...baseEnv };
|
||||
for (const key of Object.keys(env)) {
|
||||
if (!key.startsWith("PAPERCLIP_")) continue;
|
||||
if (key === "PAPERCLIP_RUNTIME_API_URL") continue;
|
||||
if (key === "PAPERCLIP_LISTEN_HOST") continue;
|
||||
if (key === "PAPERCLIP_LISTEN_PORT") continue;
|
||||
delete env[key];
|
||||
}
|
||||
return env;
|
||||
}
|
||||
|
||||
export function defaultPathForPlatform() {
|
||||
if (process.platform === "win32") {
|
||||
return "C:\\Windows\\System32;C:\\Windows;C:\\Windows\\System32\\Wbem";
|
||||
@@ -603,7 +930,18 @@ async function resolveCommandPath(command: string, cwd: string, env: NodeJS.Proc
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function resolveCommandForLogs(command: string, cwd: string, env: NodeJS.ProcessEnv): Promise<string> {
|
||||
export async function resolveCommandForLogs(
|
||||
command: string,
|
||||
cwd: string,
|
||||
env: NodeJS.ProcessEnv,
|
||||
options: {
|
||||
remoteExecution?: RemoteExecutionSpec | null;
|
||||
} = {},
|
||||
): Promise<string> {
|
||||
const remote = options.remoteExecution ?? null;
|
||||
if (remote) {
|
||||
return `ssh://${remote.username}@${remote.host}:${remote.port}/${remote.remoteCwd} :: ${command}`;
|
||||
}
|
||||
return (await resolveCommandPath(command, cwd, env)) ?? command;
|
||||
}
|
||||
|
||||
@@ -623,7 +961,33 @@ async function resolveSpawnTarget(
|
||||
args: string[],
|
||||
cwd: string,
|
||||
env: NodeJS.ProcessEnv,
|
||||
options: {
|
||||
remoteExecution?: RemoteExecutionSpec | null;
|
||||
remoteEnv?: Record<string, string> | null;
|
||||
} = {},
|
||||
): Promise<SpawnTarget> {
|
||||
const remote = options.remoteExecution ?? null;
|
||||
if (remote) {
|
||||
const sshResolved = await resolveCommandPath("ssh", process.cwd(), env);
|
||||
if (!sshResolved) {
|
||||
throw new Error('Command not found in PATH: "ssh"');
|
||||
}
|
||||
const spawnTarget = await buildSshSpawnTarget({
|
||||
spec: remote,
|
||||
command,
|
||||
args,
|
||||
env: Object.fromEntries(
|
||||
Object.entries(options.remoteEnv ?? {}).filter((entry): entry is [string, string] => typeof entry[1] === "string"),
|
||||
),
|
||||
});
|
||||
return {
|
||||
command: sshResolved,
|
||||
args: spawnTarget.args,
|
||||
cwd: process.cwd(),
|
||||
cleanup: spawnTarget.cleanup,
|
||||
};
|
||||
}
|
||||
|
||||
const resolved = await resolveCommandPath(command, cwd, env);
|
||||
const executable = resolved ?? command;
|
||||
|
||||
@@ -708,6 +1072,20 @@ export async function resolvePaperclipSkillsDir(
|
||||
return null;
|
||||
}
|
||||
|
||||
async function readSkillRequired(skillDir: string): Promise<boolean> {
|
||||
try {
|
||||
const content = await fs.readFile(path.join(skillDir, "SKILL.md"), "utf8");
|
||||
const normalized = content.replace(/\r\n/g, "\n");
|
||||
if (!normalized.startsWith("---\n")) return true;
|
||||
const closing = normalized.indexOf("\n---\n", 4);
|
||||
if (closing < 0) return true;
|
||||
const frontmatter = normalized.slice(4, closing);
|
||||
return !/^\s*required\s*:\s*false\s*$/m.test(frontmatter);
|
||||
} catch {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
export async function listPaperclipSkillEntries(
|
||||
moduleDir: string,
|
||||
additionalCandidates: string[] = [],
|
||||
@@ -717,15 +1095,20 @@ export async function listPaperclipSkillEntries(
|
||||
|
||||
try {
|
||||
const entries = await fs.readdir(root, { withFileTypes: true });
|
||||
return entries
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => ({
|
||||
const dirs = entries.filter((entry) => entry.isDirectory());
|
||||
return Promise.all(dirs.map(async (entry) => {
|
||||
const skillDir = path.join(root, entry.name);
|
||||
const required = await readSkillRequired(skillDir);
|
||||
return {
|
||||
key: `paperclipai/paperclip/${entry.name}`,
|
||||
runtimeName: entry.name,
|
||||
source: path.join(root, entry.name),
|
||||
required: true,
|
||||
requiredReason: "Bundled Paperclip skills are always available for local adapters.",
|
||||
}));
|
||||
source: skillDir,
|
||||
required,
|
||||
requiredReason: required
|
||||
? "Bundled Paperclip skills are always available for local adapters."
|
||||
: null,
|
||||
};
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
@@ -1050,7 +1433,19 @@ export async function removeMaintainerOnlySkillSymlinks(
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureCommandResolvable(command: string, cwd: string, env: NodeJS.ProcessEnv) {
|
||||
export async function ensureCommandResolvable(
|
||||
command: string,
|
||||
cwd: string,
|
||||
env: NodeJS.ProcessEnv,
|
||||
options: {
|
||||
remoteExecution?: RemoteExecutionSpec | null;
|
||||
} = {},
|
||||
) {
|
||||
if (options.remoteExecution) {
|
||||
const resolvedSsh = await resolveCommandPath("ssh", process.cwd(), env);
|
||||
if (resolvedSsh) return;
|
||||
throw new Error('Command not found in PATH: "ssh"');
|
||||
}
|
||||
const resolved = await resolveCommandPath(command, cwd, env);
|
||||
if (resolved) return;
|
||||
if (command.includes("/") || command.includes("\\")) {
|
||||
@@ -1072,13 +1467,17 @@ export async function runChildProcess(
|
||||
onLog: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
onLogError?: (err: unknown, runId: string, message: string) => void;
|
||||
onSpawn?: (meta: { pid: number; processGroupId: number | null; startedAt: string }) => Promise<void>;
|
||||
terminalResultCleanup?: TerminalResultCleanupOptions;
|
||||
stdin?: string;
|
||||
remoteExecution?: RemoteExecutionSpec | null;
|
||||
},
|
||||
): Promise<RunProcessResult> {
|
||||
const onLogError = opts.onLogError ?? ((err, id, msg) => console.warn({ err, runId: id }, msg));
|
||||
|
||||
return new Promise<RunProcessResult>((resolve, reject) => {
|
||||
const rawMerged: NodeJS.ProcessEnv = { ...process.env, ...opts.env };
|
||||
const rawMerged: NodeJS.ProcessEnv = {
|
||||
...sanitizeInheritedPaperclipEnv(process.env),
|
||||
...opts.env,
|
||||
};
|
||||
|
||||
// Strip Claude Code nesting-guard env vars so spawned `claude` processes
|
||||
// don't refuse to start with "cannot be launched inside another session".
|
||||
@@ -1096,10 +1495,13 @@ export async function runChildProcess(
|
||||
}
|
||||
|
||||
const mergedEnv = ensurePathInEnv(rawMerged);
|
||||
void resolveSpawnTarget(command, args, opts.cwd, mergedEnv)
|
||||
void resolveSpawnTarget(command, args, opts.cwd, mergedEnv, {
|
||||
remoteExecution: opts.remoteExecution ?? null,
|
||||
remoteEnv: opts.remoteExecution ? opts.env : null,
|
||||
})
|
||||
.then((target) => {
|
||||
const child = spawn(target.command, target.args, {
|
||||
cwd: opts.cwd,
|
||||
cwd: target.cwd ?? opts.cwd,
|
||||
env: mergedEnv,
|
||||
detached: process.platform !== "win32",
|
||||
shell: false,
|
||||
@@ -1121,11 +1523,60 @@ export async function runChildProcess(
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
let logChain: Promise<void> = Promise.resolve();
|
||||
let terminalResultSeen = false;
|
||||
let terminalCleanupStarted = false;
|
||||
let terminalCleanupTimer: NodeJS.Timeout | null = null;
|
||||
let terminalCleanupKillTimer: NodeJS.Timeout | null = null;
|
||||
let terminalResultStdoutScanOffset = 0;
|
||||
let terminalResultStderrScanOffset = 0;
|
||||
|
||||
const clearTerminalCleanupTimers = () => {
|
||||
if (terminalCleanupTimer) clearTimeout(terminalCleanupTimer);
|
||||
if (terminalCleanupKillTimer) clearTimeout(terminalCleanupKillTimer);
|
||||
terminalCleanupTimer = null;
|
||||
terminalCleanupKillTimer = null;
|
||||
};
|
||||
|
||||
const maybeArmTerminalResultCleanup = () => {
|
||||
const terminalCleanup = opts.terminalResultCleanup;
|
||||
if (!terminalCleanup || terminalCleanupStarted || timedOut) return;
|
||||
if (!terminalResultSeen) {
|
||||
const stdoutStart = Math.max(0, terminalResultStdoutScanOffset - TERMINAL_RESULT_SCAN_OVERLAP_CHARS);
|
||||
const stderrStart = Math.max(0, terminalResultStderrScanOffset - TERMINAL_RESULT_SCAN_OVERLAP_CHARS);
|
||||
const scanOutput = {
|
||||
stdout: stdout.slice(stdoutStart),
|
||||
stderr: stderr.slice(stderrStart),
|
||||
};
|
||||
terminalResultStdoutScanOffset = stdout.length;
|
||||
terminalResultStderrScanOffset = stderr.length;
|
||||
if (scanOutput.stdout.length === 0 && scanOutput.stderr.length === 0) return;
|
||||
try {
|
||||
terminalResultSeen = terminalCleanup.hasTerminalResult(scanOutput);
|
||||
} catch (err) {
|
||||
onLogError(err, runId, "failed to inspect terminal adapter output");
|
||||
}
|
||||
}
|
||||
if (!terminalResultSeen) return;
|
||||
|
||||
if (terminalCleanupTimer) return;
|
||||
const graceMs = Math.max(0, terminalCleanup.graceMs ?? 5_000);
|
||||
terminalCleanupTimer = setTimeout(() => {
|
||||
terminalCleanupTimer = null;
|
||||
if (terminalCleanupStarted || timedOut) return;
|
||||
terminalCleanupStarted = true;
|
||||
signalRunningProcess({ child, processGroupId }, "SIGTERM");
|
||||
terminalCleanupKillTimer = setTimeout(() => {
|
||||
terminalCleanupKillTimer = null;
|
||||
signalRunningProcess({ child, processGroupId }, "SIGKILL");
|
||||
}, Math.max(1, opts.graceSec) * 1000);
|
||||
}, graceMs);
|
||||
};
|
||||
|
||||
const timeout =
|
||||
opts.timeoutSec > 0
|
||||
? setTimeout(() => {
|
||||
timedOut = true;
|
||||
clearTerminalCleanupTimers();
|
||||
signalRunningProcess({ child, processGroupId }, "SIGTERM");
|
||||
setTimeout(() => {
|
||||
signalRunningProcess({ child, processGroupId }, "SIGKILL");
|
||||
@@ -1134,19 +1585,35 @@ export async function runChildProcess(
|
||||
: null;
|
||||
|
||||
child.stdout?.on("data", (chunk: unknown) => {
|
||||
const readable = child.stdout;
|
||||
if (!readable) return;
|
||||
readable.pause();
|
||||
const text = String(chunk);
|
||||
stdout = appendWithCap(stdout, text);
|
||||
maybeArmTerminalResultCleanup();
|
||||
logChain = logChain
|
||||
.then(() => opts.onLog("stdout", text))
|
||||
.catch((err) => onLogError(err, runId, "failed to append stdout log chunk"));
|
||||
.catch((err) => onLogError(err, runId, "failed to append stdout log chunk"))
|
||||
.finally(() => {
|
||||
maybeArmTerminalResultCleanup();
|
||||
resumeReadable(readable);
|
||||
});
|
||||
});
|
||||
|
||||
child.stderr?.on("data", (chunk: unknown) => {
|
||||
const readable = child.stderr;
|
||||
if (!readable) return;
|
||||
readable.pause();
|
||||
const text = String(chunk);
|
||||
stderr = appendWithCap(stderr, text);
|
||||
maybeArmTerminalResultCleanup();
|
||||
logChain = logChain
|
||||
.then(() => opts.onLog("stderr", text))
|
||||
.catch((err) => onLogError(err, runId, "failed to append stderr log chunk"));
|
||||
.catch((err) => onLogError(err, runId, "failed to append stderr log chunk"))
|
||||
.finally(() => {
|
||||
maybeArmTerminalResultCleanup();
|
||||
resumeReadable(readable);
|
||||
});
|
||||
});
|
||||
|
||||
const stdin = child.stdin;
|
||||
@@ -1160,7 +1627,9 @@ export async function runChildProcess(
|
||||
|
||||
child.on("error", (err: Error) => {
|
||||
if (timeout) clearTimeout(timeout);
|
||||
clearTerminalCleanupTimers();
|
||||
runningProcesses.delete(runId);
|
||||
void target.cleanup?.();
|
||||
const errno = (err as NodeJS.ErrnoException).code;
|
||||
const pathValue = mergedEnv.PATH ?? mergedEnv.Path ?? "";
|
||||
const msg =
|
||||
@@ -1170,19 +1639,28 @@ export async function runChildProcess(
|
||||
reject(new Error(msg));
|
||||
});
|
||||
|
||||
child.on("exit", () => {
|
||||
maybeArmTerminalResultCleanup();
|
||||
});
|
||||
|
||||
child.on("close", (code: number | null, signal: NodeJS.Signals | null) => {
|
||||
if (timeout) clearTimeout(timeout);
|
||||
clearTerminalCleanupTimers();
|
||||
runningProcesses.delete(runId);
|
||||
void logChain.finally(() => {
|
||||
resolve({
|
||||
exitCode: code,
|
||||
signal,
|
||||
timedOut,
|
||||
stdout,
|
||||
stderr,
|
||||
pid: child.pid ?? null,
|
||||
startedAt,
|
||||
});
|
||||
void Promise.resolve()
|
||||
.then(() => target.cleanup?.())
|
||||
.finally(() => {
|
||||
resolve({
|
||||
exitCode: code,
|
||||
signal,
|
||||
timedOut,
|
||||
stdout,
|
||||
stderr,
|
||||
pid: child.pid ?? null,
|
||||
startedAt,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
})
|
||||
|
||||
275
packages/adapter-utils/src/ssh-fixture.test.ts
Normal file
275
packages/adapter-utils/src/ssh-fixture.test.ts
Normal file
@@ -0,0 +1,275 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import { mkdir, mkdtemp, rm, symlink, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
buildSshSpawnTarget,
|
||||
buildSshEnvLabFixtureConfig,
|
||||
getSshEnvLabSupport,
|
||||
prepareWorkspaceForSshExecution,
|
||||
readSshEnvLabFixtureStatus,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
startSshEnvLabFixture,
|
||||
stopSshEnvLabFixture,
|
||||
} from "./ssh.js";
|
||||
|
||||
async function git(cwd: string, args: string[]): Promise<string> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
execFile("git", ["-C", cwd, ...args], (error, stdout, stderr) => {
|
||||
if (error) {
|
||||
reject(new Error((stderr || stdout || error.message).trim()));
|
||||
return;
|
||||
}
|
||||
resolve(stdout.trim());
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
describe("ssh env-lab fixture", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("starts an isolated sshd fixture and executes commands through it", async () => {
|
||||
const support = await getSshEnvLabSupport();
|
||||
if (!support.supported) {
|
||||
console.warn(
|
||||
`Skipping SSH env-lab fixture test: ${support.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-ssh-fixture-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const statePath = path.join(rootDir, "state.json");
|
||||
|
||||
const started = await startSshEnvLabFixture({ statePath });
|
||||
const config = await buildSshEnvLabFixtureConfig(started);
|
||||
const quotedWorkspace = JSON.stringify(started.workspaceDir);
|
||||
const result = await runSshCommand(
|
||||
config,
|
||||
`sh -lc 'cd ${quotedWorkspace} && pwd'`,
|
||||
);
|
||||
|
||||
expect(result.stdout.trim()).toBe(started.workspaceDir);
|
||||
const status = await readSshEnvLabFixtureStatus(statePath);
|
||||
expect(status.running).toBe(true);
|
||||
|
||||
await stopSshEnvLabFixture(statePath);
|
||||
|
||||
const stopped = await readSshEnvLabFixtureStatus(statePath);
|
||||
expect(stopped.running).toBe(false);
|
||||
});
|
||||
|
||||
it("does not treat an unrelated reused pid as the running fixture", async () => {
|
||||
const support = await getSshEnvLabSupport();
|
||||
if (!support.supported) {
|
||||
console.warn(
|
||||
`Skipping SSH env-lab fixture test: ${support.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-ssh-fixture-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const statePath = path.join(rootDir, "state.json");
|
||||
|
||||
const started = await startSshEnvLabFixture({ statePath });
|
||||
await stopSshEnvLabFixture(statePath);
|
||||
await mkdir(path.dirname(statePath), { recursive: true });
|
||||
|
||||
await writeFile(
|
||||
statePath,
|
||||
JSON.stringify({ ...started, pid: process.pid }, null, 2),
|
||||
{ mode: 0o600 },
|
||||
);
|
||||
|
||||
const staleStatus = await readSshEnvLabFixtureStatus(statePath);
|
||||
expect(staleStatus.running).toBe(false);
|
||||
|
||||
const restarted = await startSshEnvLabFixture({ statePath });
|
||||
expect(restarted.pid).not.toBe(process.pid);
|
||||
|
||||
await stopSshEnvLabFixture(statePath);
|
||||
});
|
||||
|
||||
it("rejects invalid environment variable keys when constructing SSH spawn targets", async () => {
|
||||
await expect(
|
||||
buildSshSpawnTarget({
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
command: "env",
|
||||
args: [],
|
||||
env: {
|
||||
"BAD KEY": "value",
|
||||
},
|
||||
}),
|
||||
).rejects.toThrow("Invalid SSH environment variable key: BAD KEY");
|
||||
});
|
||||
|
||||
it("syncs a local directory into the remote fixture workspace", async () => {
|
||||
const support = await getSshEnvLabSupport();
|
||||
if (!support.supported) {
|
||||
console.warn(
|
||||
`Skipping SSH env-lab fixture test: ${support.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-ssh-fixture-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const statePath = path.join(rootDir, "state.json");
|
||||
const localDir = path.join(rootDir, "local-overlay");
|
||||
|
||||
await mkdir(localDir, { recursive: true });
|
||||
await writeFile(path.join(localDir, "message.txt"), "hello from paperclip\n", "utf8");
|
||||
await writeFile(path.join(localDir, "._message.txt"), "should never sync\n", "utf8");
|
||||
|
||||
const started = await startSshEnvLabFixture({ statePath });
|
||||
const config = await buildSshEnvLabFixtureConfig(started);
|
||||
const remoteDir = path.posix.join(started.workspaceDir, "overlay");
|
||||
|
||||
await syncDirectoryToSsh({
|
||||
spec: {
|
||||
...config,
|
||||
remoteCwd: started.workspaceDir,
|
||||
},
|
||||
localDir,
|
||||
remoteDir,
|
||||
});
|
||||
|
||||
const result = await runSshCommand(
|
||||
config,
|
||||
`sh -lc 'cat ${JSON.stringify(path.posix.join(remoteDir, "message.txt"))} && if [ -e ${JSON.stringify(path.posix.join(remoteDir, "._message.txt"))} ]; then echo appledouble-present; fi'`,
|
||||
);
|
||||
|
||||
expect(result.stdout).toContain("hello from paperclip");
|
||||
expect(result.stdout).not.toContain("appledouble-present");
|
||||
});
|
||||
|
||||
it("can dereference local symlinks while syncing to the remote fixture", async () => {
|
||||
const support = await getSshEnvLabSupport();
|
||||
if (!support.supported) {
|
||||
console.warn(
|
||||
`Skipping SSH symlink sync test: ${support.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-ssh-fixture-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const statePath = path.join(rootDir, "state.json");
|
||||
const sourceDir = path.join(rootDir, "source");
|
||||
const localDir = path.join(rootDir, "local-overlay");
|
||||
|
||||
await mkdir(sourceDir, { recursive: true });
|
||||
await mkdir(localDir, { recursive: true });
|
||||
await writeFile(path.join(sourceDir, "auth.json"), "{\"token\":\"secret\"}\n", "utf8");
|
||||
await symlink(path.join(sourceDir, "auth.json"), path.join(localDir, "auth.json"));
|
||||
|
||||
const started = await startSshEnvLabFixture({ statePath });
|
||||
const config = await buildSshEnvLabFixtureConfig(started);
|
||||
const remoteDir = path.posix.join(started.workspaceDir, "overlay-follow-links");
|
||||
|
||||
await syncDirectoryToSsh({
|
||||
spec: {
|
||||
...config,
|
||||
remoteCwd: started.workspaceDir,
|
||||
},
|
||||
localDir,
|
||||
remoteDir,
|
||||
followSymlinks: true,
|
||||
});
|
||||
|
||||
const result = await runSshCommand(
|
||||
config,
|
||||
`sh -lc 'if [ -L ${JSON.stringify(path.posix.join(remoteDir, "auth.json"))} ]; then echo symlink; else echo regular; fi && cat ${JSON.stringify(path.posix.join(remoteDir, "auth.json"))}'`,
|
||||
);
|
||||
|
||||
expect(result.stdout).toContain("regular");
|
||||
expect(result.stdout).toContain("{\"token\":\"secret\"}");
|
||||
});
|
||||
|
||||
it("round-trips a git workspace through the SSH fixture", async () => {
|
||||
const support = await getSshEnvLabSupport();
|
||||
if (!support.supported) {
|
||||
console.warn(
|
||||
`Skipping SSH workspace round-trip test: ${support.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-ssh-fixture-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const statePath = path.join(rootDir, "state.json");
|
||||
const localRepo = path.join(rootDir, "local-workspace");
|
||||
|
||||
await mkdir(localRepo, { recursive: true });
|
||||
await git(localRepo, ["init", "-b", "main"]);
|
||||
await git(localRepo, ["config", "user.name", "Paperclip Test"]);
|
||||
await git(localRepo, ["config", "user.email", "test@paperclip.dev"]);
|
||||
await writeFile(path.join(localRepo, "tracked.txt"), "base\n", "utf8");
|
||||
await writeFile(path.join(localRepo, "._tracked.txt"), "should stay local only\n", "utf8");
|
||||
await git(localRepo, ["add", "tracked.txt"]);
|
||||
await git(localRepo, ["commit", "-m", "initial"]);
|
||||
const originalHead = await git(localRepo, ["rev-parse", "HEAD"]);
|
||||
await writeFile(path.join(localRepo, "tracked.txt"), "dirty local\n", "utf8");
|
||||
await writeFile(path.join(localRepo, "untracked.txt"), "from local\n", "utf8");
|
||||
|
||||
const started = await startSshEnvLabFixture({ statePath });
|
||||
const config = await buildSshEnvLabFixtureConfig(started);
|
||||
const spec = {
|
||||
...config,
|
||||
remoteCwd: started.workspaceDir,
|
||||
} as const;
|
||||
|
||||
await prepareWorkspaceForSshExecution({
|
||||
spec,
|
||||
localDir: localRepo,
|
||||
remoteDir: started.workspaceDir,
|
||||
});
|
||||
|
||||
const remoteStatus = await runSshCommand(
|
||||
config,
|
||||
`sh -lc 'cd ${JSON.stringify(started.workspaceDir)} && git status --short'`,
|
||||
);
|
||||
expect(remoteStatus.stdout).toContain("M tracked.txt");
|
||||
expect(remoteStatus.stdout).toContain("?? untracked.txt");
|
||||
expect(remoteStatus.stdout).not.toContain("._tracked.txt");
|
||||
|
||||
await runSshCommand(
|
||||
config,
|
||||
`sh -lc 'cd ${JSON.stringify(started.workspaceDir)} && git config user.name "Paperclip SSH" && git config user.email "ssh@paperclip.dev" && git add tracked.txt untracked.txt && git commit -m "remote update" >/dev/null && printf "remote dirty\\n" > tracked.txt && printf "remote extra\\n" > remote-only.txt'`,
|
||||
{ timeoutMs: 30_000, maxBuffer: 256 * 1024 },
|
||||
);
|
||||
|
||||
await restoreWorkspaceFromSshExecution({
|
||||
spec,
|
||||
localDir: localRepo,
|
||||
remoteDir: started.workspaceDir,
|
||||
});
|
||||
|
||||
const restoredHead = await git(localRepo, ["rev-parse", "HEAD"]);
|
||||
expect(restoredHead).not.toBe(originalHead);
|
||||
expect(await git(localRepo, ["log", "-1", "--pretty=%s"])).toBe("remote update");
|
||||
expect(await git(localRepo, ["status", "--short"])).toContain("M tracked.txt");
|
||||
expect(await git(localRepo, ["status", "--short"])).not.toContain("._tracked.txt");
|
||||
});
|
||||
});
|
||||
1233
packages/adapter-utils/src/ssh.ts
Normal file
1233
packages/adapter-utils/src/ssh.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -2,6 +2,9 @@
|
||||
// Minimal adapter-facing interfaces (no drizzle dependency)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
import type { SshRemoteExecutionSpec } from "./ssh.js";
|
||||
import type { AdapterExecutionTarget } from "./execution-target.js";
|
||||
|
||||
export interface AdapterAgent {
|
||||
id: string;
|
||||
companyId: string;
|
||||
@@ -61,12 +64,16 @@ export interface AdapterRuntimeServiceReport {
|
||||
healthStatus?: "unknown" | "healthy" | "unhealthy";
|
||||
}
|
||||
|
||||
export type AdapterExecutionErrorFamily = "transient_upstream";
|
||||
|
||||
export interface AdapterExecutionResult {
|
||||
exitCode: number | null;
|
||||
signal: string | null;
|
||||
timedOut: boolean;
|
||||
errorMessage?: string | null;
|
||||
errorCode?: string | null;
|
||||
errorFamily?: AdapterExecutionErrorFamily | null;
|
||||
retryNotBefore?: string | null;
|
||||
errorMeta?: Record<string, unknown>;
|
||||
usage?: UsageSummary;
|
||||
/**
|
||||
@@ -118,6 +125,14 @@ export interface AdapterExecutionContext {
|
||||
runtime: AdapterRuntime;
|
||||
config: Record<string, unknown>;
|
||||
context: Record<string, unknown>;
|
||||
executionTarget?: AdapterExecutionTarget | null;
|
||||
/**
|
||||
* Legacy remote transport view. Prefer `executionTarget`, which is the
|
||||
* provider-neutral contract produced by core runtime code.
|
||||
*/
|
||||
executionTransport?: {
|
||||
remoteExecution?: Record<string, unknown> | null;
|
||||
};
|
||||
onLog: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
onMeta?: (meta: AdapterInvocationMeta) => Promise<void>;
|
||||
onSpawn?: (meta: { pid: number; processGroupId: number | null; startedAt: string }) => Promise<void>;
|
||||
@@ -300,6 +315,13 @@ export interface ServerAdapterModule {
|
||||
supportsLocalAgentJwt?: boolean;
|
||||
models?: AdapterModel[];
|
||||
listModels?: () => Promise<AdapterModel[]>;
|
||||
/**
|
||||
* Optional explicit refresh hook for model discovery.
|
||||
* Use this when the adapter caches discovered models and needs a bypass path
|
||||
* so the UI can fetch newly released models without waiting for cache expiry
|
||||
* or a Paperclip code update.
|
||||
*/
|
||||
refreshModels?: () => Promise<AdapterModel[]>;
|
||||
agentConfigurationDoc?: string;
|
||||
/**
|
||||
* Optional lifecycle hook when an agent is approved/hired (join-request or hire_agent approval).
|
||||
@@ -417,6 +439,7 @@ export interface CreateConfigValues {
|
||||
workspaceBranchTemplate?: string;
|
||||
worktreeParentDir?: string;
|
||||
runtimeServicesJson?: string;
|
||||
defaultEnvironmentId?: string;
|
||||
maxTurnsPerRun: number;
|
||||
heartbeatEnabled: boolean;
|
||||
intervalSec: number;
|
||||
|
||||
@@ -2,6 +2,7 @@ export const type = "claude_local";
|
||||
export const label = "Claude Code (local)";
|
||||
|
||||
export const models = [
|
||||
{ id: "claude-opus-4-7", label: "Claude Opus 4.7" },
|
||||
{ id: "claude-opus-4-6", label: "Claude Opus 4.6" },
|
||||
{ id: "claude-sonnet-4-6", label: "Claude Sonnet 4.6" },
|
||||
{ id: "claude-haiku-4-6", label: "Claude Haiku 4.6" },
|
||||
|
||||
262
packages/adapters/claude-local/src/server/execute.remote.test.ts
Normal file
262
packages/adapters/claude-local/src/server/execute.remote.test.ts
Normal file
@@ -0,0 +1,262 @@
|
||||
import { mkdir, mkdtemp, rm, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
runChildProcess,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: [
|
||||
JSON.stringify({ type: "system", subtype: "init", session_id: "claude-session-1", model: "claude-sonnet" }),
|
||||
JSON.stringify({ type: "assistant", session_id: "claude-session-1", message: { content: [{ type: "text", text: "hello" }] } }),
|
||||
JSON.stringify({ type: "result", session_id: "claude-session-1", result: "hello", usage: { input_tokens: 1, cache_read_input_tokens: 0, output_tokens: 1 } }),
|
||||
].join("\n"),
|
||||
stderr: "",
|
||||
pid: 123,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
ensureCommandResolvable: vi.fn(async () => undefined),
|
||||
resolveCommandForLogs: vi.fn(async () => "ssh://fixture@127.0.0.1:2222/remote/workspace :: claude"),
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/server-utils")>(
|
||||
"@paperclipai/adapter-utils/server-utils",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/ssh")>(
|
||||
"@paperclipai/adapter-utils/ssh",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("claude remote execution", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("prepares the workspace, syncs Claude runtime assets, and restores workspace changes for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-claude-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const instructionsPath = path.join(rootDir, "instructions.md");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await writeFile(instructionsPath, "Use the remote workspace.\n", "utf8");
|
||||
|
||||
await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Claude Coder",
|
||||
adapterType: "claude_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "claude",
|
||||
instructionsFilePath: instructionsPath,
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: workspaceDir,
|
||||
remoteDir: "/remote/workspace",
|
||||
}));
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/claude/skills",
|
||||
followSymlinks: true,
|
||||
}));
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[2]).toContain("--append-system-prompt-file");
|
||||
expect(call?.[2]).toContain("/remote/workspace/.paperclip-runtime/claude/skills/agent-instructions.md");
|
||||
expect(call?.[2]).toContain("--add-dir");
|
||||
expect(call?.[2]).toContain("/remote/workspace/.paperclip-runtime/claude/skills");
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: workspaceDir,
|
||||
remoteDir: "/remote/workspace",
|
||||
}));
|
||||
});
|
||||
|
||||
it("does not resume saved Claude sessions for remote SSH execution without a matching remote identity", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-claude-remote-resume-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-no-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Claude Coder",
|
||||
adapterType: "claude_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "claude",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).not.toContain("--resume");
|
||||
});
|
||||
|
||||
it("resumes saved Claude sessions for remote SSH execution when the remote identity matches", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-claude-remote-resume-match-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Claude Coder",
|
||||
adapterType: "claude_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "claude",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toContain("--resume");
|
||||
expect(call?.[2]).toContain("session-123");
|
||||
});
|
||||
|
||||
});
|
||||
@@ -3,6 +3,20 @@ import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import type { AdapterExecutionContext, AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import type { RunProcessResult } from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
@@ -10,24 +24,25 @@ import {
|
||||
asStringArray,
|
||||
parseObject,
|
||||
parseJson,
|
||||
applyPaperclipWorkspaceEnv,
|
||||
buildPaperclipEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
joinPromptSections,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePathInEnv,
|
||||
resolveCommandForLogs,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
stringifyPaperclipWakePayload,
|
||||
runChildProcess,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
parseClaudeStreamJson,
|
||||
describeClaudeFailure,
|
||||
detectClaudeLoginRequired,
|
||||
extractClaudeRetryNotBefore,
|
||||
isClaudeMaxTurnsResult,
|
||||
isClaudeTransientUpstreamError,
|
||||
isClaudeUnknownSessionError,
|
||||
} from "./parse.js";
|
||||
import { resolveClaudeDesiredSkillNames } from "./skills.js";
|
||||
@@ -41,6 +56,7 @@ interface ClaudeExecutionInput {
|
||||
agent: AdapterExecutionContext["agent"];
|
||||
config: Record<string, unknown>;
|
||||
context: Record<string, unknown>;
|
||||
executionTarget?: ReturnType<typeof readAdapterExecutionTarget>;
|
||||
authToken?: string;
|
||||
}
|
||||
|
||||
@@ -91,7 +107,7 @@ function resolveClaudeBillingType(env: Record<string, string>): "api" | "subscri
|
||||
}
|
||||
|
||||
async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<ClaudeRuntimeConfig> {
|
||||
const { runId, agent, config, context, authToken } = input;
|
||||
const { runId, agent, config, context, executionTarget, authToken } = input;
|
||||
|
||||
const command = asString(config.command, "claude");
|
||||
const workspaceContext = parseObject(context.paperclipWorkspace);
|
||||
@@ -178,33 +194,17 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
if (wakePayloadJson) {
|
||||
env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
}
|
||||
if (effectiveWorkspaceCwd) {
|
||||
env.PAPERCLIP_WORKSPACE_CWD = effectiveWorkspaceCwd;
|
||||
}
|
||||
if (workspaceSource) {
|
||||
env.PAPERCLIP_WORKSPACE_SOURCE = workspaceSource;
|
||||
}
|
||||
if (workspaceStrategy) {
|
||||
env.PAPERCLIP_WORKSPACE_STRATEGY = workspaceStrategy;
|
||||
}
|
||||
if (workspaceId) {
|
||||
env.PAPERCLIP_WORKSPACE_ID = workspaceId;
|
||||
}
|
||||
if (workspaceRepoUrl) {
|
||||
env.PAPERCLIP_WORKSPACE_REPO_URL = workspaceRepoUrl;
|
||||
}
|
||||
if (workspaceRepoRef) {
|
||||
env.PAPERCLIP_WORKSPACE_REPO_REF = workspaceRepoRef;
|
||||
}
|
||||
if (workspaceBranch) {
|
||||
env.PAPERCLIP_WORKSPACE_BRANCH = workspaceBranch;
|
||||
}
|
||||
if (workspaceWorktreePath) {
|
||||
env.PAPERCLIP_WORKSPACE_WORKTREE_PATH = workspaceWorktreePath;
|
||||
}
|
||||
if (agentHome) {
|
||||
env.AGENT_HOME = agentHome;
|
||||
}
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceStrategy,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
workspaceBranch,
|
||||
workspaceWorktreePath,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
}
|
||||
@@ -217,6 +217,10 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
if (runtimePrimaryUrl) {
|
||||
env.PAPERCLIP_RUNTIME_PRIMARY_URL = runtimePrimaryUrl;
|
||||
}
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) {
|
||||
env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
@@ -227,8 +231,8 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
}
|
||||
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME", "CLAUDE_CONFIG_DIR"],
|
||||
@@ -275,7 +279,7 @@ export async function runClaudeLogin(input: {
|
||||
authToken: input.authToken,
|
||||
});
|
||||
|
||||
const proc = await runChildProcess(input.runId, runtime.command, ["login"], {
|
||||
const proc = await runAdapterExecutionTargetProcess(input.runId, null, runtime.command, ["login"], {
|
||||
cwd: runtime.cwd,
|
||||
env: runtime.env,
|
||||
timeoutSec: runtime.timeoutSec,
|
||||
@@ -297,10 +301,15 @@ export async function runClaudeLogin(input: {
|
||||
|
||||
export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExecutionResult> {
|
||||
const { runId, agent, runtime, config, context, onLog, onMeta, onSpawn, authToken } = ctx;
|
||||
const executionTarget = readAdapterExecutionTarget({
|
||||
executionTarget: ctx.executionTarget,
|
||||
legacyRemoteExecution: ctx.executionTransport?.remoteExecution,
|
||||
});
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
|
||||
const promptTemplate = asString(
|
||||
config.promptTemplate,
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const model = asString(config.model, "");
|
||||
const effort = asString(config.effort, "");
|
||||
@@ -314,6 +323,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
agent,
|
||||
config,
|
||||
context,
|
||||
executionTarget,
|
||||
authToken,
|
||||
});
|
||||
const {
|
||||
@@ -329,6 +339,11 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
graceSec,
|
||||
extraArgs,
|
||||
} = runtimeConfig;
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
const terminalResultCleanupGraceMs = Math.max(
|
||||
0,
|
||||
asNumber(config.terminalResultCleanupGraceMs, 5_000),
|
||||
);
|
||||
const effectiveEnv = Object.fromEntries(
|
||||
Object.entries({ ...process.env, ...env }).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
@@ -364,27 +379,74 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
instructionsContents: combinedInstructionsContents,
|
||||
onLog,
|
||||
});
|
||||
const effectiveInstructionsFilePath = promptBundle.instructionsFilePath ?? undefined;
|
||||
const preparedExecutionTargetRuntime = executionTargetIsRemote
|
||||
? await (async () => {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Syncing workspace and Claude runtime assets to ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
return await prepareAdapterExecutionTargetRuntime({
|
||||
target: executionTarget,
|
||||
adapterKey: "claude",
|
||||
workspaceLocalDir: cwd,
|
||||
assets: [
|
||||
{
|
||||
key: "skills",
|
||||
localDir: promptBundle.addDir,
|
||||
followSymlinks: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
})()
|
||||
: null;
|
||||
const restoreRemoteWorkspace = preparedExecutionTargetRuntime
|
||||
? () => preparedExecutionTargetRuntime.restoreWorkspace()
|
||||
: null;
|
||||
const effectivePromptBundleAddDir = executionTargetIsRemote
|
||||
? preparedExecutionTargetRuntime?.assetDirs.skills ??
|
||||
path.posix.join(effectiveExecutionCwd, ".paperclip-runtime", "claude", "skills")
|
||||
: promptBundle.addDir;
|
||||
const effectiveInstructionsFilePath = promptBundle.instructionsFilePath
|
||||
? executionTargetIsRemote
|
||||
? path.posix.join(effectivePromptBundleAddDir, path.basename(promptBundle.instructionsFilePath))
|
||||
: promptBundle.instructionsFilePath
|
||||
: undefined;
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const runtimeRemoteExecution = parseObject(runtimeSessionParams.remoteExecution);
|
||||
const runtimePromptBundleKey = asString(runtimeSessionParams.promptBundleKey, "");
|
||||
const hasMatchingPromptBundle =
|
||||
runtimePromptBundleKey.length === 0 || runtimePromptBundleKey === promptBundle.bundleKey;
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
hasMatchingPromptBundle &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(effectiveExecutionCwd)) &&
|
||||
adapterExecutionTargetSessionMatches(runtimeRemoteExecution, executionTarget);
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (
|
||||
executionTargetIsRemote &&
|
||||
runtimeSessionId &&
|
||||
runtimeSessionCwd.length > 0 &&
|
||||
path.resolve(runtimeSessionCwd) !== path.resolve(cwd)
|
||||
!canResumeSession
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Claude session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
`[paperclip] Claude session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`,
|
||||
);
|
||||
} else if (
|
||||
runtimeSessionId &&
|
||||
runtimeSessionCwd.length > 0 &&
|
||||
path.resolve(runtimeSessionCwd) !== path.resolve(effectiveExecutionCwd)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Claude session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`,
|
||||
);
|
||||
} else if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Claude session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${effectiveExecutionCwd}".\n`,
|
||||
);
|
||||
}
|
||||
if (runtimeSessionId && runtimePromptBundleKey.length > 0 && runtimePromptBundleKey !== promptBundle.bundleKey) {
|
||||
@@ -411,10 +473,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const shouldUseResumeDeltaPrompt = Boolean(sessionId) && wakePrompt.length > 0;
|
||||
const renderedPrompt = shouldUseResumeDeltaPrompt ? "" : renderTemplate(promptTemplate, templateData);
|
||||
const sessionHandoffNote = asString(context.paperclipSessionHandoffMarkdown, "").trim();
|
||||
const taskContextNote = asString(context.paperclipTaskMarkdown, "").trim();
|
||||
const prompt = joinPromptSections([
|
||||
renderedBootstrapPrompt,
|
||||
wakePrompt,
|
||||
sessionHandoffNote,
|
||||
taskContextNote,
|
||||
renderedPrompt,
|
||||
]);
|
||||
const promptMetrics = {
|
||||
@@ -422,6 +486,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
bootstrapPromptChars: renderedBootstrapPrompt.length,
|
||||
wakePromptChars: wakePrompt.length,
|
||||
sessionHandoffChars: sessionHandoffNote.length,
|
||||
taskContextChars: taskContextNote.length,
|
||||
heartbeatPromptChars: renderedPrompt.length,
|
||||
};
|
||||
|
||||
@@ -447,7 +512,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (attemptInstructionsFilePath && !resumeSessionId) {
|
||||
args.push("--append-system-prompt-file", attemptInstructionsFilePath);
|
||||
}
|
||||
args.push("--add-dir", promptBundle.addDir);
|
||||
args.push("--add-dir", effectivePromptBundleAddDir);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
return args;
|
||||
};
|
||||
@@ -484,7 +549,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onMeta({
|
||||
adapterType: "claude_local",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
commandArgs: args,
|
||||
commandNotes,
|
||||
env: loggedEnv,
|
||||
@@ -494,7 +559,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
const proc = await runAdapterExecutionTargetProcess(runId, executionTarget, command, args, {
|
||||
cwd,
|
||||
env,
|
||||
stdin: prompt,
|
||||
@@ -502,6 +567,10 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog,
|
||||
terminalResultCleanup: {
|
||||
graceMs: terminalResultCleanupGraceMs,
|
||||
hasTerminalResult: ({ stdout }) => parseClaudeStreamJson(stdout).resultJson !== null,
|
||||
},
|
||||
});
|
||||
|
||||
const parsedStream = parseClaudeStreamJson(proc.stdout);
|
||||
@@ -543,16 +612,48 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
}
|
||||
|
||||
if (!parsed) {
|
||||
const fallbackErrorMessage = parseFallbackErrorMessage(proc);
|
||||
const transientUpstream =
|
||||
!loginMeta.requiresLogin &&
|
||||
(proc.exitCode ?? 0) !== 0 &&
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed: null,
|
||||
stdout: proc.stdout,
|
||||
stderr: proc.stderr,
|
||||
errorMessage: fallbackErrorMessage,
|
||||
});
|
||||
const transientRetryNotBefore = transientUpstream
|
||||
? extractClaudeRetryNotBefore({
|
||||
parsed: null,
|
||||
stdout: proc.stdout,
|
||||
stderr: proc.stderr,
|
||||
errorMessage: fallbackErrorMessage,
|
||||
})
|
||||
: null;
|
||||
const errorCode = loginMeta.requiresLogin
|
||||
? "claude_auth_required"
|
||||
: transientUpstream
|
||||
? "claude_transient_upstream"
|
||||
: null;
|
||||
return {
|
||||
exitCode: proc.exitCode,
|
||||
signal: proc.signal,
|
||||
timedOut: false,
|
||||
errorMessage: parseFallbackErrorMessage(proc),
|
||||
errorCode: loginMeta.requiresLogin ? "claude_auth_required" : null,
|
||||
errorMessage: fallbackErrorMessage,
|
||||
errorCode,
|
||||
errorFamily: transientUpstream ? "transient_upstream" : null,
|
||||
retryNotBefore: transientRetryNotBefore ? transientRetryNotBefore.toISOString() : null,
|
||||
errorMeta,
|
||||
resultJson: {
|
||||
stdout: proc.stdout,
|
||||
stderr: proc.stderr,
|
||||
...(transientUpstream ? { errorFamily: "transient_upstream" } : {}),
|
||||
...(transientRetryNotBefore
|
||||
? { retryNotBefore: transientRetryNotBefore.toISOString() }
|
||||
: {}),
|
||||
...(transientRetryNotBefore
|
||||
? { transientRetryNotBefore: transientRetryNotBefore.toISOString() }
|
||||
: {}),
|
||||
},
|
||||
clearSession: Boolean(opts.clearSessionOnMissingSession),
|
||||
};
|
||||
@@ -575,24 +676,61 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
promptBundleKey: promptBundle.bundleKey,
|
||||
...(executionTargetIsRemote
|
||||
? {
|
||||
remoteExecution: adapterExecutionTargetSessionIdentity(executionTarget),
|
||||
}
|
||||
: {}),
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
const clearSessionForMaxTurns = isClaudeMaxTurnsResult(parsed);
|
||||
const parsedIsError = asBoolean(parsed.is_error, false);
|
||||
const failed = (proc.exitCode ?? 0) !== 0 || parsedIsError;
|
||||
const errorMessage = failed
|
||||
? describeClaudeFailure(parsed) ?? `Claude exited with code ${proc.exitCode ?? -1}`
|
||||
: null;
|
||||
const transientUpstream =
|
||||
failed &&
|
||||
!loginMeta.requiresLogin &&
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed,
|
||||
stdout: proc.stdout,
|
||||
stderr: proc.stderr,
|
||||
errorMessage,
|
||||
});
|
||||
const transientRetryNotBefore = transientUpstream
|
||||
? extractClaudeRetryNotBefore({
|
||||
parsed,
|
||||
stdout: proc.stdout,
|
||||
stderr: proc.stderr,
|
||||
errorMessage,
|
||||
})
|
||||
: null;
|
||||
const resolvedErrorCode = loginMeta.requiresLogin
|
||||
? "claude_auth_required"
|
||||
: transientUpstream
|
||||
? "claude_transient_upstream"
|
||||
: null;
|
||||
const mergedResultJson: Record<string, unknown> = {
|
||||
...parsed,
|
||||
...(transientUpstream ? { errorFamily: "transient_upstream" } : {}),
|
||||
...(transientRetryNotBefore ? { retryNotBefore: transientRetryNotBefore.toISOString() } : {}),
|
||||
...(transientRetryNotBefore ? { transientRetryNotBefore: transientRetryNotBefore.toISOString() } : {}),
|
||||
};
|
||||
|
||||
return {
|
||||
exitCode: proc.exitCode,
|
||||
signal: proc.signal,
|
||||
timedOut: false,
|
||||
errorMessage:
|
||||
(proc.exitCode ?? 0) === 0
|
||||
? null
|
||||
: describeClaudeFailure(parsed) ?? `Claude exited with code ${proc.exitCode ?? -1}`,
|
||||
errorCode: loginMeta.requiresLogin ? "claude_auth_required" : null,
|
||||
errorMessage,
|
||||
errorCode: resolvedErrorCode,
|
||||
errorFamily: transientUpstream ? "transient_upstream" : null,
|
||||
retryNotBefore: transientRetryNotBefore ? transientRetryNotBefore.toISOString() : null,
|
||||
errorMeta,
|
||||
usage,
|
||||
sessionId: resolvedSessionId,
|
||||
@@ -603,27 +741,37 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
model: parsedStream.model || asString(parsed.model, model),
|
||||
billingType,
|
||||
costUsd: parsedStream.costUsd ?? asNumber(parsed.total_cost_usd, 0),
|
||||
resultJson: parsed,
|
||||
resultJson: mergedResultJson,
|
||||
summary: parsedStream.summary || asString(parsed.result, ""),
|
||||
clearSession: clearSessionForMaxTurns || Boolean(opts.clearSessionOnMissingSession && !resolvedSessionId),
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId ?? null);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
initial.parsed &&
|
||||
isClaudeUnknownSessionError(initial.parsed)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Claude resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toAdapterResult(retry, { fallbackSessionId: null, clearSessionOnMissingSession: true });
|
||||
}
|
||||
try {
|
||||
const initial = await runAttempt(sessionId ?? null);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
initial.parsed &&
|
||||
isClaudeUnknownSessionError(initial.parsed)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Claude resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toAdapterResult(retry, { fallbackSessionId: null, clearSessionOnMissingSession: true });
|
||||
}
|
||||
|
||||
return toAdapterResult(initial, { fallbackSessionId: runtimeSessionId || runtime.sessionId });
|
||||
return toAdapterResult(initial, { fallbackSessionId: runtimeSessionId || runtime.sessionId });
|
||||
} finally {
|
||||
if (restoreRemoteWorkspace) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Restoring workspace changes from ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
await restoreRemoteWorkspace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
123
packages/adapters/claude-local/src/server/parse.test.ts
Normal file
123
packages/adapters/claude-local/src/server/parse.test.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
extractClaudeRetryNotBefore,
|
||||
isClaudeTransientUpstreamError,
|
||||
} from "./parse.js";
|
||||
|
||||
describe("isClaudeTransientUpstreamError", () => {
|
||||
it("classifies the 'out of extra usage' subscription window failure as transient", () => {
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
errorMessage: "You're out of extra usage · resets 4pm (America/Chicago)",
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed: {
|
||||
is_error: true,
|
||||
result: "You're out of extra usage. Resets at 4pm (America/Chicago).",
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it("classifies Anthropic API rate_limit_error and overloaded_error as transient", () => {
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed: {
|
||||
is_error: true,
|
||||
errors: [{ type: "rate_limit_error", message: "Rate limit reached for requests." }],
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed: {
|
||||
is_error: true,
|
||||
errors: [{ type: "overloaded_error", message: "Overloaded" }],
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
stderr: "HTTP 429: Too Many Requests",
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
stderr: "Bedrock ThrottlingException: slow down",
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it("classifies the subscription 5-hour / weekly limit wording", () => {
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
errorMessage: "Claude usage limit reached — weekly limit reached. Try again in 2 days.",
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
errorMessage: "5-hour limit reached.",
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it("does not classify login/auth failures as transient", () => {
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
stderr: "Please log in. Run `claude login` first.",
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("does not classify max-turns or unknown-session as transient", () => {
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed: { subtype: "error_max_turns", result: "Maximum turns reached." },
|
||||
}),
|
||||
).toBe(false);
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed: {
|
||||
result: "No conversation found with session id abc-123",
|
||||
errors: [{ message: "No conversation found with session id abc-123" }],
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("does not classify deterministic validation errors as transient", () => {
|
||||
expect(
|
||||
isClaudeTransientUpstreamError({
|
||||
errorMessage: "Invalid request_error: Unknown parameter 'foo'.",
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractClaudeRetryNotBefore", () => {
|
||||
it("parses the 'resets 4pm' hint in its explicit timezone", () => {
|
||||
const now = new Date("2026-04-22T15:15:00.000Z");
|
||||
const extracted = extractClaudeRetryNotBefore(
|
||||
{ errorMessage: "You're out of extra usage · resets 4pm (America/Chicago)" },
|
||||
now,
|
||||
);
|
||||
expect(extracted?.toISOString()).toBe("2026-04-22T21:00:00.000Z");
|
||||
});
|
||||
|
||||
it("rolls forward past midnight when the reset time has already passed today", () => {
|
||||
const now = new Date("2026-04-22T23:30:00.000Z");
|
||||
const extracted = extractClaudeRetryNotBefore(
|
||||
{ errorMessage: "Usage limit reached. Resets at 3:15 AM (UTC)." },
|
||||
now,
|
||||
);
|
||||
expect(extracted?.toISOString()).toBe("2026-04-23T03:15:00.000Z");
|
||||
});
|
||||
|
||||
it("returns null when no reset hint is present", () => {
|
||||
expect(
|
||||
extractClaudeRetryNotBefore({ errorMessage: "Overloaded. Try again later." }, new Date()),
|
||||
).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,19 @@
|
||||
import type { UsageSummary } from "@paperclipai/adapter-utils";
|
||||
import { asString, asNumber, parseObject, parseJson } from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
parseObject,
|
||||
parseJson,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
const CLAUDE_AUTH_REQUIRED_RE = /(?:not\s+logged\s+in|please\s+log\s+in|please\s+run\s+`?claude\s+login`?|login\s+required|requires\s+login|unauthorized|authentication\s+required)/i;
|
||||
const URL_RE = /(https?:\/\/[^\s'"`<>()[\]{};,!?]+[^\s'"`<>()[\]{};,!.?:]+)/gi;
|
||||
|
||||
const CLAUDE_TRANSIENT_UPSTREAM_RE =
|
||||
/(?:rate[-\s]?limit(?:ed)?|rate_limit_error|too\s+many\s+requests|\b429\b|overloaded(?:_error)?|server\s+overloaded|service\s+unavailable|\b503\b|\b529\b|high\s+demand|try\s+again\s+later|temporarily\s+unavailable|throttl(?:ed|ing)|throttlingexception|servicequotaexceededexception|out\s+of\s+extra\s+usage|extra\s+usage\b|claude\s+usage\s+limit\s+reached|5[-\s]?hour\s+limit\s+reached|weekly\s+limit\s+reached|usage\s+limit\s+reached|usage\s+cap\s+reached)/i;
|
||||
const CLAUDE_EXTRA_USAGE_RESET_RE =
|
||||
/(?:out\s+of\s+extra\s+usage|extra\s+usage|usage\s+limit\s+reached|usage\s+cap\s+reached|5[-\s]?hour\s+limit\s+reached|weekly\s+limit\s+reached|claude\s+usage\s+limit\s+reached)[\s\S]{0,80}?\bresets?\s+(?:at\s+)?([^\n()]+?)(?:\s*\(([^)]+)\))?(?:[.!]|\n|$)/i;
|
||||
|
||||
export function parseClaudeStreamJson(stdout: string) {
|
||||
let sessionId: string | null = null;
|
||||
let model = "";
|
||||
@@ -177,3 +187,197 @@ export function isClaudeUnknownSessionError(parsed: Record<string, unknown>): bo
|
||||
/no conversation found with session id|unknown session|session .* not found/i.test(msg),
|
||||
);
|
||||
}
|
||||
|
||||
function buildClaudeTransientHaystack(input: {
|
||||
parsed?: Record<string, unknown> | null;
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
errorMessage?: string | null;
|
||||
}): string {
|
||||
const parsed = input.parsed ?? null;
|
||||
const resultText = parsed ? asString(parsed.result, "") : "";
|
||||
const parsedErrors = parsed ? extractClaudeErrorMessages(parsed) : [];
|
||||
return [
|
||||
input.errorMessage ?? "",
|
||||
resultText,
|
||||
...parsedErrors,
|
||||
input.stdout ?? "",
|
||||
input.stderr ?? "",
|
||||
]
|
||||
.join("\n")
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
function readTimeZoneParts(date: Date, timeZone: string) {
|
||||
const values = new Map(
|
||||
new Intl.DateTimeFormat("en-US", {
|
||||
timeZone,
|
||||
hourCycle: "h23",
|
||||
year: "numeric",
|
||||
month: "2-digit",
|
||||
day: "2-digit",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
}).formatToParts(date).map((part) => [part.type, part.value]),
|
||||
);
|
||||
return {
|
||||
year: Number.parseInt(values.get("year") ?? "", 10),
|
||||
month: Number.parseInt(values.get("month") ?? "", 10),
|
||||
day: Number.parseInt(values.get("day") ?? "", 10),
|
||||
hour: Number.parseInt(values.get("hour") ?? "", 10),
|
||||
minute: Number.parseInt(values.get("minute") ?? "", 10),
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeResetTimeZone(timeZoneHint: string | null | undefined): string | null {
|
||||
const normalized = timeZoneHint?.trim();
|
||||
if (!normalized) return null;
|
||||
if (/^(?:utc|gmt)$/i.test(normalized)) return "UTC";
|
||||
|
||||
try {
|
||||
new Intl.DateTimeFormat("en-US", { timeZone: normalized }).format(new Date(0));
|
||||
return normalized;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function dateFromTimeZoneWallClock(input: {
|
||||
year: number;
|
||||
month: number;
|
||||
day: number;
|
||||
hour: number;
|
||||
minute: number;
|
||||
timeZone: string;
|
||||
}): Date | null {
|
||||
let candidate = new Date(Date.UTC(input.year, input.month - 1, input.day, input.hour, input.minute, 0, 0));
|
||||
const targetUtc = Date.UTC(input.year, input.month - 1, input.day, input.hour, input.minute, 0, 0);
|
||||
|
||||
for (let attempt = 0; attempt < 4; attempt += 1) {
|
||||
const actual = readTimeZoneParts(candidate, input.timeZone);
|
||||
const actualUtc = Date.UTC(actual.year, actual.month - 1, actual.day, actual.hour, actual.minute, 0, 0);
|
||||
const offsetMs = targetUtc - actualUtc;
|
||||
if (offsetMs === 0) break;
|
||||
candidate = new Date(candidate.getTime() + offsetMs);
|
||||
}
|
||||
|
||||
const verified = readTimeZoneParts(candidate, input.timeZone);
|
||||
if (
|
||||
verified.year !== input.year ||
|
||||
verified.month !== input.month ||
|
||||
verified.day !== input.day ||
|
||||
verified.hour !== input.hour ||
|
||||
verified.minute !== input.minute
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return candidate;
|
||||
}
|
||||
|
||||
function nextClockTimeInTimeZone(input: {
|
||||
now: Date;
|
||||
hour: number;
|
||||
minute: number;
|
||||
timeZoneHint: string;
|
||||
}): Date | null {
|
||||
const timeZone = normalizeResetTimeZone(input.timeZoneHint);
|
||||
if (!timeZone) return null;
|
||||
|
||||
const nowParts = readTimeZoneParts(input.now, timeZone);
|
||||
let retryAt = dateFromTimeZoneWallClock({
|
||||
year: nowParts.year,
|
||||
month: nowParts.month,
|
||||
day: nowParts.day,
|
||||
hour: input.hour,
|
||||
minute: input.minute,
|
||||
timeZone,
|
||||
});
|
||||
if (!retryAt) return null;
|
||||
|
||||
if (retryAt.getTime() <= input.now.getTime()) {
|
||||
const nextDay = new Date(Date.UTC(nowParts.year, nowParts.month - 1, nowParts.day + 1, 0, 0, 0, 0));
|
||||
retryAt = dateFromTimeZoneWallClock({
|
||||
year: nextDay.getUTCFullYear(),
|
||||
month: nextDay.getUTCMonth() + 1,
|
||||
day: nextDay.getUTCDate(),
|
||||
hour: input.hour,
|
||||
minute: input.minute,
|
||||
timeZone,
|
||||
});
|
||||
}
|
||||
|
||||
return retryAt;
|
||||
}
|
||||
|
||||
function parseClaudeResetClockTime(clockText: string, now: Date, timeZoneHint?: string | null): Date | null {
|
||||
const normalized = clockText.trim().replace(/\s+/g, " ");
|
||||
const match = normalized.match(/^(\d{1,2})(?::(\d{2}))?\s*([ap])\.?\s*m\.?/i);
|
||||
if (!match) return null;
|
||||
|
||||
const hour12 = Number.parseInt(match[1] ?? "", 10);
|
||||
const minute = Number.parseInt(match[2] ?? "0", 10);
|
||||
if (!Number.isInteger(hour12) || hour12 < 1 || hour12 > 12) return null;
|
||||
if (!Number.isInteger(minute) || minute < 0 || minute > 59) return null;
|
||||
|
||||
let hour24 = hour12 % 12;
|
||||
if ((match[3] ?? "").toLowerCase() === "p") hour24 += 12;
|
||||
|
||||
if (timeZoneHint) {
|
||||
const explicitRetryAt = nextClockTimeInTimeZone({
|
||||
now,
|
||||
hour: hour24,
|
||||
minute,
|
||||
timeZoneHint,
|
||||
});
|
||||
if (explicitRetryAt) return explicitRetryAt;
|
||||
}
|
||||
|
||||
const retryAt = new Date(now);
|
||||
retryAt.setHours(hour24, minute, 0, 0);
|
||||
if (retryAt.getTime() <= now.getTime()) {
|
||||
retryAt.setDate(retryAt.getDate() + 1);
|
||||
}
|
||||
return retryAt;
|
||||
}
|
||||
|
||||
export function extractClaudeRetryNotBefore(
|
||||
input: {
|
||||
parsed?: Record<string, unknown> | null;
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
errorMessage?: string | null;
|
||||
},
|
||||
now = new Date(),
|
||||
): Date | null {
|
||||
const haystack = buildClaudeTransientHaystack(input);
|
||||
const match = haystack.match(CLAUDE_EXTRA_USAGE_RESET_RE);
|
||||
if (!match) return null;
|
||||
return parseClaudeResetClockTime(match[1] ?? "", now, match[2]);
|
||||
}
|
||||
|
||||
export function isClaudeTransientUpstreamError(input: {
|
||||
parsed?: Record<string, unknown> | null;
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
errorMessage?: string | null;
|
||||
}): boolean {
|
||||
const parsed = input.parsed ?? null;
|
||||
// Deterministic failures are handled by their own classifiers.
|
||||
if (parsed && (isClaudeMaxTurnsResult(parsed) || isClaudeUnknownSessionError(parsed))) {
|
||||
return false;
|
||||
}
|
||||
const loginMeta = detectClaudeLoginRequired({
|
||||
parsed,
|
||||
stdout: input.stdout ?? "",
|
||||
stderr: input.stderr ?? "",
|
||||
});
|
||||
if (loginMeta.requiresLogin) return false;
|
||||
|
||||
const haystack = buildClaudeTransientHaystack(input);
|
||||
if (!haystack) return false;
|
||||
return CLAUDE_TRANSIENT_UPSTREAM_RE.test(haystack);
|
||||
}
|
||||
|
||||
7
packages/adapters/claude-local/vitest.config.ts
Normal file
7
packages/adapters/claude-local/vitest.config.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { defineConfig } from "vitest/config";
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
environment: "node",
|
||||
},
|
||||
});
|
||||
@@ -4,7 +4,23 @@ export const DEFAULT_CODEX_LOCAL_MODEL = "gpt-5.3-codex";
|
||||
export const DEFAULT_CODEX_LOCAL_BYPASS_APPROVALS_AND_SANDBOX = true;
|
||||
export const CODEX_LOCAL_FAST_MODE_SUPPORTED_MODELS = ["gpt-5.4"] as const;
|
||||
|
||||
function normalizeModelId(model: string | null | undefined): string {
|
||||
return typeof model === "string" ? model.trim() : "";
|
||||
}
|
||||
|
||||
export function isCodexLocalKnownModel(model: string | null | undefined): boolean {
|
||||
const normalizedModel = normalizeModelId(model);
|
||||
if (!normalizedModel) return false;
|
||||
return models.some((entry) => entry.id === normalizedModel);
|
||||
}
|
||||
|
||||
export function isCodexLocalManualModel(model: string | null | undefined): boolean {
|
||||
const normalizedModel = normalizeModelId(model);
|
||||
return Boolean(normalizedModel) && !isCodexLocalKnownModel(normalizedModel);
|
||||
}
|
||||
|
||||
export function isCodexLocalFastModeSupported(model: string | null | undefined): boolean {
|
||||
if (isCodexLocalManualModel(model)) return true;
|
||||
const normalizedModel = typeof model === "string" ? model.trim() : "";
|
||||
return CODEX_LOCAL_FAST_MODE_SUPPORTED_MODELS.includes(
|
||||
normalizedModel as (typeof CODEX_LOCAL_FAST_MODE_SUPPORTED_MODELS)[number],
|
||||
@@ -35,7 +51,7 @@ Core fields:
|
||||
- modelReasoningEffort (string, optional): reasoning effort override (minimal|low|medium|high|xhigh) passed via -c model_reasoning_effort=...
|
||||
- promptTemplate (string, optional): run prompt template
|
||||
- search (boolean, optional): run codex with --search
|
||||
- fastMode (boolean, optional): enable Codex Fast mode; currently supported on GPT-5.4 only and consumes credits faster
|
||||
- fastMode (boolean, optional): enable Codex Fast mode; supported on GPT-5.4 and passed through for manual model IDs
|
||||
- dangerouslyBypassApprovalsAndSandbox (boolean, optional): run with bypass flag
|
||||
- command (string, optional): defaults to "codex"
|
||||
- extraArgs (string[], optional): additional CLI args
|
||||
@@ -54,6 +70,6 @@ Notes:
|
||||
- Paperclip injects desired local skills into the effective CODEX_HOME/skills/ directory at execution time so Codex can discover "$paperclip" and related skills without polluting the project working directory. In managed-home mode (the default) this is ~/.paperclip/instances/<id>/companies/<companyId>/codex-home/skills/; when CODEX_HOME is explicitly overridden in adapter config, that override is used instead.
|
||||
- Unless explicitly overridden in adapter config, Paperclip runs Codex with a per-company managed CODEX_HOME under the active Paperclip instance and seeds auth/config from the shared Codex home (the CODEX_HOME env var, when set, or ~/.codex).
|
||||
- Some model/tool combinations reject certain effort levels (for example minimal with web search enabled).
|
||||
- Fast mode is currently supported on GPT-5.4 only. When enabled, Paperclip applies \`service_tier="fast"\` and \`features.fast_mode=true\`.
|
||||
- Fast mode is supported on GPT-5.4 and manual model IDs. When enabled for those models, Paperclip applies \`service_tier="fast"\` and \`features.fast_mode=true\`.
|
||||
- When Paperclip realizes a workspace/runtime for a run, it injects PAPERCLIP_WORKSPACE_* and PAPERCLIP_RUNTIME_* env vars for agent-side tooling.
|
||||
`;
|
||||
|
||||
@@ -26,6 +26,28 @@ describe("buildCodexExecArgs", () => {
|
||||
]);
|
||||
});
|
||||
|
||||
it("enables Codex fast mode overrides for manual models", () => {
|
||||
const result = buildCodexExecArgs({
|
||||
model: "gpt-5.5",
|
||||
fastMode: true,
|
||||
});
|
||||
|
||||
expect(result.fastModeRequested).toBe(true);
|
||||
expect(result.fastModeApplied).toBe(true);
|
||||
expect(result.fastModeIgnoredReason).toBeNull();
|
||||
expect(result.args).toEqual([
|
||||
"exec",
|
||||
"--json",
|
||||
"--model",
|
||||
"gpt-5.5",
|
||||
"-c",
|
||||
'service_tier="fast"',
|
||||
"-c",
|
||||
"features.fast_mode=true",
|
||||
"-",
|
||||
]);
|
||||
});
|
||||
|
||||
it("ignores fast mode for unsupported models", () => {
|
||||
const result = buildCodexExecArgs({
|
||||
model: "gpt-5.3-codex",
|
||||
@@ -34,7 +56,9 @@ describe("buildCodexExecArgs", () => {
|
||||
|
||||
expect(result.fastModeRequested).toBe(true);
|
||||
expect(result.fastModeApplied).toBe(false);
|
||||
expect(result.fastModeIgnoredReason).toContain("currently only supported on gpt-5.4");
|
||||
expect(result.fastModeIgnoredReason).toContain(
|
||||
"currently only supported on gpt-5.4 or manually configured model IDs",
|
||||
);
|
||||
expect(result.args).toEqual([
|
||||
"exec",
|
||||
"--json",
|
||||
|
||||
@@ -25,7 +25,7 @@ function asRecord(value: unknown): Record<string, unknown> {
|
||||
}
|
||||
|
||||
function formatFastModeSupportedModels(): string {
|
||||
return CODEX_LOCAL_FAST_MODE_SUPPORTED_MODELS.join(", ");
|
||||
return `${CODEX_LOCAL_FAST_MODE_SUPPORTED_MODELS.join(", ")} or manually configured model IDs`;
|
||||
}
|
||||
|
||||
export function buildCodexExecArgs(
|
||||
|
||||
359
packages/adapters/codex-local/src/server/execute.remote.test.ts
Normal file
359
packages/adapters/codex-local/src/server/execute.remote.test.ts
Normal file
@@ -0,0 +1,359 @@
|
||||
import { mkdir, mkdtemp, rm, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
runChildProcess,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 1,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: "",
|
||||
stderr: "remote failure",
|
||||
pid: 123,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
ensureCommandResolvable: vi.fn(async () => undefined),
|
||||
resolveCommandForLogs: vi.fn(async () => "/usr/bin/codex"),
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/server-utils")>(
|
||||
"@paperclipai/adapter-utils/server-utils",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/ssh")>(
|
||||
"@paperclipai/adapter-utils/ssh",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("codex remote execution", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("prepares the workspace, syncs CODEX_HOME, and restores workspace changes for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-codex-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const codexHomeDir = path.join(rootDir, "codex-home");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await mkdir(codexHomeDir, { recursive: true });
|
||||
await writeFile(path.join(rootDir, "instructions.md"), "Use the remote workspace.\n", "utf8");
|
||||
await writeFile(path.join(codexHomeDir, "auth.json"), "{}", "utf8");
|
||||
|
||||
await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "CodexCoder",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "codex",
|
||||
env: {
|
||||
CODEX_HOME: codexHomeDir,
|
||||
},
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: workspaceDir,
|
||||
remoteDir: "/remote/workspace",
|
||||
}));
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: codexHomeDir,
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/codex/home",
|
||||
followSymlinks: true,
|
||||
}));
|
||||
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[3].env.CODEX_HOME).toBe("/remote/workspace/.paperclip-runtime/codex/home");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: workspaceDir,
|
||||
remoteDir: "/remote/workspace",
|
||||
}));
|
||||
});
|
||||
|
||||
it("does not resume saved Codex sessions for remote SSH execution without a matching remote identity", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-codex-remote-resume-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const codexHomeDir = path.join(rootDir, "codex-home");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await mkdir(codexHomeDir, { recursive: true });
|
||||
await writeFile(path.join(codexHomeDir, "auth.json"), "{}", "utf8");
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-no-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "CodexCoder",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "codex",
|
||||
env: {
|
||||
CODEX_HOME: codexHomeDir,
|
||||
},
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toEqual([
|
||||
"exec",
|
||||
"--json",
|
||||
"-",
|
||||
]);
|
||||
});
|
||||
|
||||
it("resumes saved Codex sessions for remote SSH execution when the remote identity matches", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-codex-remote-resume-match-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const codexHomeDir = path.join(rootDir, "codex-home");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await mkdir(codexHomeDir, { recursive: true });
|
||||
await writeFile(path.join(codexHomeDir, "auth.json"), "{}", "utf8");
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "CodexCoder",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "codex",
|
||||
env: {
|
||||
CODEX_HOME: codexHomeDir,
|
||||
},
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toEqual([
|
||||
"exec",
|
||||
"--json",
|
||||
"resume",
|
||||
"session-123",
|
||||
"-",
|
||||
]);
|
||||
});
|
||||
|
||||
it("uses the provider-neutral execution target contract for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-codex-target-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const codexHomeDir = path.join(rootDir, "codex-home");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await mkdir(codexHomeDir, { recursive: true });
|
||||
await writeFile(path.join(codexHomeDir, "auth.json"), "{}", "utf8");
|
||||
|
||||
await execute({
|
||||
runId: "run-target",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "CodexCoder",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "codex",
|
||||
env: {
|
||||
CODEX_HOME: codexHomeDir,
|
||||
},
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTarget: {
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
remoteCwd: "/remote/workspace",
|
||||
spec: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(1);
|
||||
expect(runChildProcess).toHaveBeenCalledTimes(1);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[2]).toEqual([
|
||||
"exec",
|
||||
"--json",
|
||||
"resume",
|
||||
"session-123",
|
||||
"-",
|
||||
]);
|
||||
expect(call?.[3].env.CODEX_HOME).toBe("/remote/workspace/.paperclip-runtime/codex/home");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
});
|
||||
});
|
||||
@@ -2,26 +2,43 @@ import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { inferOpenAiCompatibleBiller, type AdapterExecutionContext, type AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
parseObject,
|
||||
applyPaperclipWorkspaceEnv,
|
||||
buildPaperclipEnv,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
ensurePathInEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolveCommandForLogs,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
joinPromptSections,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { parseCodexJsonl, isCodexUnknownSessionError } from "./parse.js";
|
||||
import {
|
||||
parseCodexJsonl,
|
||||
extractCodexRetryNotBefore,
|
||||
isCodexTransientUpstreamError,
|
||||
isCodexUnknownSessionError,
|
||||
} from "./parse.js";
|
||||
import { pathExists, prepareManagedCodexHome, resolveManagedCodexHomeDir, resolveSharedCodexHomeDir } from "./codex-home.js";
|
||||
import { resolveCodexDesiredSkillNames } from "./skills.js";
|
||||
import { buildCodexExecArgs } from "./codex-args.js";
|
||||
@@ -148,6 +165,52 @@ type EnsureCodexSkillsInjectedOptions = {
|
||||
linkSkill?: (source: string, target: string) => Promise<void>;
|
||||
};
|
||||
|
||||
type CodexTransientFallbackMode =
|
||||
| "same_session"
|
||||
| "safer_invocation"
|
||||
| "fresh_session"
|
||||
| "fresh_session_safer_invocation";
|
||||
|
||||
function readCodexTransientFallbackMode(context: Record<string, unknown>): CodexTransientFallbackMode | null {
|
||||
const value = asString(context.codexTransientFallbackMode, "").trim();
|
||||
switch (value) {
|
||||
case "same_session":
|
||||
case "safer_invocation":
|
||||
case "fresh_session":
|
||||
case "fresh_session_safer_invocation":
|
||||
return value;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function fallbackModeUsesSaferInvocation(mode: CodexTransientFallbackMode | null): boolean {
|
||||
return mode === "safer_invocation" || mode === "fresh_session_safer_invocation";
|
||||
}
|
||||
|
||||
function fallbackModeUsesFreshSession(mode: CodexTransientFallbackMode | null): boolean {
|
||||
return mode === "fresh_session" || mode === "fresh_session_safer_invocation";
|
||||
}
|
||||
|
||||
function buildCodexTransientHandoffNote(input: {
|
||||
previousSessionId: string | null;
|
||||
fallbackMode: CodexTransientFallbackMode;
|
||||
continuationSummaryBody: string | null;
|
||||
}): string {
|
||||
return [
|
||||
"Paperclip session handoff:",
|
||||
input.previousSessionId ? `- Previous session: ${input.previousSessionId}` : "",
|
||||
"- Rotation reason: repeated Codex transient remote-compaction failures",
|
||||
`- Fallback mode: ${input.fallbackMode}`,
|
||||
input.continuationSummaryBody
|
||||
? `- Issue continuation summary: ${input.continuationSummaryBody.slice(0, 1_500)}`
|
||||
: "",
|
||||
"Continue from the current task state. Rebuild only the minimum context you need.",
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
export async function ensureCodexSkillsInjected(
|
||||
onLog: AdapterExecutionContext["onLog"],
|
||||
options: EnsureCodexSkillsInjectedOptions = {},
|
||||
@@ -218,7 +281,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
|
||||
const promptTemplate = asString(
|
||||
config.promptTemplate,
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const command = asString(config.command, "codex");
|
||||
const model = asString(config.model, "");
|
||||
@@ -254,6 +317,11 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const effectiveWorkspaceCwd = useConfiguredInsteadOfAgentHome ? "" : workspaceCwd;
|
||||
const cwd = effectiveWorkspaceCwd || configuredCwd || process.cwd();
|
||||
const envConfig = parseObject(config.env);
|
||||
const executionTarget = readAdapterExecutionTarget({
|
||||
executionTarget: ctx.executionTarget,
|
||||
legacyRemoteExecution: ctx.executionTransport?.remoteExecution,
|
||||
});
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
const configuredCodexHome =
|
||||
typeof envConfig.CODEX_HOME === "string" && envConfig.CODEX_HOME.trim().length > 0
|
||||
? path.resolve(envConfig.CODEX_HOME.trim())
|
||||
@@ -277,10 +345,37 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
desiredSkillNames,
|
||||
},
|
||||
);
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
const preparedExecutionTargetRuntime = executionTargetIsRemote
|
||||
? await (async () => {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Syncing workspace and CODEX_HOME to ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
return await prepareAdapterExecutionTargetRuntime({
|
||||
target: executionTarget,
|
||||
adapterKey: "codex",
|
||||
workspaceLocalDir: cwd,
|
||||
assets: [
|
||||
{
|
||||
key: "home",
|
||||
localDir: effectiveCodexHome,
|
||||
followSymlinks: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
})()
|
||||
: null;
|
||||
const restoreRemoteWorkspace = preparedExecutionTargetRuntime
|
||||
? () => preparedExecutionTargetRuntime.restoreWorkspace()
|
||||
: null;
|
||||
const remoteCodexHome = executionTargetIsRemote
|
||||
? preparedExecutionTargetRuntime?.assetDirs.home ??
|
||||
path.posix.join(effectiveExecutionCwd, ".paperclip-runtime", "codex", "home")
|
||||
: null;
|
||||
const hasExplicitApiKey =
|
||||
typeof envConfig.PAPERCLIP_API_KEY === "string" && envConfig.PAPERCLIP_API_KEY.trim().length > 0;
|
||||
const env: Record<string, string> = { ...buildPaperclipEnv(agent) };
|
||||
env.CODEX_HOME = effectiveCodexHome;
|
||||
env.PAPERCLIP_RUN_ID = runId;
|
||||
const wakeTaskId =
|
||||
(typeof context.taskId === "string" && context.taskId.trim().length > 0 && context.taskId.trim()) ||
|
||||
@@ -327,33 +422,17 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (wakePayloadJson) {
|
||||
env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
}
|
||||
if (effectiveWorkspaceCwd) {
|
||||
env.PAPERCLIP_WORKSPACE_CWD = effectiveWorkspaceCwd;
|
||||
}
|
||||
if (workspaceSource) {
|
||||
env.PAPERCLIP_WORKSPACE_SOURCE = workspaceSource;
|
||||
}
|
||||
if (workspaceStrategy) {
|
||||
env.PAPERCLIP_WORKSPACE_STRATEGY = workspaceStrategy;
|
||||
}
|
||||
if (workspaceId) {
|
||||
env.PAPERCLIP_WORKSPACE_ID = workspaceId;
|
||||
}
|
||||
if (workspaceRepoUrl) {
|
||||
env.PAPERCLIP_WORKSPACE_REPO_URL = workspaceRepoUrl;
|
||||
}
|
||||
if (workspaceRepoRef) {
|
||||
env.PAPERCLIP_WORKSPACE_REPO_REF = workspaceRepoRef;
|
||||
}
|
||||
if (workspaceBranch) {
|
||||
env.PAPERCLIP_WORKSPACE_BRANCH = workspaceBranch;
|
||||
}
|
||||
if (workspaceWorktreePath) {
|
||||
env.PAPERCLIP_WORKSPACE_WORKTREE_PATH = workspaceWorktreePath;
|
||||
}
|
||||
if (agentHome) {
|
||||
env.AGENT_HOME = agentHome;
|
||||
}
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceStrategy,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
workspaceBranch,
|
||||
workspaceWorktreePath,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
}
|
||||
@@ -366,9 +445,14 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (runtimePrimaryUrl) {
|
||||
env.PAPERCLIP_RUNTIME_PRIMARY_URL = runtimePrimaryUrl;
|
||||
}
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) {
|
||||
env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
}
|
||||
for (const [k, v] of Object.entries(envConfig)) {
|
||||
if (typeof v === "string") env[k] = v;
|
||||
}
|
||||
env.CODEX_HOME = remoteCodexHome ?? effectiveCodexHome;
|
||||
if (!hasExplicitApiKey && authToken) {
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
@@ -379,8 +463,8 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
);
|
||||
const billingType = resolveCodexBillingType(effectiveEnv);
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
@@ -393,14 +477,24 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const runtimeRemoteExecution = parseObject(runtimeSessionParams.remoteExecution);
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(effectiveExecutionCwd)) &&
|
||||
adapterExecutionTargetSessionMatches(runtimeRemoteExecution, executionTarget);
|
||||
const codexTransientFallbackMode = readCodexTransientFallbackMode(context);
|
||||
const forceSaferInvocation = fallbackModeUsesSaferInvocation(codexTransientFallbackMode);
|
||||
const forceFreshSession = fallbackModeUsesFreshSession(codexTransientFallbackMode);
|
||||
const sessionId = canResumeSession && !forceFreshSession ? runtimeSessionId : null;
|
||||
if (executionTargetIsRemote && runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Codex session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
`[paperclip] Codex session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`,
|
||||
);
|
||||
} else if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Codex session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${effectiveExecutionCwd}".\n`,
|
||||
);
|
||||
}
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
@@ -443,28 +537,66 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const shouldUseResumeDeltaPrompt = Boolean(sessionId) && wakePrompt.length > 0;
|
||||
const promptInstructionsPrefix = shouldUseResumeDeltaPrompt ? "" : instructionsPrefix;
|
||||
instructionsChars = promptInstructionsPrefix.length;
|
||||
const continuationSummary = parseObject(context.paperclipContinuationSummary);
|
||||
const continuationSummaryBody = asString(continuationSummary.body, "").trim() || null;
|
||||
const codexFallbackHandoffNote =
|
||||
forceFreshSession
|
||||
? buildCodexTransientHandoffNote({
|
||||
previousSessionId: runtimeSessionId || runtime.sessionId || null,
|
||||
fallbackMode: codexTransientFallbackMode ?? "fresh_session",
|
||||
continuationSummaryBody,
|
||||
})
|
||||
: "";
|
||||
const commandNotes = (() => {
|
||||
if (!instructionsFilePath) {
|
||||
return [repoAgentsNote];
|
||||
const notes = [repoAgentsNote];
|
||||
if (forceSaferInvocation) {
|
||||
notes.push("Codex transient fallback requested safer invocation settings for this retry.");
|
||||
}
|
||||
if (forceFreshSession) {
|
||||
notes.push("Codex transient fallback forced a fresh session with a continuation handoff.");
|
||||
}
|
||||
return notes;
|
||||
}
|
||||
if (instructionsPrefix.length > 0) {
|
||||
if (shouldUseResumeDeltaPrompt) {
|
||||
return [
|
||||
const notes = [
|
||||
`Loaded agent instructions from ${instructionsFilePath}`,
|
||||
"Skipped stdin instruction reinjection because an existing Codex session is being resumed with a wake delta.",
|
||||
repoAgentsNote,
|
||||
];
|
||||
if (forceSaferInvocation) {
|
||||
notes.push("Codex transient fallback requested safer invocation settings for this retry.");
|
||||
}
|
||||
if (forceFreshSession) {
|
||||
notes.push("Codex transient fallback forced a fresh session with a continuation handoff.");
|
||||
}
|
||||
return notes;
|
||||
}
|
||||
return [
|
||||
const notes = [
|
||||
`Loaded agent instructions from ${instructionsFilePath}`,
|
||||
`Prepended instructions + path directive to stdin prompt (relative references from ${instructionsDir}).`,
|
||||
repoAgentsNote,
|
||||
];
|
||||
if (forceSaferInvocation) {
|
||||
notes.push("Codex transient fallback requested safer invocation settings for this retry.");
|
||||
}
|
||||
if (forceFreshSession) {
|
||||
notes.push("Codex transient fallback forced a fresh session with a continuation handoff.");
|
||||
}
|
||||
return notes;
|
||||
}
|
||||
return [
|
||||
const notes = [
|
||||
`Configured instructionsFilePath ${instructionsFilePath}, but file could not be read; continuing without injected instructions.`,
|
||||
repoAgentsNote,
|
||||
];
|
||||
if (forceSaferInvocation) {
|
||||
notes.push("Codex transient fallback requested safer invocation settings for this retry.");
|
||||
}
|
||||
if (forceFreshSession) {
|
||||
notes.push("Codex transient fallback forced a fresh session with a continuation handoff.");
|
||||
}
|
||||
return notes;
|
||||
})();
|
||||
const renderedPrompt = shouldUseResumeDeltaPrompt ? "" : renderTemplate(promptTemplate, templateData);
|
||||
const sessionHandoffNote = asString(context.paperclipSessionHandoffMarkdown, "").trim();
|
||||
@@ -472,6 +604,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
promptInstructionsPrefix,
|
||||
renderedBootstrapPrompt,
|
||||
wakePrompt,
|
||||
codexFallbackHandoffNote,
|
||||
sessionHandoffNote,
|
||||
renderedPrompt,
|
||||
]);
|
||||
@@ -485,7 +618,10 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
|
||||
const runAttempt = async (resumeSessionId: string | null) => {
|
||||
const execArgs = buildCodexExecArgs(config, { resumeSessionId });
|
||||
const execArgs = buildCodexExecArgs(
|
||||
forceSaferInvocation ? { ...config, fastMode: false } : config,
|
||||
{ resumeSessionId },
|
||||
);
|
||||
const args = execArgs.args;
|
||||
const commandNotesWithFastMode =
|
||||
execArgs.fastModeIgnoredReason == null
|
||||
@@ -495,7 +631,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onMeta({
|
||||
adapterType: "codex_local",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
commandNotes: commandNotesWithFastMode,
|
||||
commandArgs: args.map((value, idx) => {
|
||||
if (idx === args.length - 1 && value !== "-") return `<prompt ${prompt.length} chars>`;
|
||||
@@ -508,7 +644,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
const proc = await runAdapterExecutionTargetProcess(runId, executionTarget, command, args, {
|
||||
cwd,
|
||||
env,
|
||||
stdin: prompt,
|
||||
@@ -539,6 +675,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const toResult = (
|
||||
attempt: { proc: { exitCode: number | null; signal: string | null; timedOut: boolean; stdout: string; stderr: string }; rawStderr: string; parsed: ReturnType<typeof parseCodexJsonl> },
|
||||
clearSessionOnMissingSession = false,
|
||||
isRetry = false,
|
||||
): AdapterExecutionResult => {
|
||||
if (attempt.proc.timedOut) {
|
||||
return {
|
||||
@@ -550,11 +687,19 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
}
|
||||
|
||||
const resolvedSessionId = attempt.parsed.sessionId ?? runtimeSessionId ?? runtime.sessionId ?? null;
|
||||
const canFallbackToRuntimeSession = !isRetry && !forceFreshSession;
|
||||
const resolvedSessionId =
|
||||
attempt.parsed.sessionId ??
|
||||
(canFallbackToRuntimeSession ? (runtimeSessionId ?? runtime.sessionId ?? null) : null);
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
...(executionTargetIsRemote
|
||||
? {
|
||||
remoteExecution: adapterExecutionTargetSessionIdentity(executionTarget),
|
||||
}
|
||||
: {}),
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
@@ -566,6 +711,21 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
parsedError ||
|
||||
stderrLine ||
|
||||
`Codex exited with code ${attempt.proc.exitCode ?? -1}`;
|
||||
const transientRetryNotBefore =
|
||||
(attempt.proc.exitCode ?? 0) !== 0
|
||||
? extractCodexRetryNotBefore({
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
errorMessage: fallbackErrorMessage,
|
||||
})
|
||||
: null;
|
||||
const transientUpstream =
|
||||
(attempt.proc.exitCode ?? 0) !== 0 &&
|
||||
isCodexTransientUpstreamError({
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
errorMessage: fallbackErrorMessage,
|
||||
});
|
||||
|
||||
return {
|
||||
exitCode: attempt.proc.exitCode,
|
||||
@@ -575,6 +735,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
(attempt.proc.exitCode ?? 0) === 0
|
||||
? null
|
||||
: fallbackErrorMessage,
|
||||
errorCode:
|
||||
transientUpstream
|
||||
? "codex_transient_upstream"
|
||||
: null,
|
||||
errorFamily: transientUpstream ? "transient_upstream" : null,
|
||||
retryNotBefore: transientRetryNotBefore ? transientRetryNotBefore.toISOString() : null,
|
||||
usage: attempt.parsed.usage,
|
||||
sessionId: resolvedSessionId,
|
||||
sessionParams: resolvedSessionParams,
|
||||
@@ -587,26 +753,39 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
resultJson: {
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
...(transientUpstream ? { errorFamily: "transient_upstream" } : {}),
|
||||
...(transientRetryNotBefore ? { retryNotBefore: transientRetryNotBefore.toISOString() } : {}),
|
||||
...(transientRetryNotBefore ? { transientRetryNotBefore: transientRetryNotBefore.toISOString() } : {}),
|
||||
},
|
||||
summary: attempt.parsed.summary,
|
||||
clearSession: Boolean(clearSessionOnMissingSession && !resolvedSessionId),
|
||||
clearSession: Boolean((clearSessionOnMissingSession || forceFreshSession) && !resolvedSessionId),
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
isCodexUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Codex resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
try {
|
||||
const initial = await runAttempt(sessionId);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
isCodexUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Codex resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true, true);
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
return toResult(initial, false, false);
|
||||
} finally {
|
||||
if (restoreRemoteWorkspace) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Restoring workspace changes from ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
await restoreRemoteWorkspace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
export { execute, ensureCodexSkillsInjected } from "./execute.js";
|
||||
export { listCodexSkills, syncCodexSkills } from "./skills.js";
|
||||
export { testEnvironment } from "./test.js";
|
||||
export { parseCodexJsonl, isCodexUnknownSessionError } from "./parse.js";
|
||||
export { parseCodexJsonl, isCodexTransientUpstreamError, isCodexUnknownSessionError } from "./parse.js";
|
||||
export {
|
||||
getQuotaWindows,
|
||||
readCodexAuthInfo,
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { isCodexUnknownSessionError, parseCodexJsonl } from "./parse.js";
|
||||
import {
|
||||
extractCodexRetryNotBefore,
|
||||
isCodexTransientUpstreamError,
|
||||
isCodexUnknownSessionError,
|
||||
parseCodexJsonl,
|
||||
} from "./parse.js";
|
||||
|
||||
describe("parseCodexJsonl", () => {
|
||||
it("captures session id, assistant summary, usage, and error message", () => {
|
||||
@@ -81,3 +86,55 @@ describe("isCodexUnknownSessionError", () => {
|
||||
expect(isCodexUnknownSessionError("", "model overloaded")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isCodexTransientUpstreamError", () => {
|
||||
it("classifies the remote-compaction high-demand failure as transient upstream", () => {
|
||||
expect(
|
||||
isCodexTransientUpstreamError({
|
||||
errorMessage:
|
||||
"Error running remote compact task: We're currently experiencing high demand, which may cause temporary errors.",
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isCodexTransientUpstreamError({
|
||||
stderr: "We're currently experiencing high demand, which may cause temporary errors.",
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it("classifies usage-limit windows as transient and extracts the retry time", () => {
|
||||
const errorMessage = "You've hit your usage limit for GPT-5.3-Codex-Spark. Switch to another model now, or try again at 11:31 PM.";
|
||||
const now = new Date(2026, 3, 22, 22, 29, 2);
|
||||
|
||||
expect(isCodexTransientUpstreamError({ errorMessage })).toBe(true);
|
||||
expect(extractCodexRetryNotBefore({ errorMessage }, now)?.getTime()).toBe(
|
||||
new Date(2026, 3, 22, 23, 31, 0, 0).getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it("parses explicit timezone hints on usage-limit retry windows", () => {
|
||||
const errorMessage = "You've hit your usage limit for GPT-5.3-Codex-Spark. Switch to another model now, or try again at 11:31 PM (America/Chicago).";
|
||||
const now = new Date("2026-04-23T03:29:02.000Z");
|
||||
|
||||
expect(extractCodexRetryNotBefore({ errorMessage }, now)?.toISOString()).toBe(
|
||||
"2026-04-23T04:31:00.000Z",
|
||||
);
|
||||
});
|
||||
|
||||
it("does not classify deterministic compaction errors as transient", () => {
|
||||
expect(
|
||||
isCodexTransientUpstreamError({
|
||||
errorMessage: [
|
||||
"Error running remote compact task: {",
|
||||
' "error": {',
|
||||
' "message": "Unknown parameter: \'prompt_cache_retention\'.",',
|
||||
' "type": "invalid_request_error",',
|
||||
' "param": "prompt_cache_retention",',
|
||||
' "code": "unknown_parameter"',
|
||||
" }",
|
||||
"}",
|
||||
].join("\n"),
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,15 @@
|
||||
import { asString, asNumber, parseObject, parseJson } from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
parseObject,
|
||||
parseJson,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
const CODEX_TRANSIENT_UPSTREAM_RE =
|
||||
/(?:we(?:'|’)re\s+currently\s+experiencing\s+high\s+demand|temporary\s+errors|rate[-\s]?limit(?:ed)?|too\s+many\s+requests|\b429\b|server\s+overloaded|service\s+unavailable|try\s+again\s+later)/i;
|
||||
const CODEX_REMOTE_COMPACTION_RE = /remote\s+compact\s+task/i;
|
||||
const CODEX_USAGE_LIMIT_RE =
|
||||
/you(?:'|’)ve hit your usage limit for .+\.\s+switch to another model now,\s+or try again at\s+([^.!\n]+)(?:[.!]|\n|$)/i;
|
||||
|
||||
export function parseCodexJsonl(stdout: string) {
|
||||
let sessionId: string | null = null;
|
||||
@@ -71,3 +82,180 @@ export function isCodexUnknownSessionError(stdout: string, stderr: string): bool
|
||||
haystack,
|
||||
);
|
||||
}
|
||||
|
||||
function buildCodexErrorHaystack(input: {
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
errorMessage?: string | null;
|
||||
}): string {
|
||||
return [
|
||||
input.errorMessage ?? "",
|
||||
input.stdout ?? "",
|
||||
input.stderr ?? "",
|
||||
]
|
||||
.join("\n")
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
function readTimeZoneParts(date: Date, timeZone: string) {
|
||||
const values = new Map(
|
||||
new Intl.DateTimeFormat("en-US", {
|
||||
timeZone,
|
||||
hourCycle: "h23",
|
||||
year: "numeric",
|
||||
month: "2-digit",
|
||||
day: "2-digit",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
}).formatToParts(date).map((part) => [part.type, part.value]),
|
||||
);
|
||||
return {
|
||||
year: Number.parseInt(values.get("year") ?? "", 10),
|
||||
month: Number.parseInt(values.get("month") ?? "", 10),
|
||||
day: Number.parseInt(values.get("day") ?? "", 10),
|
||||
hour: Number.parseInt(values.get("hour") ?? "", 10),
|
||||
minute: Number.parseInt(values.get("minute") ?? "", 10),
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeResetTimeZone(timeZoneHint: string | null | undefined): string | null {
|
||||
const normalized = timeZoneHint?.trim();
|
||||
if (!normalized) return null;
|
||||
if (/^(?:utc|gmt)$/i.test(normalized)) return "UTC";
|
||||
|
||||
try {
|
||||
new Intl.DateTimeFormat("en-US", { timeZone: normalized }).format(new Date(0));
|
||||
return normalized;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function dateFromTimeZoneWallClock(input: {
|
||||
year: number;
|
||||
month: number;
|
||||
day: number;
|
||||
hour: number;
|
||||
minute: number;
|
||||
timeZone: string;
|
||||
}): Date | null {
|
||||
let candidate = new Date(Date.UTC(input.year, input.month - 1, input.day, input.hour, input.minute, 0, 0));
|
||||
const targetUtc = Date.UTC(input.year, input.month - 1, input.day, input.hour, input.minute, 0, 0);
|
||||
|
||||
for (let attempt = 0; attempt < 4; attempt += 1) {
|
||||
const actual = readTimeZoneParts(candidate, input.timeZone);
|
||||
const actualUtc = Date.UTC(actual.year, actual.month - 1, actual.day, actual.hour, actual.minute, 0, 0);
|
||||
const offsetMs = targetUtc - actualUtc;
|
||||
if (offsetMs === 0) break;
|
||||
candidate = new Date(candidate.getTime() + offsetMs);
|
||||
}
|
||||
|
||||
const verified = readTimeZoneParts(candidate, input.timeZone);
|
||||
if (
|
||||
verified.year !== input.year ||
|
||||
verified.month !== input.month ||
|
||||
verified.day !== input.day ||
|
||||
verified.hour !== input.hour ||
|
||||
verified.minute !== input.minute
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return candidate;
|
||||
}
|
||||
|
||||
function nextClockTimeInTimeZone(input: {
|
||||
now: Date;
|
||||
hour: number;
|
||||
minute: number;
|
||||
timeZoneHint: string;
|
||||
}): Date | null {
|
||||
const timeZone = normalizeResetTimeZone(input.timeZoneHint);
|
||||
if (!timeZone) return null;
|
||||
|
||||
const nowParts = readTimeZoneParts(input.now, timeZone);
|
||||
let retryAt = dateFromTimeZoneWallClock({
|
||||
year: nowParts.year,
|
||||
month: nowParts.month,
|
||||
day: nowParts.day,
|
||||
hour: input.hour,
|
||||
minute: input.minute,
|
||||
timeZone,
|
||||
});
|
||||
if (!retryAt) return null;
|
||||
|
||||
if (retryAt.getTime() <= input.now.getTime()) {
|
||||
const nextDay = new Date(Date.UTC(nowParts.year, nowParts.month - 1, nowParts.day + 1, 0, 0, 0, 0));
|
||||
retryAt = dateFromTimeZoneWallClock({
|
||||
year: nextDay.getUTCFullYear(),
|
||||
month: nextDay.getUTCMonth() + 1,
|
||||
day: nextDay.getUTCDate(),
|
||||
hour: input.hour,
|
||||
minute: input.minute,
|
||||
timeZone,
|
||||
});
|
||||
}
|
||||
|
||||
return retryAt;
|
||||
}
|
||||
|
||||
function parseLocalClockTime(clockText: string, now: Date): Date | null {
|
||||
const normalized = clockText.trim();
|
||||
const match = normalized.match(/^(\d{1,2})(?::(\d{2}))?\s*([ap])\.?\s*m\.?(?:\s*\(([^)]+)\)|\s+([A-Z]{2,5}))?$/i);
|
||||
if (!match) return null;
|
||||
|
||||
const hour12 = Number.parseInt(match[1] ?? "", 10);
|
||||
const minute = Number.parseInt(match[2] ?? "0", 10);
|
||||
if (!Number.isInteger(hour12) || hour12 < 1 || hour12 > 12) return null;
|
||||
if (!Number.isInteger(minute) || minute < 0 || minute > 59) return null;
|
||||
|
||||
let hour24 = hour12 % 12;
|
||||
if ((match[3] ?? "").toLowerCase() === "p") hour24 += 12;
|
||||
|
||||
const timeZoneHint = match[4] ?? match[5];
|
||||
if (timeZoneHint) {
|
||||
const explicitRetryAt = nextClockTimeInTimeZone({
|
||||
now,
|
||||
hour: hour24,
|
||||
minute,
|
||||
timeZoneHint,
|
||||
});
|
||||
if (explicitRetryAt) return explicitRetryAt;
|
||||
}
|
||||
|
||||
const retryAt = new Date(now);
|
||||
retryAt.setHours(hour24, minute, 0, 0);
|
||||
if (retryAt.getTime() <= now.getTime()) {
|
||||
retryAt.setDate(retryAt.getDate() + 1);
|
||||
}
|
||||
return retryAt;
|
||||
}
|
||||
|
||||
export function extractCodexRetryNotBefore(input: {
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
errorMessage?: string | null;
|
||||
}, now = new Date()): Date | null {
|
||||
const haystack = buildCodexErrorHaystack(input);
|
||||
const usageLimitMatch = haystack.match(CODEX_USAGE_LIMIT_RE);
|
||||
if (!usageLimitMatch) return null;
|
||||
return parseLocalClockTime(usageLimitMatch[1] ?? "", now);
|
||||
}
|
||||
|
||||
export function isCodexTransientUpstreamError(input: {
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
errorMessage?: string | null;
|
||||
}): boolean {
|
||||
const haystack = buildCodexErrorHaystack(input);
|
||||
|
||||
if (extractCodexRetryNotBefore(input) != null) return true;
|
||||
if (!CODEX_TRANSIENT_UPSTREAM_RE.test(haystack)) return false;
|
||||
// Keep automatic retries scoped to the observed remote-compaction/high-demand
|
||||
// failure shape, plus explicit usage-limit windows that tell us when retrying
|
||||
// becomes safe again.
|
||||
return CODEX_REMOTE_COMPACTION_RE.test(haystack) || /high\s+demand|temporary\s+errors/i.test(haystack);
|
||||
}
|
||||
|
||||
@@ -146,7 +146,7 @@ export async function testEnvironment(
|
||||
code: "codex_fast_mode_unsupported_model",
|
||||
level: "warn",
|
||||
message: execArgs.fastModeIgnoredReason,
|
||||
hint: "Switch the agent model to GPT-5.4 to enable Codex Fast mode.",
|
||||
hint: "Switch the agent model to GPT-5.4 or enter a manual model ID to enable Codex Fast mode.",
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
268
packages/adapters/cursor-local/src/server/execute.remote.test.ts
Normal file
268
packages/adapters/cursor-local/src/server/execute.remote.test.ts
Normal file
@@ -0,0 +1,268 @@
|
||||
import { mkdir, mkdtemp, rm } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
runChildProcess,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: [
|
||||
JSON.stringify({ type: "system", session_id: "cursor-session-1" }),
|
||||
JSON.stringify({ type: "assistant", text: "hello" }),
|
||||
JSON.stringify({ type: "result", is_error: false, result: "hello", session_id: "cursor-session-1" }),
|
||||
].join("\n"),
|
||||
stderr: "",
|
||||
pid: 123,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
ensureCommandResolvable: vi.fn(async () => undefined),
|
||||
resolveCommandForLogs: vi.fn(async () => "ssh://fixture@127.0.0.1:2222/remote/workspace :: agent"),
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
runSshCommand: vi.fn(async () => ({
|
||||
stdout: "/home/agent",
|
||||
stderr: "",
|
||||
exitCode: 0,
|
||||
})),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/server-utils")>(
|
||||
"@paperclipai/adapter-utils/server-utils",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/ssh")>(
|
||||
"@paperclipai/adapter-utils/ssh",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("cursor remote execution", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("prepares the workspace, syncs Cursor skills, and restores workspace changes for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-cursor-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Cursor Builder",
|
||||
adapterType: "cursor",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "agent",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.sessionParams).toMatchObject({
|
||||
sessionId: "cursor-session-1",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
});
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/cursor/skills",
|
||||
followSymlinks: true,
|
||||
}));
|
||||
expect(runSshCommand).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.stringContaining(".cursor/skills"),
|
||||
expect.anything(),
|
||||
);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[2]).toContain("--workspace");
|
||||
expect(call?.[2]).toContain("/remote/workspace");
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("resumes saved Cursor sessions for remote SSH execution only when the identity matches", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-cursor-remote-resume-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Cursor Builder",
|
||||
adapterType: "cursor",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "agent",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toContain("--resume");
|
||||
expect(call?.[2]).toContain("session-123");
|
||||
});
|
||||
|
||||
it("restores the remote workspace if skills sync fails after workspace prep", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-cursor-remote-sync-fail-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
syncDirectoryToSsh.mockRejectedValueOnce(new Error("sync failed"));
|
||||
|
||||
await expect(execute({
|
||||
runId: "run-sync-fail",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Cursor Builder",
|
||||
adapterType: "cursor",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "agent",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
})).rejects.toThrow("sync failed");
|
||||
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(runChildProcess).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -3,26 +3,41 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { inferOpenAiCompatibleBiller, type AdapterExecutionContext, type AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
readAdapterExecutionTargetHomeDir,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
applyPaperclipWorkspaceEnv,
|
||||
buildPaperclipEnv,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
ensurePathInEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolveCommandForLogs,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
removeMaintainerOnlySkillSymlinks,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
joinPromptSections,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { DEFAULT_CURSOR_LOCAL_MODEL } from "../index.js";
|
||||
import { parseCursorJsonl, isCursorUnknownSessionError } from "./parse.js";
|
||||
@@ -96,6 +111,19 @@ function cursorSkillsHome(): string {
|
||||
return path.join(os.homedir(), ".cursor", "skills");
|
||||
}
|
||||
|
||||
async function buildCursorSkillsDir(config: Record<string, unknown>): Promise<string> {
|
||||
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-cursor-skills-"));
|
||||
const target = path.join(tmp, "skills");
|
||||
await fs.mkdir(target, { recursive: true });
|
||||
const availableEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredNames = new Set(resolvePaperclipDesiredSkillNames(config, availableEntries));
|
||||
for (const entry of availableEntries) {
|
||||
if (!desiredNames.has(entry.key)) continue;
|
||||
await fs.symlink(entry.source, path.join(target, entry.runtimeName));
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
type EnsureCursorSkillsInjectedOptions = {
|
||||
skillsDir?: string | null;
|
||||
skillsEntries?: Array<{ key: string; runtimeName: string; source: string }>;
|
||||
@@ -161,10 +189,15 @@ export async function ensureCursorSkillsInjected(
|
||||
|
||||
export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExecutionResult> {
|
||||
const { runId, agent, runtime, config, context, onLog, onMeta, onSpawn, authToken } = ctx;
|
||||
const executionTarget = readAdapterExecutionTarget({
|
||||
executionTarget: ctx.executionTarget,
|
||||
legacyRemoteExecution: ctx.executionTransport?.remoteExecution,
|
||||
});
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
|
||||
const promptTemplate = asString(
|
||||
config.promptTemplate,
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const command = asString(config.command, "agent");
|
||||
const model = asString(config.model, DEFAULT_CURSOR_LOCAL_MODEL).trim();
|
||||
@@ -189,9 +222,11 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
const cursorSkillEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredCursorSkillNames = resolvePaperclipDesiredSkillNames(config, cursorSkillEntries);
|
||||
await ensureCursorSkillsInjected(onLog, {
|
||||
skillsEntries: cursorSkillEntries.filter((entry) => desiredCursorSkillNames.includes(entry.key)),
|
||||
});
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensureCursorSkillsInjected(onLog, {
|
||||
skillsEntries: cursorSkillEntries.filter((entry) => desiredCursorSkillNames.includes(entry.key)),
|
||||
});
|
||||
}
|
||||
|
||||
const envConfig = parseObject(config.env);
|
||||
const hasExplicitApiKey =
|
||||
@@ -243,27 +278,21 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (wakePayloadJson) {
|
||||
env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
}
|
||||
if (effectiveWorkspaceCwd) {
|
||||
env.PAPERCLIP_WORKSPACE_CWD = effectiveWorkspaceCwd;
|
||||
}
|
||||
if (workspaceSource) {
|
||||
env.PAPERCLIP_WORKSPACE_SOURCE = workspaceSource;
|
||||
}
|
||||
if (workspaceId) {
|
||||
env.PAPERCLIP_WORKSPACE_ID = workspaceId;
|
||||
}
|
||||
if (workspaceRepoUrl) {
|
||||
env.PAPERCLIP_WORKSPACE_REPO_URL = workspaceRepoUrl;
|
||||
}
|
||||
if (workspaceRepoRef) {
|
||||
env.PAPERCLIP_WORKSPACE_REPO_REF = workspaceRepoRef;
|
||||
}
|
||||
if (agentHome) {
|
||||
env.AGENT_HOME = agentHome;
|
||||
}
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
}
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) {
|
||||
env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
}
|
||||
for (const [k, v] of Object.entries(envConfig)) {
|
||||
if (typeof v === "string") env[k] = v;
|
||||
}
|
||||
@@ -277,8 +306,8 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
);
|
||||
const billingType = resolveCursorBillingType(effectiveEnv);
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
@@ -293,18 +322,77 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const autoTrustEnabled = !hasCursorTrustBypassArg(extraArgs);
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
let restoreRemoteWorkspace: (() => Promise<void>) | null = null;
|
||||
let localSkillsDir: string | null = null;
|
||||
|
||||
if (executionTargetIsRemote) {
|
||||
try {
|
||||
localSkillsDir = await buildCursorSkillsDir(config);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Syncing workspace and Cursor runtime assets to ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
const preparedExecutionTargetRuntime = await prepareAdapterExecutionTargetRuntime({
|
||||
target: executionTarget,
|
||||
adapterKey: "cursor",
|
||||
workspaceLocalDir: cwd,
|
||||
assets: [{
|
||||
key: "skills",
|
||||
localDir: localSkillsDir,
|
||||
followSymlinks: true,
|
||||
}],
|
||||
});
|
||||
restoreRemoteWorkspace = () => preparedExecutionTargetRuntime.restoreWorkspace();
|
||||
const managedHome = adapterExecutionTargetUsesManagedHome(executionTarget);
|
||||
if (managedHome && preparedExecutionTargetRuntime.runtimeRootDir) {
|
||||
env.HOME = preparedExecutionTargetRuntime.runtimeRootDir;
|
||||
}
|
||||
const remoteHomeDir = managedHome && preparedExecutionTargetRuntime.runtimeRootDir
|
||||
? preparedExecutionTargetRuntime.runtimeRootDir
|
||||
: await readAdapterExecutionTargetHomeDir(runId, executionTarget, {
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onLog,
|
||||
});
|
||||
if (remoteHomeDir && preparedExecutionTargetRuntime.assetDirs.skills) {
|
||||
const remoteSkillsDir = path.posix.join(remoteHomeDir, ".cursor", "skills");
|
||||
await runAdapterExecutionTargetShellCommand(
|
||||
runId,
|
||||
executionTarget,
|
||||
`mkdir -p ${JSON.stringify(path.posix.dirname(remoteSkillsDir))} && rm -rf ${JSON.stringify(remoteSkillsDir)} && cp -a ${JSON.stringify(preparedExecutionTargetRuntime.assetDirs.skills)} ${JSON.stringify(remoteSkillsDir)}`,
|
||||
{ cwd, env, timeoutSec, graceSec, onLog },
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
await Promise.allSettled([
|
||||
restoreRemoteWorkspace?.(),
|
||||
localSkillsDir ? fs.rm(localSkillsDir, { recursive: true, force: true }).catch(() => undefined) : Promise.resolve(),
|
||||
]);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const runtimeRemoteExecution = parseObject(runtimeSessionParams.remoteExecution);
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(effectiveExecutionCwd)) &&
|
||||
adapterExecutionTargetSessionMatches(runtimeRemoteExecution, executionTarget);
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
if (executionTargetIsRemote && runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Cursor session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
`[paperclip] Cursor session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`,
|
||||
);
|
||||
} else if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Cursor session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${effectiveExecutionCwd}".\n`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -386,7 +474,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
|
||||
const buildArgs = (resumeSessionId: string | null) => {
|
||||
const args = ["-p", "--output-format", "stream-json", "--workspace", cwd];
|
||||
const args = ["-p", "--output-format", "stream-json", "--workspace", effectiveExecutionCwd];
|
||||
if (resumeSessionId) args.push("--resume", resumeSessionId);
|
||||
if (model) args.push("--model", model);
|
||||
if (mode) args.push("--mode", mode);
|
||||
@@ -401,7 +489,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onMeta({
|
||||
adapterType: "cursor",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
commandNotes,
|
||||
commandArgs: args,
|
||||
env: loggedEnv,
|
||||
@@ -435,7 +523,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
}
|
||||
};
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
const proc = await runAdapterExecutionTargetProcess(runId, executionTarget, command, args, {
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec,
|
||||
@@ -487,10 +575,15 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
...(executionTargetIsRemote
|
||||
? {
|
||||
remoteExecution: adapterExecutionTargetSessionIdentity(executionTarget),
|
||||
}
|
||||
: {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
const parsedError = typeof attempt.parsed.errorMessage === "string" ? attempt.parsed.errorMessage.trim() : "";
|
||||
@@ -526,20 +619,32 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
isCursorUnknownSessionError(initial.proc.stdout, initial.proc.stderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Cursor resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
try {
|
||||
const initial = await runAttempt(sessionId);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
isCursorUnknownSessionError(initial.proc.stdout, initial.proc.stderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Cursor resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
if (restoreRemoteWorkspace) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Restoring workspace changes from ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
await restoreRemoteWorkspace();
|
||||
}
|
||||
if (localSkillsDir) {
|
||||
await fs.rm(localSkillsDir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
}
|
||||
|
||||
272
packages/adapters/gemini-local/src/server/execute.remote.test.ts
Normal file
272
packages/adapters/gemini-local/src/server/execute.remote.test.ts
Normal file
@@ -0,0 +1,272 @@
|
||||
import { mkdir, mkdtemp, rm } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
runChildProcess,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: [
|
||||
JSON.stringify({ type: "system", subtype: "init", session_id: "gemini-session-1", model: "gemini-2.5-pro" }),
|
||||
JSON.stringify({ type: "assistant", message: { content: [{ type: "output_text", text: "hello" }] } }),
|
||||
JSON.stringify({
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
session_id: "gemini-session-1",
|
||||
usage: { promptTokenCount: 1, cachedContentTokenCount: 0, candidatesTokenCount: 1 },
|
||||
result: "hello",
|
||||
}),
|
||||
].join("\n"),
|
||||
stderr: "",
|
||||
pid: 123,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
ensureCommandResolvable: vi.fn(async () => undefined),
|
||||
resolveCommandForLogs: vi.fn(async () => "ssh://fixture@127.0.0.1:2222/remote/workspace :: gemini"),
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
runSshCommand: vi.fn(async () => ({
|
||||
stdout: "/home/agent",
|
||||
stderr: "",
|
||||
exitCode: 0,
|
||||
})),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/server-utils")>(
|
||||
"@paperclipai/adapter-utils/server-utils",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/ssh")>(
|
||||
"@paperclipai/adapter-utils/ssh",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("gemini remote execution", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("prepares the workspace, syncs Gemini skills, and restores workspace changes for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-gemini-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Gemini Builder",
|
||||
adapterType: "gemini_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "gemini",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.sessionParams).toMatchObject({
|
||||
sessionId: "gemini-session-1",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
});
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/gemini/skills",
|
||||
followSymlinks: true,
|
||||
}));
|
||||
expect(runSshCommand).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.stringContaining(".gemini/skills"),
|
||||
expect.anything(),
|
||||
);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("resumes saved Gemini sessions for remote SSH execution only when the identity matches", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-gemini-remote-resume-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Gemini Builder",
|
||||
adapterType: "gemini_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "gemini",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toContain("--resume");
|
||||
expect(call?.[2]).toContain("session-123");
|
||||
});
|
||||
|
||||
it("restores the remote workspace if skills sync fails after workspace prep", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-gemini-remote-sync-fail-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
syncDirectoryToSsh.mockRejectedValueOnce(new Error("sync failed"));
|
||||
|
||||
await expect(execute({
|
||||
runId: "run-sync-fail",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Gemini Builder",
|
||||
adapterType: "gemini_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "gemini",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
})).rejects.toThrow("sync failed");
|
||||
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(runChildProcess).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -4,26 +4,42 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import type { AdapterExecutionContext, AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
readAdapterExecutionTargetHomeDir,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asBoolean,
|
||||
asNumber,
|
||||
asString,
|
||||
asStringArray,
|
||||
applyPaperclipWorkspaceEnv,
|
||||
buildPaperclipEnv,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
joinPromptSections,
|
||||
ensurePathInEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolveCommandForLogs,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
removeMaintainerOnlySkillSymlinks,
|
||||
parseObject,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { DEFAULT_GEMINI_LOCAL_MODEL } from "../index.js";
|
||||
@@ -135,12 +151,32 @@ async function ensureGeminiSkillsInjected(
|
||||
}
|
||||
}
|
||||
|
||||
async function buildGeminiSkillsDir(
|
||||
config: Record<string, unknown>,
|
||||
): Promise<string> {
|
||||
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-gemini-skills-"));
|
||||
const target = path.join(tmp, "skills");
|
||||
await fs.mkdir(target, { recursive: true });
|
||||
const availableEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredNames = new Set(resolvePaperclipDesiredSkillNames(config, availableEntries));
|
||||
for (const entry of availableEntries) {
|
||||
if (!desiredNames.has(entry.key)) continue;
|
||||
await fs.symlink(entry.source, path.join(target, entry.runtimeName));
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExecutionResult> {
|
||||
const { runId, agent, runtime, config, context, onLog, onMeta, onSpawn, authToken } = ctx;
|
||||
const executionTarget = readAdapterExecutionTarget({
|
||||
executionTarget: ctx.executionTarget,
|
||||
legacyRemoteExecution: ctx.executionTransport?.remoteExecution,
|
||||
});
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
|
||||
const promptTemplate = asString(
|
||||
config.promptTemplate,
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const command = asString(config.command, "gemini");
|
||||
const model = asString(config.model, DEFAULT_GEMINI_LOCAL_MODEL).trim();
|
||||
@@ -165,7 +201,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
const geminiSkillEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredGeminiSkillNames = resolvePaperclipDesiredSkillNames(config, geminiSkillEntries);
|
||||
await ensureGeminiSkillsInjected(onLog, geminiSkillEntries, desiredGeminiSkillNames);
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensureGeminiSkillsInjected(onLog, geminiSkillEntries, desiredGeminiSkillNames);
|
||||
}
|
||||
|
||||
const envConfig = parseObject(config.env);
|
||||
const hasExplicitApiKey =
|
||||
@@ -203,13 +241,17 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (approvalStatus) env.PAPERCLIP_APPROVAL_STATUS = approvalStatus;
|
||||
if (linkedIssueIds.length > 0) env.PAPERCLIP_LINKED_ISSUE_IDS = linkedIssueIds.join(",");
|
||||
if (wakePayloadJson) env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
if (effectiveWorkspaceCwd) env.PAPERCLIP_WORKSPACE_CWD = effectiveWorkspaceCwd;
|
||||
if (workspaceSource) env.PAPERCLIP_WORKSPACE_SOURCE = workspaceSource;
|
||||
if (workspaceId) env.PAPERCLIP_WORKSPACE_ID = workspaceId;
|
||||
if (workspaceRepoUrl) env.PAPERCLIP_WORKSPACE_REPO_URL = workspaceRepoUrl;
|
||||
if (workspaceRepoRef) env.PAPERCLIP_WORKSPACE_REPO_REF = workspaceRepoRef;
|
||||
if (agentHome) env.AGENT_HOME = agentHome;
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
@@ -224,8 +266,8 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
);
|
||||
const billingType = resolveGeminiBillingType(effectiveEnv);
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
@@ -239,18 +281,78 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
let restoreRemoteWorkspace: (() => Promise<void>) | null = null;
|
||||
let remoteSkillsDir: string | null = null;
|
||||
let localSkillsDir: string | null = null;
|
||||
|
||||
if (executionTargetIsRemote) {
|
||||
try {
|
||||
localSkillsDir = await buildGeminiSkillsDir(config);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Syncing workspace and Gemini runtime assets to ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
const preparedExecutionTargetRuntime = await prepareAdapterExecutionTargetRuntime({
|
||||
target: executionTarget,
|
||||
adapterKey: "gemini",
|
||||
workspaceLocalDir: cwd,
|
||||
assets: [{
|
||||
key: "skills",
|
||||
localDir: localSkillsDir,
|
||||
followSymlinks: true,
|
||||
}],
|
||||
});
|
||||
restoreRemoteWorkspace = () => preparedExecutionTargetRuntime.restoreWorkspace();
|
||||
const managedHome = adapterExecutionTargetUsesManagedHome(executionTarget);
|
||||
if (managedHome && preparedExecutionTargetRuntime.runtimeRootDir) {
|
||||
env.HOME = preparedExecutionTargetRuntime.runtimeRootDir;
|
||||
}
|
||||
const remoteHomeDir = managedHome && preparedExecutionTargetRuntime.runtimeRootDir
|
||||
? preparedExecutionTargetRuntime.runtimeRootDir
|
||||
: await readAdapterExecutionTargetHomeDir(runId, executionTarget, {
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onLog,
|
||||
});
|
||||
if (remoteHomeDir && preparedExecutionTargetRuntime.assetDirs.skills) {
|
||||
remoteSkillsDir = path.posix.join(remoteHomeDir, ".gemini", "skills");
|
||||
await runAdapterExecutionTargetShellCommand(
|
||||
runId,
|
||||
executionTarget,
|
||||
`mkdir -p ${JSON.stringify(path.posix.dirname(remoteSkillsDir))} && rm -rf ${JSON.stringify(remoteSkillsDir)} && cp -a ${JSON.stringify(preparedExecutionTargetRuntime.assetDirs.skills)} ${JSON.stringify(remoteSkillsDir)}`,
|
||||
{ cwd, env, timeoutSec, graceSec, onLog },
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
await Promise.allSettled([
|
||||
restoreRemoteWorkspace?.(),
|
||||
localSkillsDir ? fs.rm(path.dirname(localSkillsDir), { recursive: true, force: true }).catch(() => undefined) : Promise.resolve(),
|
||||
]);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const runtimeRemoteExecution = parseObject(runtimeSessionParams.remoteExecution);
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(effectiveExecutionCwd)) &&
|
||||
adapterExecutionTargetSessionMatches(runtimeRemoteExecution, executionTarget);
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
if (executionTargetIsRemote && runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Gemini session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
`[paperclip] Gemini session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`,
|
||||
);
|
||||
} else if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Gemini session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${effectiveExecutionCwd}".\n`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -349,7 +451,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onMeta({
|
||||
adapterType: "gemini_local",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
commandNotes,
|
||||
commandArgs: args.map((value, index) => (
|
||||
index === args.length - 1 ? `<prompt ${prompt.length} chars>` : value
|
||||
@@ -361,7 +463,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
const proc = await runAdapterExecutionTargetProcess(runId, executionTarget, command, args, {
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec,
|
||||
@@ -415,10 +517,15 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
...(executionTargetIsRemote
|
||||
? {
|
||||
remoteExecution: adapterExecutionTargetSessionIdentity(executionTarget),
|
||||
}
|
||||
: {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
const parsedError = typeof attempt.parsed.errorMessage === "string" ? attempt.parsed.errorMessage.trim() : "";
|
||||
@@ -457,20 +564,27 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
isGeminiUnknownSessionError(initial.proc.stdout, initial.proc.stderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Gemini resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true, true);
|
||||
}
|
||||
try {
|
||||
const initial = await runAttempt(sessionId);
|
||||
if (
|
||||
sessionId &&
|
||||
!initial.proc.timedOut &&
|
||||
(initial.proc.exitCode ?? 0) !== 0 &&
|
||||
isGeminiUnknownSessionError(initial.proc.stdout, initial.proc.stderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Gemini resume session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true, true);
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
await Promise.all([
|
||||
restoreRemoteWorkspace?.(),
|
||||
localSkillsDir ? fs.rm(path.dirname(localSkillsDir), { recursive: true, force: true }).catch(() => undefined) : Promise.resolve(),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -420,7 +420,11 @@ function buildWakeText(
|
||||
" - POST /api/issues/{issueId}/checkout with {\"agentId\":\"$PAPERCLIP_AGENT_ID\",\"expectedStatuses\":[\"todo\",\"backlog\",\"blocked\",\"in_review\"]}",
|
||||
" - GET /api/issues/{issueId}",
|
||||
" - GET /api/issues/{issueId}/comments",
|
||||
" - Execute the issue instructions exactly.",
|
||||
" - Execute the issue instructions exactly. If the issue is actionable, take concrete action in this run; do not stop at a plan unless planning was requested.",
|
||||
" - Leave durable progress with a clear next action. Use child issues for long or parallel delegated work instead of polling agents, sessions, or processes.",
|
||||
" - Create child issues directly when you know what needs to be done; use POST /api/issues/{issueId}/interactions with kind suggest_tasks, ask_user_questions, or request_confirmation when the board/user must choose, answer, or confirm before you can continue.",
|
||||
" - For plan approval, update the plan document first, then create request_confirmation targeting the latest plan revision with idempotencyKey confirmation:{issueId}:plan:{revisionId}; wait for acceptance before creating implementation subtasks.",
|
||||
" - If blocked, PATCH /api/issues/{issueId} with {\"status\":\"blocked\",\"comment\":\"what is blocked, who owns the unblock, and the next action\"}.",
|
||||
" - If instructions require a comment, POST /api/issues/{issueId}/comments with {\"body\":\"...\"}.",
|
||||
" - PATCH /api/issues/{issueId} with {\"status\":\"done\",\"comment\":\"what changed and why\"}.",
|
||||
"4) If issueId does not exist:",
|
||||
|
||||
@@ -0,0 +1,225 @@
|
||||
import { mkdir, mkdtemp, rm } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
runChildProcess,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: [
|
||||
JSON.stringify({ type: "step_start", sessionID: "session_123" }),
|
||||
JSON.stringify({ type: "text", sessionID: "session_123", part: { text: "hello" } }),
|
||||
JSON.stringify({
|
||||
type: "step_finish",
|
||||
sessionID: "session_123",
|
||||
part: { cost: 0.001, tokens: { input: 1, output: 1, reasoning: 0, cache: { read: 0, write: 0 } } },
|
||||
}),
|
||||
].join("\n"),
|
||||
stderr: "",
|
||||
pid: 123,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
ensureCommandResolvable: vi.fn(async () => undefined),
|
||||
resolveCommandForLogs: vi.fn(async () => "ssh://fixture@127.0.0.1:2222/remote/workspace :: opencode"),
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
runSshCommand: vi.fn(async () => ({
|
||||
stdout: "/home/agent",
|
||||
stderr: "",
|
||||
exitCode: 0,
|
||||
})),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/server-utils")>(
|
||||
"@paperclipai/adapter-utils/server-utils",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/ssh")>(
|
||||
"@paperclipai/adapter-utils/ssh",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("opencode remote execution", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("prepares the workspace, syncs OpenCode skills, and restores workspace changes for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "OpenCode Builder",
|
||||
adapterType: "opencode_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "opencode",
|
||||
model: "opencode/gpt-5-nano",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.sessionParams).toMatchObject({
|
||||
sessionId: "session_123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
});
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(2);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/opencode/xdgConfig",
|
||||
}));
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/opencode/skills",
|
||||
followSymlinks: true,
|
||||
}));
|
||||
expect(runSshCommand).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.stringContaining(".claude/skills"),
|
||||
expect.anything(),
|
||||
);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].env.XDG_CONFIG_HOME).toBe("/remote/workspace/.paperclip-runtime/opencode/xdgConfig");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("resumes saved OpenCode sessions for remote SSH execution only when the identity matches", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-remote-resume-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "OpenCode Builder",
|
||||
adapterType: "opencode_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "session-123",
|
||||
sessionParams: {
|
||||
sessionId: "session-123",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "opencode",
|
||||
model: "opencode/gpt-5-nano",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toContain("--session");
|
||||
expect(call?.[2]).toContain("session-123");
|
||||
});
|
||||
});
|
||||
@@ -3,22 +3,38 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { inferOpenAiCompatibleBiller, type AdapterExecutionContext, type AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
readAdapterExecutionTargetHomeDir,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
applyPaperclipWorkspaceEnv,
|
||||
buildPaperclipEnv,
|
||||
joinPromptSections,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
ensurePathInEnv,
|
||||
resolveCommandForLogs,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
runChildProcess,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
@@ -92,12 +108,30 @@ async function ensureOpenCodeSkillsInjected(
|
||||
}
|
||||
}
|
||||
|
||||
async function buildOpenCodeSkillsDir(config: Record<string, unknown>): Promise<string> {
|
||||
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-skills-"));
|
||||
const target = path.join(tmp, "skills");
|
||||
await fs.mkdir(target, { recursive: true });
|
||||
const availableEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredNames = new Set(resolvePaperclipDesiredSkillNames(config, availableEntries));
|
||||
for (const entry of availableEntries) {
|
||||
if (!desiredNames.has(entry.key)) continue;
|
||||
await fs.symlink(entry.source, path.join(target, entry.runtimeName));
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExecutionResult> {
|
||||
const { runId, agent, runtime, config, context, onLog, onMeta, onSpawn, authToken } = ctx;
|
||||
const executionTarget = readAdapterExecutionTarget({
|
||||
executionTarget: ctx.executionTarget,
|
||||
legacyRemoteExecution: ctx.executionTransport?.remoteExecution,
|
||||
});
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
|
||||
const promptTemplate = asString(
|
||||
config.promptTemplate,
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const command = asString(config.command, "opencode");
|
||||
const model = asString(config.model, "").trim();
|
||||
@@ -122,11 +156,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
const openCodeSkillEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredOpenCodeSkillNames = resolvePaperclipDesiredSkillNames(config, openCodeSkillEntries);
|
||||
await ensureOpenCodeSkillsInjected(
|
||||
onLog,
|
||||
openCodeSkillEntries,
|
||||
desiredOpenCodeSkillNames,
|
||||
);
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensureOpenCodeSkillsInjected(
|
||||
onLog,
|
||||
openCodeSkillEntries,
|
||||
desiredOpenCodeSkillNames,
|
||||
);
|
||||
}
|
||||
|
||||
const envConfig = parseObject(config.env);
|
||||
const hasExplicitApiKey =
|
||||
@@ -164,13 +200,17 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (approvalStatus) env.PAPERCLIP_APPROVAL_STATUS = approvalStatus;
|
||||
if (linkedIssueIds.length > 0) env.PAPERCLIP_LINKED_ISSUE_IDS = linkedIssueIds.join(",");
|
||||
if (wakePayloadJson) env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
if (effectiveWorkspaceCwd) env.PAPERCLIP_WORKSPACE_CWD = effectiveWorkspaceCwd;
|
||||
if (workspaceSource) env.PAPERCLIP_WORKSPACE_SOURCE = workspaceSource;
|
||||
if (workspaceId) env.PAPERCLIP_WORKSPACE_ID = workspaceId;
|
||||
if (workspaceRepoUrl) env.PAPERCLIP_WORKSPACE_REPO_URL = workspaceRepoUrl;
|
||||
if (workspaceRepoRef) env.PAPERCLIP_WORKSPACE_REPO_REF = workspaceRepoRef;
|
||||
if (agentHome) env.AGENT_HOME = agentHome;
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
@@ -184,26 +224,30 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
const preparedRuntimeConfig = await prepareOpenCodeRuntimeConfig({ env, config });
|
||||
const localRuntimeConfigHome =
|
||||
preparedRuntimeConfig.notes.length > 0 ? preparedRuntimeConfig.env.XDG_CONFIG_HOME : "";
|
||||
try {
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...preparedRuntimeConfig.env })).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(preparedRuntimeConfig.env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
}
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
@@ -212,18 +256,80 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
let restoreRemoteWorkspace: (() => Promise<void>) | null = null;
|
||||
let localSkillsDir: string | null = null;
|
||||
|
||||
if (executionTargetIsRemote) {
|
||||
localSkillsDir = await buildOpenCodeSkillsDir(config);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Syncing workspace and OpenCode runtime assets to ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
const preparedExecutionTargetRuntime = await prepareAdapterExecutionTargetRuntime({
|
||||
target: executionTarget,
|
||||
adapterKey: "opencode",
|
||||
workspaceLocalDir: cwd,
|
||||
assets: [
|
||||
{
|
||||
key: "skills",
|
||||
localDir: localSkillsDir,
|
||||
followSymlinks: true,
|
||||
},
|
||||
...(localRuntimeConfigHome
|
||||
? [{
|
||||
key: "xdgConfig",
|
||||
localDir: localRuntimeConfigHome,
|
||||
}]
|
||||
: []),
|
||||
],
|
||||
});
|
||||
restoreRemoteWorkspace = () => preparedExecutionTargetRuntime.restoreWorkspace();
|
||||
const managedHome = adapterExecutionTargetUsesManagedHome(executionTarget);
|
||||
if (managedHome && preparedExecutionTargetRuntime.runtimeRootDir) {
|
||||
preparedRuntimeConfig.env.HOME = preparedExecutionTargetRuntime.runtimeRootDir;
|
||||
}
|
||||
if (localRuntimeConfigHome && preparedExecutionTargetRuntime.assetDirs.xdgConfig) {
|
||||
preparedRuntimeConfig.env.XDG_CONFIG_HOME = preparedExecutionTargetRuntime.assetDirs.xdgConfig;
|
||||
}
|
||||
const remoteHomeDir = managedHome && preparedExecutionTargetRuntime.runtimeRootDir
|
||||
? preparedExecutionTargetRuntime.runtimeRootDir
|
||||
: await readAdapterExecutionTargetHomeDir(runId, executionTarget, {
|
||||
cwd,
|
||||
env: preparedRuntimeConfig.env,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onLog,
|
||||
});
|
||||
if (remoteHomeDir && preparedExecutionTargetRuntime.assetDirs.skills) {
|
||||
const remoteSkillsDir = path.posix.join(remoteHomeDir, ".claude", "skills");
|
||||
await runAdapterExecutionTargetShellCommand(
|
||||
runId,
|
||||
executionTarget,
|
||||
`mkdir -p ${JSON.stringify(path.posix.dirname(remoteSkillsDir))} && rm -rf ${JSON.stringify(remoteSkillsDir)} && cp -a ${JSON.stringify(preparedExecutionTargetRuntime.assetDirs.skills)} ${JSON.stringify(remoteSkillsDir)}`,
|
||||
{ cwd, env: preparedRuntimeConfig.env, timeoutSec, graceSec, onLog },
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const runtimeRemoteExecution = parseObject(runtimeSessionParams.remoteExecution);
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(effectiveExecutionCwd)) &&
|
||||
adapterExecutionTargetSessionMatches(runtimeRemoteExecution, executionTarget);
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
if (executionTargetIsRemote && runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`,
|
||||
);
|
||||
} else if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${effectiveExecutionCwd}".\n`,
|
||||
);
|
||||
}
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
@@ -313,7 +419,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onMeta({
|
||||
adapterType: "opencode_local",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
commandNotes,
|
||||
commandArgs: [...args, `<stdin prompt ${prompt.length} chars>`],
|
||||
env: loggedEnv,
|
||||
@@ -323,9 +429,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
const proc = await runAdapterExecutionTargetProcess(runId, executionTarget, command, args, {
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
env: preparedRuntimeConfig.env,
|
||||
stdin: prompt,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
@@ -363,10 +469,15 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
...(executionTargetIsRemote
|
||||
? {
|
||||
remoteExecution: adapterExecutionTargetSessionIdentity(executionTarget),
|
||||
}
|
||||
: {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
|
||||
@@ -407,23 +518,30 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || Boolean(initial.parsed.errorMessage));
|
||||
if (
|
||||
sessionId &&
|
||||
initialFailed &&
|
||||
isOpenCodeUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
try {
|
||||
const initial = await runAttempt(sessionId);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || Boolean(initial.parsed.errorMessage));
|
||||
if (
|
||||
sessionId &&
|
||||
initialFailed &&
|
||||
isOpenCodeUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
await Promise.all([
|
||||
restoreRemoteWorkspace?.(),
|
||||
localSkillsDir ? fs.rm(path.dirname(localSkillsDir), { recursive: true, force: true }).catch(() => undefined) : Promise.resolve(),
|
||||
]);
|
||||
}
|
||||
} finally {
|
||||
await preparedRuntimeConfig.cleanup();
|
||||
}
|
||||
|
||||
@@ -40,6 +40,33 @@ describe("parseOpenCodeJsonl", () => {
|
||||
});
|
||||
expect(parsed.costUsd).toBeCloseTo(0.0025, 6);
|
||||
expect(parsed.errorMessage).toContain("model unavailable");
|
||||
expect(parsed.toolErrors).toEqual([]);
|
||||
});
|
||||
|
||||
it("keeps failed tool calls separate from fatal run errors", () => {
|
||||
const stdout = [
|
||||
JSON.stringify({
|
||||
type: "tool_use",
|
||||
sessionID: "session_123",
|
||||
part: {
|
||||
state: {
|
||||
status: "error",
|
||||
error: "File not found: e2b-adapter-result.txt",
|
||||
},
|
||||
},
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "text",
|
||||
sessionID: "session_123",
|
||||
part: { text: "Recovered and completed the task" },
|
||||
}),
|
||||
].join("\n");
|
||||
|
||||
const parsed = parseOpenCodeJsonl(stdout);
|
||||
expect(parsed.sessionId).toBe("session_123");
|
||||
expect(parsed.summary).toBe("Recovered and completed the task");
|
||||
expect(parsed.errorMessage).toBeNull();
|
||||
expect(parsed.toolErrors).toEqual(["File not found: e2b-adapter-result.txt"]);
|
||||
});
|
||||
|
||||
it("detects unknown session errors", () => {
|
||||
|
||||
@@ -23,6 +23,7 @@ export function parseOpenCodeJsonl(stdout: string) {
|
||||
let sessionId: string | null = null;
|
||||
const messages: string[] = [];
|
||||
const errors: string[] = [];
|
||||
const toolErrors: string[] = [];
|
||||
const usage = {
|
||||
inputTokens: 0,
|
||||
cachedInputTokens: 0,
|
||||
@@ -65,7 +66,7 @@ export function parseOpenCodeJsonl(stdout: string) {
|
||||
const state = parseObject(part.state);
|
||||
if (asString(state.status, "") === "error") {
|
||||
const text = asString(state.error, "").trim();
|
||||
if (text) errors.push(text);
|
||||
if (text) toolErrors.push(text);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@@ -83,6 +84,7 @@ export function parseOpenCodeJsonl(stdout: string) {
|
||||
usage,
|
||||
costUsd,
|
||||
errorMessage: errors.length > 0 ? errors.join("\n") : null,
|
||||
toolErrors,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
229
packages/adapters/pi-local/src/server/execute.remote.test.ts
Normal file
229
packages/adapters/pi-local/src/server/execute.remote.test.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import { mkdir, mkdtemp, rm } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
runChildProcess,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: JSON.stringify({
|
||||
type: "turn_end",
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: "done",
|
||||
usage: {
|
||||
input: 10,
|
||||
output: 20,
|
||||
cacheRead: 0,
|
||||
cost: { total: 0.01 },
|
||||
},
|
||||
},
|
||||
toolResults: [],
|
||||
}),
|
||||
stderr: "",
|
||||
pid: 123,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
ensureCommandResolvable: vi.fn(async () => undefined),
|
||||
resolveCommandForLogs: vi.fn(async () => "ssh://fixture@127.0.0.1:2222/remote/workspace :: pi"),
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
runSshCommand: vi.fn(async () => ({
|
||||
stdout: "",
|
||||
stderr: "",
|
||||
exitCode: 0,
|
||||
})),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/server-utils")>(
|
||||
"@paperclipai/adapter-utils/server-utils",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
runChildProcess,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/ssh")>(
|
||||
"@paperclipai/adapter-utils/ssh",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("pi remote execution", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("prepares the workspace, syncs Pi skills, and restores workspace changes for remote SSH execution", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-pi-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Pi Builder",
|
||||
adapterType: "pi_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "pi",
|
||||
model: "openai/gpt-5.4-mini",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.sessionParams).toMatchObject({
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
});
|
||||
expect(String(result.sessionId)).toContain("/remote/workspace/.paperclip-runtime/pi/sessions/");
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledTimes(1);
|
||||
expect(syncDirectoryToSsh).toHaveBeenCalledWith(expect.objectContaining({
|
||||
remoteDir: "/remote/workspace/.paperclip-runtime/pi/skills",
|
||||
followSymlinks: true,
|
||||
}));
|
||||
expect(runSshCommand).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.stringContaining(".paperclip-runtime/pi/sessions"),
|
||||
expect.anything(),
|
||||
);
|
||||
const call = runChildProcess.mock.calls[0] as unknown as
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[2]).toContain("--session");
|
||||
expect(call?.[2]).toContain("--skill");
|
||||
expect(call?.[2]).toContain("/remote/workspace/.paperclip-runtime/pi/skills");
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("resumes saved Pi sessions for remote SSH execution only when the identity matches", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-pi-remote-resume-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-ssh-resume",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Pi Builder",
|
||||
adapterType: "pi_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: "/remote/workspace/.paperclip-runtime/pi/sessions/session-123.jsonl",
|
||||
sessionParams: {
|
||||
sessionId: "/remote/workspace/.paperclip-runtime/pi/sessions/session-123.jsonl",
|
||||
cwd: "/remote/workspace",
|
||||
remoteExecution: {
|
||||
transport: "ssh",
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
},
|
||||
},
|
||||
sessionDisplayId: "session-123",
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: "pi",
|
||||
model: "openai/gpt-5.4-mini",
|
||||
},
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
const call = runChildProcess.mock.calls[0] as unknown as [string, string, string[]] | undefined;
|
||||
expect(call?.[2]).toContain("--session");
|
||||
expect(call?.[2]).toContain("/remote/workspace/.paperclip-runtime/pi/sessions/session-123.jsonl");
|
||||
});
|
||||
});
|
||||
@@ -3,25 +3,40 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { inferOpenAiCompatibleBiller, type AdapterExecutionContext, type AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetFile,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
asNumber,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
applyPaperclipWorkspaceEnv,
|
||||
buildPaperclipEnv,
|
||||
joinPromptSections,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
ensurePathInEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolveCommandForLogs,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
removeMaintainerOnlySkillSymlinks,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { isPiUnknownSessionError, parsePiJsonl } from "./parse.js";
|
||||
@@ -94,6 +109,19 @@ async function ensurePiSkillsInjected(
|
||||
}
|
||||
}
|
||||
|
||||
async function buildPiSkillsDir(config: Record<string, unknown>): Promise<string> {
|
||||
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-pi-skills-"));
|
||||
const target = path.join(tmp, "skills");
|
||||
await fs.mkdir(target, { recursive: true });
|
||||
const availableEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredNames = new Set(resolvePaperclipDesiredSkillNames(config, availableEntries));
|
||||
for (const entry of availableEntries) {
|
||||
if (!desiredNames.has(entry.key)) continue;
|
||||
await fs.symlink(entry.source, path.join(target, entry.runtimeName));
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
function resolvePiBiller(env: Record<string, string>, provider: string | null): string {
|
||||
return inferOpenAiCompatibleBiller(env, null) ?? provider ?? "unknown";
|
||||
}
|
||||
@@ -108,12 +136,22 @@ function buildSessionPath(agentId: string, timestamp: string): string {
|
||||
return path.join(PAPERCLIP_SESSIONS_DIR, `${safeTimestamp}-${agentId}.jsonl`);
|
||||
}
|
||||
|
||||
function buildRemoteSessionPath(runtimeRootDir: string, agentId: string, timestamp: string): string {
|
||||
const safeTimestamp = timestamp.replace(/[:.]/g, "-");
|
||||
return path.posix.join(runtimeRootDir, "sessions", `${safeTimestamp}-${agentId}.jsonl`);
|
||||
}
|
||||
|
||||
export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExecutionResult> {
|
||||
const { runId, agent, runtime, config, context, onLog, onMeta, onSpawn, authToken } = ctx;
|
||||
const executionTarget = readAdapterExecutionTarget({
|
||||
executionTarget: ctx.executionTarget,
|
||||
legacyRemoteExecution: ctx.executionTransport?.remoteExecution,
|
||||
});
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
|
||||
const promptTemplate = asString(
|
||||
config.promptTemplate,
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const command = asString(config.command, "pi");
|
||||
const model = asString(config.model, "").trim();
|
||||
@@ -139,15 +177,18 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const useConfiguredInsteadOfAgentHome = workspaceSource === "agent_home" && configuredCwd.length > 0;
|
||||
const effectiveWorkspaceCwd = useConfiguredInsteadOfAgentHome ? "" : workspaceCwd;
|
||||
const cwd = effectiveWorkspaceCwd || configuredCwd || process.cwd();
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
|
||||
// Ensure sessions directory exists
|
||||
await ensureSessionsDir();
|
||||
|
||||
// Inject skills
|
||||
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensureSessionsDir();
|
||||
}
|
||||
|
||||
const piSkillEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredPiSkillNames = resolvePaperclipDesiredSkillNames(config, piSkillEntries);
|
||||
await ensurePiSkillsInjected(onLog, piSkillEntries, desiredPiSkillNames);
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensurePiSkillsInjected(onLog, piSkillEntries, desiredPiSkillNames);
|
||||
}
|
||||
|
||||
// Build environment
|
||||
const envConfig = parseObject(config.env);
|
||||
@@ -155,7 +196,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
typeof envConfig.PAPERCLIP_API_KEY === "string" && envConfig.PAPERCLIP_API_KEY.trim().length > 0;
|
||||
const env: Record<string, string> = { ...buildPaperclipEnv(agent) };
|
||||
env.PAPERCLIP_RUN_ID = runId;
|
||||
|
||||
|
||||
const wakeTaskId =
|
||||
(typeof context.taskId === "string" && context.taskId.trim().length > 0 && context.taskId.trim()) ||
|
||||
(typeof context.issueId === "string" && context.issueId.trim().length > 0 && context.issueId.trim()) ||
|
||||
@@ -188,13 +229,17 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (approvalStatus) env.PAPERCLIP_APPROVAL_STATUS = approvalStatus;
|
||||
if (linkedIssueIds.length > 0) env.PAPERCLIP_LINKED_ISSUE_IDS = linkedIssueIds.join(",");
|
||||
if (wakePayloadJson) env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
if (workspaceCwd) env.PAPERCLIP_WORKSPACE_CWD = workspaceCwd;
|
||||
if (workspaceSource) env.PAPERCLIP_WORKSPACE_SOURCE = workspaceSource;
|
||||
if (workspaceId) env.PAPERCLIP_WORKSPACE_ID = workspaceId;
|
||||
if (workspaceRepoUrl) env.PAPERCLIP_WORKSPACE_REPO_URL = workspaceRepoUrl;
|
||||
if (workspaceRepoRef) env.PAPERCLIP_WORKSPACE_REPO_REF = workspaceRepoRef;
|
||||
if (agentHome) env.AGENT_HOME = agentHome;
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
@@ -202,27 +247,51 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (!hasExplicitApiKey && authToken) {
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
|
||||
|
||||
// Prepend installed skill `bin/` dirs to PATH so an agent's bash tool can
|
||||
// invoke skill binaries (e.g. `paperclip-get-issue`) by name. Without this,
|
||||
// any pi_local agent whose AGENTS.md calls a skill command via bash hits
|
||||
// exit 127 "command not found". Only include skills that ensurePiSkillsInjected
|
||||
// actually linked — otherwise non-injected skills' binaries would be reachable
|
||||
// to the agent.
|
||||
const injectedSkillKeys = new Set(desiredPiSkillNames);
|
||||
const skillBinDirs = piSkillEntries
|
||||
.filter((entry) => injectedSkillKeys.has(entry.key) && entry.source.length > 0)
|
||||
.map((entry) => path.join(entry.source, "bin"));
|
||||
const mergedEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
const pathKey =
|
||||
typeof mergedEnv.Path === "string" && mergedEnv.Path.length > 0 && !mergedEnv.PATH
|
||||
? "Path"
|
||||
: "PATH";
|
||||
const basePath = mergedEnv[pathKey] ?? "";
|
||||
if (skillBinDirs.length > 0) {
|
||||
const existing = basePath.split(path.delimiter).filter(Boolean);
|
||||
const additions = skillBinDirs.filter((dir) => !existing.includes(dir));
|
||||
if (additions.length > 0) {
|
||||
mergedEnv[pathKey] = [...additions, basePath].filter(Boolean).join(path.delimiter);
|
||||
}
|
||||
}
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...env })).filter(
|
||||
Object.entries(mergedEnv).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
// Validate model is available before execution
|
||||
await ensurePiModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
if (!executionTargetIsRemote) {
|
||||
await ensurePiModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
}
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
@@ -231,31 +300,84 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
let restoreRemoteWorkspace: (() => Promise<void>) | null = null;
|
||||
let remoteRuntimeRootDir: string | null = null;
|
||||
let localSkillsDir: string | null = null;
|
||||
let remoteSkillsDir: string | null = null;
|
||||
|
||||
if (executionTargetIsRemote) {
|
||||
try {
|
||||
localSkillsDir = await buildPiSkillsDir(config);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Syncing workspace and Pi runtime assets to ${describeAdapterExecutionTarget(executionTarget)}.\n`,
|
||||
);
|
||||
const preparedRemoteRuntime = await prepareAdapterExecutionTargetRuntime({
|
||||
target: executionTarget,
|
||||
adapterKey: "pi",
|
||||
workspaceLocalDir: cwd,
|
||||
assets: [
|
||||
{
|
||||
key: "skills",
|
||||
localDir: localSkillsDir,
|
||||
followSymlinks: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
restoreRemoteWorkspace = () => preparedRemoteRuntime.restoreWorkspace();
|
||||
if (adapterExecutionTargetUsesManagedHome(executionTarget) && preparedRemoteRuntime.runtimeRootDir) {
|
||||
env.HOME = preparedRemoteRuntime.runtimeRootDir;
|
||||
}
|
||||
remoteRuntimeRootDir = preparedRemoteRuntime.runtimeRootDir;
|
||||
remoteSkillsDir = preparedRemoteRuntime.assetDirs.skills ?? null;
|
||||
} catch (error) {
|
||||
await Promise.allSettled([
|
||||
restoreRemoteWorkspace?.(),
|
||||
localSkillsDir ? fs.rm(path.dirname(localSkillsDir), { recursive: true, force: true }).catch(() => undefined) : Promise.resolve(),
|
||||
]);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle session
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const runtimeRemoteExecution = parseObject(runtimeSessionParams.remoteExecution);
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
const sessionPath = canResumeSession ? runtimeSessionId : buildSessionPath(agent.id, new Date().toISOString());
|
||||
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(effectiveExecutionCwd)) &&
|
||||
adapterExecutionTargetSessionMatches(runtimeRemoteExecution, executionTarget);
|
||||
const sessionPath = canResumeSession
|
||||
? runtimeSessionId
|
||||
: executionTargetIsRemote && remoteRuntimeRootDir
|
||||
? buildRemoteSessionPath(remoteRuntimeRootDir, agent.id, new Date().toISOString())
|
||||
: buildSessionPath(agent.id, new Date().toISOString());
|
||||
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Pi session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
executionTargetIsRemote
|
||||
? `[paperclip] Pi session "${runtimeSessionId}" does not match the current remote execution identity and will not be resumed in "${effectiveExecutionCwd}". Starting a fresh remote session.\n`
|
||||
: `[paperclip] Pi session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${effectiveExecutionCwd}".\n`,
|
||||
);
|
||||
}
|
||||
|
||||
// Ensure session file exists (Pi requires this on first run)
|
||||
if (!canResumeSession) {
|
||||
try {
|
||||
await fs.writeFile(sessionPath, "", { flag: "wx" });
|
||||
} catch (err) {
|
||||
// File may already exist, that's ok
|
||||
if ((err as NodeJS.ErrnoException).code !== "EEXIST") {
|
||||
throw err;
|
||||
if (executionTargetIsRemote) {
|
||||
await ensureAdapterExecutionTargetFile(runId, executionTarget, sessionPath, {
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec: 15,
|
||||
graceSec: 5,
|
||||
onLog,
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
await fs.writeFile(sessionPath, "", { flag: "wx" });
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException).code !== "EEXIST") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -266,7 +388,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
? path.resolve(cwd, instructionsFilePath)
|
||||
: "";
|
||||
const instructionsFileDir = instructionsFilePath ? `${path.dirname(instructionsFilePath)}/` : "";
|
||||
|
||||
|
||||
let systemPromptExtension = "";
|
||||
let instructionsReadFailed = false;
|
||||
if (resolvedInstructionsFilePath) {
|
||||
@@ -276,7 +398,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
`${instructionsContents}\n\n` +
|
||||
`The above agent instructions were loaded from ${resolvedInstructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsFileDir}.\n\n` +
|
||||
`You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.`;
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE;
|
||||
} catch (err) {
|
||||
instructionsReadFailed = true;
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
@@ -340,26 +462,24 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
|
||||
const buildArgs = (sessionFile: string): string[] => {
|
||||
const args: string[] = [];
|
||||
|
||||
|
||||
// Use JSON mode for structured output with print mode (non-interactive)
|
||||
args.push("--mode", "json");
|
||||
args.push("-p"); // Non-interactive mode: process prompt and exit
|
||||
|
||||
|
||||
// Use --append-system-prompt to extend Pi's default system prompt
|
||||
args.push("--append-system-prompt", renderedSystemPromptExtension);
|
||||
|
||||
|
||||
if (provider) args.push("--provider", provider);
|
||||
if (modelId) args.push("--model", modelId);
|
||||
if (thinking) args.push("--thinking", thinking);
|
||||
|
||||
args.push("--tools", "read,bash,edit,write,grep,find,ls");
|
||||
args.push("--session", sessionFile);
|
||||
|
||||
// Add Paperclip skills directory so Pi can load the paperclip skill
|
||||
args.push("--skill", PI_AGENT_SKILLS_DIR);
|
||||
args.push("--skill", remoteSkillsDir ?? PI_AGENT_SKILLS_DIR);
|
||||
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
|
||||
// Add the user prompt as the last argument
|
||||
args.push(userPrompt);
|
||||
|
||||
@@ -372,7 +492,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onMeta({
|
||||
adapterType: "pi_local",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
cwd: effectiveExecutionCwd,
|
||||
commandNotes,
|
||||
commandArgs: args,
|
||||
env: loggedEnv,
|
||||
@@ -390,13 +510,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
await onLog(stream, chunk);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
// Buffer stdout and emit only complete lines
|
||||
stdoutBuffer += chunk;
|
||||
const lines = stdoutBuffer.split("\n");
|
||||
// Keep the last (potentially incomplete) line in the buffer
|
||||
stdoutBuffer = lines.pop() || "";
|
||||
|
||||
|
||||
// Emit complete lines
|
||||
for (const line of lines) {
|
||||
if (line) {
|
||||
@@ -405,20 +525,20 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
}
|
||||
};
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
const proc = await runAdapterExecutionTargetProcess(runId, executionTarget, command, args, {
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
env: executionTargetIsRemote ? env : runtimeEnv,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog: bufferedOnLog,
|
||||
});
|
||||
|
||||
|
||||
// Flush any remaining buffer content
|
||||
if (stdoutBuffer) {
|
||||
await onLog("stdout", stdoutBuffer);
|
||||
}
|
||||
|
||||
|
||||
return {
|
||||
proc,
|
||||
rawStderr: proc.stderr,
|
||||
@@ -446,7 +566,18 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
|
||||
const resolvedSessionId = clearSessionOnMissingSession ? null : sessionPath;
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? { sessionId: resolvedSessionId, cwd }
|
||||
? {
|
||||
sessionId: resolvedSessionId,
|
||||
cwd: effectiveExecutionCwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
...(executionTargetIsRemote
|
||||
? {
|
||||
remoteExecution: adapterExecutionTargetSessionIdentity(executionTarget),
|
||||
}
|
||||
: {}),
|
||||
}
|
||||
: null;
|
||||
|
||||
const stderrLine = firstNonEmptyLine(attempt.proc.stderr);
|
||||
@@ -482,30 +613,49 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionPath);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || initial.parsed.errors.length > 0);
|
||||
|
||||
if (
|
||||
canResumeSession &&
|
||||
initialFailed &&
|
||||
isPiUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Pi session "${runtimeSessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const newSessionPath = buildSessionPath(agent.id, new Date().toISOString());
|
||||
try {
|
||||
await fs.writeFile(newSessionPath, "", { flag: "wx" });
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException).code !== "EEXIST") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const retry = await runAttempt(newSessionPath);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
try {
|
||||
const initial = await runAttempt(sessionPath);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || initial.parsed.errors.length > 0);
|
||||
|
||||
return toResult(initial);
|
||||
if (
|
||||
canResumeSession &&
|
||||
initialFailed &&
|
||||
isPiUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Pi session "${runtimeSessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const newSessionPath = executionTargetIsRemote && remoteRuntimeRootDir
|
||||
? buildRemoteSessionPath(remoteRuntimeRootDir, agent.id, new Date().toISOString())
|
||||
: buildSessionPath(agent.id, new Date().toISOString());
|
||||
if (executionTargetIsRemote) {
|
||||
await ensureAdapterExecutionTargetFile(runId, executionTarget, newSessionPath, {
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec: 15,
|
||||
graceSec: 5,
|
||||
onLog,
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
await fs.writeFile(newSessionPath, "", { flag: "wx" });
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException).code !== "EEXIST") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
const retry = await runAttempt(newSessionPath);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
await Promise.all([
|
||||
restoreRemoteWorkspace?.(),
|
||||
localSkillsDir ? fs.rm(path.dirname(localSkillsDir), { recursive: true, force: true }).catch(() => undefined) : Promise.resolve(),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
84
packages/db/scripts/create-auth-bootstrap-invite.ts
Normal file
84
packages/db/scripts/create-auth-bootstrap-invite.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import { createHash, randomBytes } from "node:crypto";
|
||||
import { readFileSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { and, eq, gt, isNull } from "drizzle-orm";
|
||||
import { createDb } from "../src/client.js";
|
||||
import { invites } from "../src/schema/index.js";
|
||||
|
||||
function hashToken(token: string) {
|
||||
return createHash("sha256").update(token).digest("hex");
|
||||
}
|
||||
|
||||
function createInviteToken() {
|
||||
return `pcp_bootstrap_${randomBytes(24).toString("hex")}`;
|
||||
}
|
||||
|
||||
function readArg(flag: string) {
|
||||
const index = process.argv.indexOf(flag);
|
||||
if (index === -1) return null;
|
||||
return process.argv[index + 1] ?? null;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const configPath = readArg("--config");
|
||||
const baseUrl = readArg("--base-url");
|
||||
|
||||
if (!configPath || !baseUrl) {
|
||||
throw new Error("Usage: tsx create-auth-bootstrap-invite.ts --config <path> --base-url <url>");
|
||||
}
|
||||
|
||||
const config = JSON.parse(readFileSync(path.resolve(configPath), "utf8")) as {
|
||||
database?: {
|
||||
mode?: string;
|
||||
embeddedPostgresPort?: number;
|
||||
connectionString?: string;
|
||||
};
|
||||
};
|
||||
const dbUrl =
|
||||
config.database?.mode === "postgres"
|
||||
? config.database.connectionString
|
||||
: `postgres://paperclip:paperclip@127.0.0.1:${config.database?.embeddedPostgresPort ?? 54329}/paperclip`;
|
||||
if (!dbUrl) {
|
||||
throw new Error(`Could not resolve database connection from ${configPath}`);
|
||||
}
|
||||
|
||||
const db = createDb(dbUrl);
|
||||
const closableDb = db as typeof db & {
|
||||
$client?: {
|
||||
end?: (options?: { timeout?: number }) => Promise<void>;
|
||||
};
|
||||
};
|
||||
|
||||
try {
|
||||
const now = new Date();
|
||||
await db
|
||||
.update(invites)
|
||||
.set({ revokedAt: now, updatedAt: now })
|
||||
.where(
|
||||
and(
|
||||
eq(invites.inviteType, "bootstrap_ceo"),
|
||||
isNull(invites.revokedAt),
|
||||
isNull(invites.acceptedAt),
|
||||
gt(invites.expiresAt, now)
|
||||
)
|
||||
);
|
||||
|
||||
const token = createInviteToken();
|
||||
await db.insert(invites).values({
|
||||
inviteType: "bootstrap_ceo",
|
||||
tokenHash: hashToken(token),
|
||||
allowedJoinTypes: "human",
|
||||
expiresAt: new Date(Date.now() + 72 * 60 * 60 * 1000),
|
||||
invitedByUserId: "system",
|
||||
});
|
||||
|
||||
process.stdout.write(`${baseUrl.replace(/\/+$/, "")}/invite/${token}\n`);
|
||||
} finally {
|
||||
await closableDb.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
process.stderr.write(`${error instanceof Error ? error.stack ?? error.message : String(error)}\n`);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -127,6 +127,7 @@ describeEmbeddedPostgres("runDatabaseBackup", () => {
|
||||
backupDir,
|
||||
retention: { dailyDays: 7, weeklyWeeks: 4, monthlyMonths: 1 },
|
||||
filenamePrefix: "paperclip-test",
|
||||
backupEngine: "javascript",
|
||||
});
|
||||
|
||||
expect(result.backupFile).toMatch(/paperclip-test-.*\.sql\.gz$/);
|
||||
@@ -148,14 +149,17 @@ describeEmbeddedPostgres("runDatabaseBackup", () => {
|
||||
title: string;
|
||||
payload: string;
|
||||
state: string;
|
||||
metadata: { index: number; even: boolean };
|
||||
metadata: { index: number; even: boolean } | string;
|
||||
}[]>(`
|
||||
SELECT "title", "payload", "state"::text AS "state", "metadata"
|
||||
FROM "public"."backup_test_records"
|
||||
WHERE "title" IN ('row-0', 'row-159')
|
||||
ORDER BY "title"
|
||||
`);
|
||||
expect(sampleRows).toEqual([
|
||||
expect(sampleRows.map((row) => ({
|
||||
...row,
|
||||
metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata,
|
||||
}))).toEqual([
|
||||
{
|
||||
title: "row-0",
|
||||
payload,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import { createReadStream, createWriteStream, existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from "node:fs";
|
||||
import { basename, resolve } from "node:path";
|
||||
import { createInterface } from "node:readline";
|
||||
import { spawn } from "node:child_process";
|
||||
import { open as openFile } from "node:fs/promises";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import { createGunzip, createGzip } from "node:zlib";
|
||||
import postgres from "postgres";
|
||||
@@ -20,6 +22,7 @@ export type RunDatabaseBackupOptions = {
|
||||
includeMigrationJournal?: boolean;
|
||||
excludeTables?: string[];
|
||||
nullifyColumns?: Record<string, string[]>;
|
||||
backupEngine?: "auto" | "pg_dump" | "javascript";
|
||||
};
|
||||
|
||||
export type RunDatabaseBackupResult = {
|
||||
@@ -61,6 +64,9 @@ type ExtensionDefinition = {
|
||||
const DRIZZLE_SCHEMA = "drizzle";
|
||||
const DRIZZLE_MIGRATIONS_TABLE = "__drizzle_migrations";
|
||||
const DEFAULT_BACKUP_WRITE_BUFFER_BYTES = 1024 * 1024;
|
||||
const BACKUP_DATA_CURSOR_ROWS = 100;
|
||||
const BACKUP_CLI_STDERR_BYTES = 64 * 1024;
|
||||
const BACKUP_BREAKPOINT_DETECT_BYTES = 64 * 1024;
|
||||
|
||||
const STATEMENT_BREAKPOINT = "-- paperclip statement breakpoint 69f6f3f1-42fd-46a6-bf17-d1d85f8f3900";
|
||||
|
||||
@@ -223,6 +229,134 @@ function tableKey(schemaName: string, tableName: string): string {
|
||||
return `${schemaName}.${tableName}`;
|
||||
}
|
||||
|
||||
function hasBackupTransforms(opts: RunDatabaseBackupOptions): boolean {
|
||||
return opts.includeMigrationJournal === true ||
|
||||
(opts.excludeTables?.length ?? 0) > 0 ||
|
||||
Object.keys(opts.nullifyColumns ?? {}).length > 0;
|
||||
}
|
||||
|
||||
function formatSqlValue(rawValue: unknown, columnName: string | undefined, nullifiedColumns: Set<string>): string {
|
||||
const val = columnName && nullifiedColumns.has(columnName) ? null : rawValue;
|
||||
if (val === null || val === undefined) return "NULL";
|
||||
if (typeof val === "boolean") return val ? "true" : "false";
|
||||
if (typeof val === "number") return String(val);
|
||||
if (val instanceof Date) return formatSqlLiteral(val.toISOString());
|
||||
if (typeof val === "object") return formatSqlLiteral(JSON.stringify(val));
|
||||
return formatSqlLiteral(String(val));
|
||||
}
|
||||
|
||||
function appendCapturedStderr(previous: string, chunk: Buffer | string): string {
|
||||
const next = previous + (Buffer.isBuffer(chunk) ? chunk.toString("utf8") : chunk);
|
||||
if (Buffer.byteLength(next, "utf8") <= BACKUP_CLI_STDERR_BYTES) return next;
|
||||
return Buffer.from(next, "utf8").subarray(-BACKUP_CLI_STDERR_BYTES).toString("utf8");
|
||||
}
|
||||
|
||||
async function waitForChildExit(child: ReturnType<typeof spawn>, label: string): Promise<void> {
|
||||
let stderr = "";
|
||||
child.stderr?.on("data", (chunk) => {
|
||||
stderr = appendCapturedStderr(stderr, chunk);
|
||||
});
|
||||
|
||||
const result = await new Promise<{ code: number | null; signal: NodeJS.Signals | null }>((resolve, reject) => {
|
||||
child.once("error", reject);
|
||||
child.once("exit", (code, signal) => resolve({ code, signal }));
|
||||
});
|
||||
|
||||
if (result.signal) {
|
||||
throw new Error(`${label} exited via ${result.signal}${stderr.trim() ? `: ${stderr.trim()}` : ""}`);
|
||||
}
|
||||
if (result.code !== 0) {
|
||||
throw new Error(`${label} failed with exit code ${result.code ?? "unknown"}${stderr.trim() ? `: ${stderr.trim()}` : ""}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function runPgDumpBackup(opts: {
|
||||
connectionString: string;
|
||||
backupFile: string;
|
||||
connectTimeout: number;
|
||||
}): Promise<void> {
|
||||
const pgDumpBin = process.env.PAPERCLIP_PG_DUMP_PATH || "pg_dump";
|
||||
const child = spawn(
|
||||
pgDumpBin,
|
||||
[
|
||||
`--dbname=${opts.connectionString}`,
|
||||
"--format=plain",
|
||||
"--clean",
|
||||
"--if-exists",
|
||||
"--no-owner",
|
||||
"--no-privileges",
|
||||
"--schema=public",
|
||||
],
|
||||
{
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
PGCONNECT_TIMEOUT: String(opts.connectTimeout),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
if (!child.stdout) {
|
||||
throw new Error("pg_dump did not expose stdout");
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
pipeline(child.stdout, createGzip(), createWriteStream(opts.backupFile)),
|
||||
waitForChildExit(child, pgDumpBin),
|
||||
]);
|
||||
}
|
||||
|
||||
async function restoreWithPsql(opts: RunDatabaseRestoreOptions, connectTimeout: number): Promise<void> {
|
||||
const psqlBin = process.env.PAPERCLIP_PSQL_PATH || "psql";
|
||||
const child = spawn(
|
||||
psqlBin,
|
||||
[
|
||||
`--dbname=${opts.connectionString}`,
|
||||
"--set=ON_ERROR_STOP=1",
|
||||
"--quiet",
|
||||
"--no-psqlrc",
|
||||
],
|
||||
{
|
||||
stdio: ["pipe", "ignore", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
PGCONNECT_TIMEOUT: String(connectTimeout),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
if (!child.stdin) {
|
||||
throw new Error("psql did not expose stdin");
|
||||
}
|
||||
|
||||
const input = opts.backupFile.endsWith(".gz")
|
||||
? createReadStream(opts.backupFile).pipe(createGunzip())
|
||||
: createReadStream(opts.backupFile);
|
||||
|
||||
await Promise.all([
|
||||
pipeline(input, child.stdin),
|
||||
waitForChildExit(child, psqlBin),
|
||||
]);
|
||||
}
|
||||
|
||||
async function hasStatementBreakpoints(backupFile: string): Promise<boolean> {
|
||||
const raw = createReadStream(backupFile);
|
||||
const stream = backupFile.endsWith(".gz") ? raw.pipe(createGunzip()) : raw;
|
||||
let text = "";
|
||||
|
||||
try {
|
||||
for await (const chunk of stream) {
|
||||
text += Buffer.isBuffer(chunk) ? chunk.toString("utf8") : String(chunk);
|
||||
if (text.includes(STATEMENT_BREAKPOINT)) return true;
|
||||
if (Buffer.byteLength(text, "utf8") >= BACKUP_BREAKPOINT_DETECT_BYTES) return false;
|
||||
}
|
||||
return text.includes(STATEMENT_BREAKPOINT);
|
||||
} finally {
|
||||
stream.destroy();
|
||||
raw.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
async function* readRestoreStatements(backupFile: string): AsyncGenerator<string> {
|
||||
const raw = createReadStream(backupFile);
|
||||
const stream = backupFile.endsWith(".gz") ? raw.pipe(createGunzip()) : raw;
|
||||
@@ -263,41 +397,21 @@ async function* readRestoreStatements(backupFile: string): AsyncGenerator<string
|
||||
}
|
||||
|
||||
export function createBufferedTextFileWriter(filePath: string, maxBufferedBytes = DEFAULT_BACKUP_WRITE_BUFFER_BYTES) {
|
||||
const stream = createWriteStream(filePath, { encoding: "utf8" });
|
||||
const filePromise = openFile(filePath, "w");
|
||||
const flushThreshold = Math.max(1, Math.trunc(maxBufferedBytes));
|
||||
let bufferedLines: string[] = [];
|
||||
let bufferedBytes = 0;
|
||||
let firstChunk = true;
|
||||
let closed = false;
|
||||
let streamError: Error | null = null;
|
||||
let pendingWrite = Promise.resolve();
|
||||
|
||||
stream.on("error", (error) => {
|
||||
streamError = error;
|
||||
});
|
||||
|
||||
const writeChunk = async (chunk: string): Promise<void> => {
|
||||
if (streamError) throw streamError;
|
||||
const canContinue = stream.write(chunk);
|
||||
if (!canContinue) {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const handleDrain = () => {
|
||||
cleanup();
|
||||
resolve();
|
||||
};
|
||||
const handleError = (error: Error) => {
|
||||
cleanup();
|
||||
reject(error);
|
||||
};
|
||||
const cleanup = () => {
|
||||
stream.off("drain", handleDrain);
|
||||
stream.off("error", handleError);
|
||||
};
|
||||
stream.once("drain", handleDrain);
|
||||
stream.once("error", handleError);
|
||||
});
|
||||
const writeChunk = async (chunk: string | Buffer): Promise<void> => {
|
||||
const file = await filePromise;
|
||||
if (typeof chunk === "string") {
|
||||
await file.write(chunk, null, "utf8");
|
||||
} else {
|
||||
await file.write(chunk);
|
||||
}
|
||||
if (streamError) throw streamError;
|
||||
};
|
||||
|
||||
const flushBufferedLines = () => {
|
||||
@@ -316,37 +430,43 @@ export function createBufferedTextFileWriter(filePath: string, maxBufferedBytes
|
||||
if (closed) {
|
||||
throw new Error(`Cannot write to closed backup file: ${filePath}`);
|
||||
}
|
||||
if (streamError) throw streamError;
|
||||
bufferedLines.push(line);
|
||||
bufferedBytes += Buffer.byteLength(line, "utf8") + 1;
|
||||
if (bufferedBytes >= flushThreshold) {
|
||||
flushBufferedLines();
|
||||
}
|
||||
},
|
||||
async drain() {
|
||||
if (closed) {
|
||||
throw new Error(`Cannot drain closed backup file: ${filePath}`);
|
||||
}
|
||||
flushBufferedLines();
|
||||
await pendingWrite;
|
||||
},
|
||||
async writeRaw(chunk: string | Buffer) {
|
||||
if (closed) {
|
||||
throw new Error(`Cannot write to closed backup file: ${filePath}`);
|
||||
}
|
||||
flushBufferedLines();
|
||||
firstChunk = false;
|
||||
pendingWrite = pendingWrite.then(() => writeChunk(chunk));
|
||||
await pendingWrite;
|
||||
},
|
||||
async close() {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
flushBufferedLines();
|
||||
await pendingWrite;
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
if (streamError) {
|
||||
reject(streamError);
|
||||
return;
|
||||
}
|
||||
stream.end((error?: Error | null) => {
|
||||
if (error) reject(error);
|
||||
else resolve();
|
||||
});
|
||||
});
|
||||
if (streamError) throw streamError;
|
||||
const file = await filePromise;
|
||||
await file.close();
|
||||
},
|
||||
async abort() {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
bufferedLines = [];
|
||||
bufferedBytes = 0;
|
||||
stream.destroy();
|
||||
await pendingWrite.catch(() => {});
|
||||
await filePromise.then((file) => file.close()).catch(() => {});
|
||||
if (existsSync(filePath)) {
|
||||
try {
|
||||
unlinkSync(filePath);
|
||||
@@ -362,16 +482,53 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
|
||||
const filenamePrefix = opts.filenamePrefix ?? "paperclip";
|
||||
const retention = opts.retention;
|
||||
const connectTimeout = Math.max(1, Math.trunc(opts.connectTimeoutSeconds ?? 5));
|
||||
const backupEngine = opts.backupEngine ?? "auto";
|
||||
const canUsePgDump = !hasBackupTransforms(opts);
|
||||
const includeMigrationJournal = opts.includeMigrationJournal === true;
|
||||
const excludedTableNames = normalizeTableNameSet(opts.excludeTables);
|
||||
const nullifiedColumnsByTable = normalizeNullifyColumnMap(opts.nullifyColumns);
|
||||
const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
|
||||
let sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
|
||||
let sqlClosed = false;
|
||||
const closeSql = async () => {
|
||||
if (sqlClosed) return;
|
||||
sqlClosed = true;
|
||||
await sql.end();
|
||||
};
|
||||
mkdirSync(opts.backupDir, { recursive: true });
|
||||
const sqlFile = resolve(opts.backupDir, `${filenamePrefix}-${timestamp()}.sql`);
|
||||
const backupFile = `${sqlFile}.gz`;
|
||||
const writer = createBufferedTextFileWriter(sqlFile);
|
||||
|
||||
try {
|
||||
if (backupEngine === "pg_dump" || (backupEngine === "auto" && canUsePgDump)) {
|
||||
await sql`SELECT 1`;
|
||||
try {
|
||||
await closeSql();
|
||||
await runPgDumpBackup({
|
||||
connectionString: opts.connectionString,
|
||||
backupFile,
|
||||
connectTimeout,
|
||||
});
|
||||
await writer.abort();
|
||||
const sizeBytes = statSync(backupFile).size;
|
||||
const prunedCount = pruneOldBackups(opts.backupDir, retention, filenamePrefix);
|
||||
return {
|
||||
backupFile,
|
||||
sizeBytes,
|
||||
prunedCount,
|
||||
};
|
||||
} catch (error) {
|
||||
if (existsSync(backupFile)) {
|
||||
try { unlinkSync(backupFile); } catch { /* ignore */ }
|
||||
}
|
||||
if (backupEngine === "pg_dump") {
|
||||
throw error;
|
||||
}
|
||||
sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
|
||||
sqlClosed = false;
|
||||
}
|
||||
}
|
||||
|
||||
await sql`SELECT 1`;
|
||||
|
||||
const emit = (line: string) => writer.emit(line);
|
||||
@@ -703,20 +860,39 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
|
||||
|
||||
emit(`-- Data for: ${schema_name}.${tablename} (${count[0]!.n} rows)`);
|
||||
|
||||
const rows = await sql.unsafe(`SELECT * FROM ${qualifiedTableName}`).values();
|
||||
const nullifiedColumns = nullifiedColumnsByTable.get(tablename) ?? new Set<string>();
|
||||
for (const row of rows) {
|
||||
const values = row.map((rawValue: unknown, index) => {
|
||||
const columnName = cols[index]?.column_name;
|
||||
const val = columnName && nullifiedColumns.has(columnName) ? null : rawValue;
|
||||
if (val === null || val === undefined) return "NULL";
|
||||
if (typeof val === "boolean") return val ? "true" : "false";
|
||||
if (typeof val === "number") return String(val);
|
||||
if (val instanceof Date) return formatSqlLiteral(val.toISOString());
|
||||
if (typeof val === "object") return formatSqlLiteral(JSON.stringify(val));
|
||||
return formatSqlLiteral(String(val));
|
||||
});
|
||||
emitStatement(`INSERT INTO ${qualifiedTableName} (${colNames}) VALUES (${values.join(", ")});`);
|
||||
if (backupEngine !== "javascript" && nullifiedColumns.size === 0) {
|
||||
emit(`COPY ${qualifiedTableName} (${colNames}) FROM stdin;`);
|
||||
await writer.writeRaw("\n");
|
||||
const copySql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
|
||||
try {
|
||||
const copyStream = await copySql
|
||||
.unsafe(`COPY ${qualifiedTableName} (${colNames}) TO STDOUT`)
|
||||
.readable();
|
||||
for await (const chunk of copyStream) {
|
||||
await writer.writeRaw(Buffer.isBuffer(chunk) ? chunk : Buffer.from(String(chunk)));
|
||||
}
|
||||
} finally {
|
||||
await copySql.end();
|
||||
}
|
||||
await writer.writeRaw("\\.\n");
|
||||
emitStatementBoundary();
|
||||
emit("");
|
||||
continue;
|
||||
}
|
||||
|
||||
const rowCursor = sql
|
||||
.unsafe(`SELECT * FROM ${qualifiedTableName}`)
|
||||
.values()
|
||||
.cursor(BACKUP_DATA_CURSOR_ROWS) as AsyncIterable<unknown[][]>;
|
||||
for await (const rows of rowCursor) {
|
||||
for (const row of rows) {
|
||||
const values = row.map((rawValue, index) =>
|
||||
formatSqlValue(rawValue, cols[index]?.column_name, nullifiedColumns),
|
||||
);
|
||||
emitStatement(`INSERT INTO ${qualifiedTableName} (${colNames}) VALUES (${values.join(", ")});`);
|
||||
}
|
||||
await writer.drain();
|
||||
}
|
||||
emit("");
|
||||
}
|
||||
@@ -768,12 +944,23 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
await sql.end();
|
||||
await closeSql();
|
||||
}
|
||||
}
|
||||
|
||||
export async function runDatabaseRestore(opts: RunDatabaseRestoreOptions): Promise<void> {
|
||||
const connectTimeout = Math.max(1, Math.trunc(opts.connectTimeoutSeconds ?? 5));
|
||||
try {
|
||||
await restoreWithPsql(opts, connectTimeout);
|
||||
return;
|
||||
} catch (error) {
|
||||
if (!(await hasStatementBreakpoints(opts.backupFile))) {
|
||||
throw new Error(
|
||||
`Failed to restore ${basename(opts.backupFile)} with psql: ${sanitizeRestoreErrorMessage(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
|
||||
|
||||
try {
|
||||
|
||||
@@ -467,4 +467,78 @@ describeEmbeddedPostgres("applyPendingMigrations", () => {
|
||||
},
|
||||
20_000,
|
||||
);
|
||||
|
||||
it(
|
||||
"replays migration 0059 safely when plugin_database_namespaces already exists",
|
||||
async () => {
|
||||
const connectionString = await createTempDatabase();
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const sql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const pluginNamespacesHash = await migrationHash(
|
||||
"0059_plugin_database_namespaces.sql",
|
||||
);
|
||||
|
||||
await sql.unsafe(
|
||||
`DELETE FROM "drizzle"."__drizzle_migrations" WHERE hash = '${pluginNamespacesHash}'`,
|
||||
);
|
||||
|
||||
const tables = await sql.unsafe<{ table_name: string }[]>(
|
||||
`
|
||||
SELECT table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name IN ('plugin_database_namespaces', 'plugin_migrations')
|
||||
ORDER BY table_name
|
||||
`,
|
||||
);
|
||||
expect(tables.map((row) => row.table_name)).toEqual([
|
||||
"plugin_database_namespaces",
|
||||
"plugin_migrations",
|
||||
]);
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
const pendingState = await inspectMigrations(connectionString);
|
||||
expect(pendingState).toMatchObject({
|
||||
status: "needsMigrations",
|
||||
pendingMigrations: ["0059_plugin_database_namespaces.sql"],
|
||||
reason: "pending-migrations",
|
||||
});
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const finalState = await inspectMigrations(connectionString);
|
||||
expect(finalState.status).toBe("upToDate");
|
||||
|
||||
const verifySql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const indexes = await verifySql.unsafe<{ indexname: string }[]>(
|
||||
`
|
||||
SELECT indexname
|
||||
FROM pg_indexes
|
||||
WHERE schemaname = 'public'
|
||||
AND tablename IN ('plugin_database_namespaces', 'plugin_migrations')
|
||||
ORDER BY indexname
|
||||
`,
|
||||
);
|
||||
expect(indexes.map((row) => row.indexname)).toEqual(
|
||||
expect.arrayContaining([
|
||||
"plugin_database_namespaces_namespace_idx",
|
||||
"plugin_database_namespaces_plugin_idx",
|
||||
"plugin_database_namespaces_status_idx",
|
||||
"plugin_migrations_plugin_idx",
|
||||
"plugin_migrations_plugin_key_idx",
|
||||
"plugin_migrations_status_idx",
|
||||
]),
|
||||
);
|
||||
} finally {
|
||||
await verifySql.end();
|
||||
}
|
||||
},
|
||||
20_000,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -31,4 +31,5 @@ export {
|
||||
formatEmbeddedPostgresError,
|
||||
} from "./embedded-postgres-error.js";
|
||||
export { issueRelations } from "./schema/issue_relations.js";
|
||||
export { issueReferenceMentions } from "./schema/issue_reference_mentions.js";
|
||||
export * from "./schema/index.js";
|
||||
|
||||
57
packages/db/src/migrations/0057_tidy_join_requests.sql
Normal file
57
packages/db/src/migrations/0057_tidy_join_requests.sql
Normal file
@@ -0,0 +1,57 @@
|
||||
WITH ranked_user_requests AS (
|
||||
SELECT
|
||||
id,
|
||||
row_number() OVER (
|
||||
PARTITION BY company_id, requesting_user_id
|
||||
ORDER BY created_at ASC, id ASC
|
||||
) AS rank
|
||||
FROM join_requests
|
||||
WHERE request_type = 'human'
|
||||
AND status = 'pending_approval'
|
||||
AND requesting_user_id IS NOT NULL
|
||||
)
|
||||
UPDATE join_requests
|
||||
SET
|
||||
status = 'rejected',
|
||||
rejected_at = COALESCE(rejected_at, now()),
|
||||
updated_at = now()
|
||||
WHERE id IN (
|
||||
SELECT id
|
||||
FROM ranked_user_requests
|
||||
WHERE rank > 1
|
||||
);
|
||||
--> statement-breakpoint
|
||||
WITH ranked_email_requests AS (
|
||||
SELECT
|
||||
id,
|
||||
row_number() OVER (
|
||||
PARTITION BY company_id, lower(request_email_snapshot)
|
||||
ORDER BY created_at ASC, id ASC
|
||||
) AS rank
|
||||
FROM join_requests
|
||||
WHERE request_type = 'human'
|
||||
AND status = 'pending_approval'
|
||||
AND request_email_snapshot IS NOT NULL
|
||||
)
|
||||
UPDATE join_requests
|
||||
SET
|
||||
status = 'rejected',
|
||||
rejected_at = COALESCE(rejected_at, now()),
|
||||
updated_at = now()
|
||||
WHERE id IN (
|
||||
SELECT id
|
||||
FROM ranked_email_requests
|
||||
WHERE rank > 1
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "join_requests_pending_human_user_uq"
|
||||
ON "join_requests" USING btree ("company_id", "requesting_user_id")
|
||||
WHERE "request_type" = 'human'
|
||||
AND "status" = 'pending_approval'
|
||||
AND "requesting_user_id" IS NOT NULL;
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "join_requests_pending_human_email_uq"
|
||||
ON "join_requests" USING btree ("company_id", lower("request_email_snapshot"))
|
||||
WHERE "request_type" = 'human'
|
||||
AND "status" = 'pending_approval'
|
||||
AND "request_email_snapshot" IS NOT NULL;
|
||||
6
packages/db/src/migrations/0058_wealthy_starbolt.sql
Normal file
6
packages/db/src/migrations/0058_wealthy_starbolt.sql
Normal file
@@ -0,0 +1,6 @@
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "liveness_state" text;--> statement-breakpoint
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "liveness_reason" text;--> statement-breakpoint
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "continuation_attempt" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "last_useful_action_at" timestamp with time zone;--> statement-breakpoint
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "next_action" text;--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "heartbeat_runs_company_liveness_idx" ON "heartbeat_runs" USING btree ("company_id","liveness_state","created_at");
|
||||
@@ -0,0 +1,41 @@
|
||||
CREATE TABLE IF NOT EXISTS "plugin_database_namespaces" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"plugin_id" uuid NOT NULL,
|
||||
"plugin_key" text NOT NULL,
|
||||
"namespace_name" text NOT NULL,
|
||||
"namespace_mode" text DEFAULT 'schema' NOT NULL,
|
||||
"status" text DEFAULT 'active' NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE IF NOT EXISTS "plugin_migrations" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"plugin_id" uuid NOT NULL,
|
||||
"plugin_key" text NOT NULL,
|
||||
"namespace_name" text NOT NULL,
|
||||
"migration_key" text NOT NULL,
|
||||
"checksum" text NOT NULL,
|
||||
"plugin_version" text NOT NULL,
|
||||
"status" text NOT NULL,
|
||||
"started_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"applied_at" timestamp with time zone,
|
||||
"error_message" text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'plugin_database_namespaces_plugin_id_plugins_id_fk') THEN
|
||||
ALTER TABLE "plugin_database_namespaces" ADD CONSTRAINT "plugin_database_namespaces_plugin_id_plugins_id_fk" FOREIGN KEY ("plugin_id") REFERENCES "public"."plugins"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'plugin_migrations_plugin_id_plugins_id_fk') THEN
|
||||
ALTER TABLE "plugin_migrations" ADD CONSTRAINT "plugin_migrations_plugin_id_plugins_id_fk" FOREIGN KEY ("plugin_id") REFERENCES "public"."plugins"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "plugin_database_namespaces_plugin_idx" ON "plugin_database_namespaces" USING btree ("plugin_id");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "plugin_database_namespaces_namespace_idx" ON "plugin_database_namespaces" USING btree ("namespace_name");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "plugin_database_namespaces_status_idx" ON "plugin_database_namespaces" USING btree ("status");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "plugin_migrations_plugin_key_idx" ON "plugin_migrations" USING btree ("plugin_id","migration_key");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "plugin_migrations_plugin_idx" ON "plugin_migrations" USING btree ("plugin_id");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "plugin_migrations_status_idx" ON "plugin_migrations" USING btree ("status");
|
||||
50
packages/db/src/migrations/0060_orange_annihilus.sql
Normal file
50
packages/db/src/migrations/0060_orange_annihilus.sql
Normal file
@@ -0,0 +1,50 @@
|
||||
CREATE TABLE IF NOT EXISTS "issue_reference_mentions" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"source_issue_id" uuid NOT NULL,
|
||||
"target_issue_id" uuid NOT NULL,
|
||||
"source_kind" text NOT NULL,
|
||||
"source_record_id" uuid,
|
||||
"document_key" text,
|
||||
"matched_text" text,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_reference_mentions_company_id_companies_id_fk') THEN
|
||||
ALTER TABLE "issue_reference_mentions" ADD CONSTRAINT "issue_reference_mentions_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_reference_mentions_source_issue_id_issues_id_fk') THEN
|
||||
ALTER TABLE "issue_reference_mentions" ADD CONSTRAINT "issue_reference_mentions_source_issue_id_issues_id_fk" FOREIGN KEY ("source_issue_id") REFERENCES "public"."issues"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_reference_mentions_target_issue_id_issues_id_fk') THEN
|
||||
ALTER TABLE "issue_reference_mentions" ADD CONSTRAINT "issue_reference_mentions_target_issue_id_issues_id_fk" FOREIGN KEY ("target_issue_id") REFERENCES "public"."issues"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_reference_mentions_company_source_issue_idx" ON "issue_reference_mentions" USING btree ("company_id","source_issue_id");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_reference_mentions_company_target_issue_idx" ON "issue_reference_mentions" USING btree ("company_id","target_issue_id");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_reference_mentions_company_issue_pair_idx" ON "issue_reference_mentions" USING btree ("company_id","source_issue_id","target_issue_id");--> statement-breakpoint
|
||||
DELETE FROM "issue_reference_mentions"
|
||||
WHERE "id" IN (
|
||||
SELECT "id"
|
||||
FROM (
|
||||
SELECT
|
||||
"id",
|
||||
row_number() OVER (
|
||||
PARTITION BY "company_id", "source_issue_id", "target_issue_id", "source_kind", "source_record_id"
|
||||
ORDER BY "created_at", "id"
|
||||
) AS "row_number"
|
||||
FROM "issue_reference_mentions"
|
||||
) AS "duplicates"
|
||||
WHERE "duplicates"."row_number" > 1
|
||||
);--> statement-breakpoint
|
||||
DROP INDEX IF EXISTS "issue_reference_mentions_company_source_mention_uq";--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "issue_reference_mentions_company_source_mention_record_uq" ON "issue_reference_mentions" USING btree ("company_id","source_issue_id","target_issue_id","source_kind","source_record_id") WHERE "source_record_id" IS NOT NULL;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "issue_reference_mentions_company_source_mention_null_record_uq" ON "issue_reference_mentions" USING btree ("company_id","source_issue_id","target_issue_id","source_kind") WHERE "source_record_id" IS NULL;
|
||||
3
packages/db/src/migrations/0061_lively_thor_girl.sql
Normal file
3
packages/db/src/migrations/0061_lively_thor_girl.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "scheduled_retry_at" timestamp with time zone;--> statement-breakpoint
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "scheduled_retry_attempt" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "heartbeat_runs" ADD COLUMN IF NOT EXISTS "scheduled_retry_reason" text;
|
||||
@@ -0,0 +1,9 @@
|
||||
ALTER TABLE "routine_runs" ADD COLUMN IF NOT EXISTS "dispatch_fingerprint" text;--> statement-breakpoint
|
||||
ALTER TABLE "issues" ADD COLUMN IF NOT EXISTS "origin_fingerprint" text DEFAULT 'default' NOT NULL;--> statement-breakpoint
|
||||
DROP INDEX IF EXISTS "issues_open_routine_execution_uq";--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "issues_open_routine_execution_uq" ON "issues" USING btree ("company_id","origin_kind","origin_id","origin_fingerprint") WHERE "issues"."origin_kind" = 'routine_execution'
|
||||
and "issues"."origin_id" is not null
|
||||
and "issues"."hidden_at" is null
|
||||
and "issues"."execution_run_id" is not null
|
||||
and "issues"."status" in ('backlog', 'todo', 'in_progress', 'in_review', 'blocked');--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "routine_runs_dispatch_fingerprint_idx" ON "routine_runs" USING btree ("routine_id","dispatch_fingerprint");
|
||||
@@ -0,0 +1,65 @@
|
||||
CREATE TABLE IF NOT EXISTS "issue_thread_interactions" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"issue_id" uuid NOT NULL,
|
||||
"kind" text NOT NULL,
|
||||
"status" text DEFAULT 'pending' NOT NULL,
|
||||
"continuation_policy" text DEFAULT 'wake_assignee' NOT NULL,
|
||||
"source_comment_id" uuid,
|
||||
"source_run_id" uuid,
|
||||
"title" text,
|
||||
"summary" text,
|
||||
"created_by_agent_id" uuid,
|
||||
"created_by_user_id" text,
|
||||
"resolved_by_agent_id" uuid,
|
||||
"resolved_by_user_id" text,
|
||||
"payload" jsonb NOT NULL,
|
||||
"result" jsonb,
|
||||
"resolved_at" timestamp with time zone,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_thread_interactions_company_id_companies_id_fk') THEN
|
||||
ALTER TABLE "issue_thread_interactions" ADD CONSTRAINT "issue_thread_interactions_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_thread_interactions_issue_id_issues_id_fk') THEN
|
||||
ALTER TABLE "issue_thread_interactions" ADD CONSTRAINT "issue_thread_interactions_issue_id_issues_id_fk" FOREIGN KEY ("issue_id") REFERENCES "public"."issues"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_thread_interactions_source_comment_id_issue_comments_id_fk') THEN
|
||||
ALTER TABLE "issue_thread_interactions" ADD CONSTRAINT "issue_thread_interactions_source_comment_id_issue_comments_id_fk" FOREIGN KEY ("source_comment_id") REFERENCES "public"."issue_comments"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_thread_interactions_source_run_id_heartbeat_runs_id_fk') THEN
|
||||
ALTER TABLE "issue_thread_interactions" ADD CONSTRAINT "issue_thread_interactions_source_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("source_run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_thread_interactions_created_by_agent_id_agents_id_fk') THEN
|
||||
ALTER TABLE "issue_thread_interactions" ADD CONSTRAINT "issue_thread_interactions_created_by_agent_id_agents_id_fk" FOREIGN KEY ("created_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_thread_interactions_resolved_by_agent_id_agents_id_fk') THEN
|
||||
ALTER TABLE "issue_thread_interactions" ADD CONSTRAINT "issue_thread_interactions_resolved_by_agent_id_agents_id_fk" FOREIGN KEY ("resolved_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_thread_interactions_issue_idx" ON "issue_thread_interactions" USING btree ("issue_id");
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_thread_interactions_company_issue_created_at_idx" ON "issue_thread_interactions" USING btree ("company_id","issue_id","created_at");
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_thread_interactions_company_issue_status_idx" ON "issue_thread_interactions" USING btree ("company_id","issue_id","status");
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_thread_interactions_source_comment_idx" ON "issue_thread_interactions" USING btree ("source_comment_id");
|
||||
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE "issue_thread_interactions" ADD COLUMN IF NOT EXISTS "idempotency_key" text;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "issue_thread_interactions_company_issue_idempotency_uq"
|
||||
ON "issue_thread_interactions" USING btree ("company_id","issue_id","idempotency_key")
|
||||
WHERE "issue_thread_interactions"."idempotency_key" IS NOT NULL;
|
||||
50
packages/db/src/migrations/0065_environments.sql
Normal file
50
packages/db/src/migrations/0065_environments.sql
Normal file
@@ -0,0 +1,50 @@
|
||||
CREATE TABLE "environments" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"driver" text DEFAULT 'local' NOT NULL,
|
||||
"status" text DEFAULT 'active' NOT NULL,
|
||||
"config" jsonb DEFAULT '{}'::jsonb NOT NULL,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "environment_leases" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"environment_id" uuid NOT NULL,
|
||||
"execution_workspace_id" uuid,
|
||||
"issue_id" uuid,
|
||||
"heartbeat_run_id" uuid,
|
||||
"status" text DEFAULT 'active' NOT NULL,
|
||||
"lease_policy" text DEFAULT 'ephemeral' NOT NULL,
|
||||
"provider" text,
|
||||
"provider_lease_id" text,
|
||||
"acquired_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"last_used_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"expires_at" timestamp with time zone,
|
||||
"released_at" timestamp with time zone,
|
||||
"failure_reason" text,
|
||||
"cleanup_status" text,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "environments" ADD CONSTRAINT "environments_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "environment_leases" ADD CONSTRAINT "environment_leases_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "environment_leases" ADD CONSTRAINT "environment_leases_environment_id_environments_id_fk" FOREIGN KEY ("environment_id") REFERENCES "public"."environments"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "environment_leases" ADD CONSTRAINT "environment_leases_execution_workspace_id_execution_workspaces_id_fk" FOREIGN KEY ("execution_workspace_id") REFERENCES "public"."execution_workspaces"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "environment_leases" ADD CONSTRAINT "environment_leases_issue_id_issues_id_fk" FOREIGN KEY ("issue_id") REFERENCES "public"."issues"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "environment_leases" ADD CONSTRAINT "environment_leases_heartbeat_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("heartbeat_run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "environments_company_status_idx" ON "environments" USING btree ("company_id","status");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "environments_company_driver_idx" ON "environments" USING btree ("company_id","driver");--> statement-breakpoint
|
||||
CREATE INDEX "environments_company_name_idx" ON "environments" USING btree ("company_id","name");--> statement-breakpoint
|
||||
CREATE INDEX "environment_leases_company_environment_status_idx" ON "environment_leases" USING btree ("company_id","environment_id","status");--> statement-breakpoint
|
||||
CREATE INDEX "environment_leases_company_execution_workspace_idx" ON "environment_leases" USING btree ("company_id","execution_workspace_id");--> statement-breakpoint
|
||||
CREATE INDEX "environment_leases_company_issue_idx" ON "environment_leases" USING btree ("company_id","issue_id");--> statement-breakpoint
|
||||
CREATE INDEX "environment_leases_heartbeat_run_idx" ON "environment_leases" USING btree ("heartbeat_run_id");--> statement-breakpoint
|
||||
CREATE INDEX "environment_leases_company_last_used_idx" ON "environment_leases" USING btree ("company_id","last_used_at");--> statement-breakpoint
|
||||
CREATE INDEX "environment_leases_provider_lease_idx" ON "environment_leases" USING btree ("provider_lease_id");
|
||||
107
packages/db/src/migrations/0066_issue_tree_holds.sql
Normal file
107
packages/db/src/migrations/0066_issue_tree_holds.sql
Normal file
@@ -0,0 +1,107 @@
|
||||
CREATE TABLE IF NOT EXISTS "issue_tree_holds" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"root_issue_id" uuid NOT NULL,
|
||||
"mode" text NOT NULL,
|
||||
"status" text DEFAULT 'active' NOT NULL,
|
||||
"reason" text,
|
||||
"release_policy" jsonb,
|
||||
"created_by_actor_type" text DEFAULT 'system' NOT NULL,
|
||||
"created_by_agent_id" uuid,
|
||||
"created_by_user_id" text,
|
||||
"created_by_run_id" uuid,
|
||||
"released_at" timestamp with time zone,
|
||||
"released_by_actor_type" text,
|
||||
"released_by_agent_id" uuid,
|
||||
"released_by_user_id" text,
|
||||
"released_by_run_id" uuid,
|
||||
"release_reason" text,
|
||||
"release_metadata" jsonb,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE IF NOT EXISTS "issue_tree_hold_members" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"hold_id" uuid NOT NULL,
|
||||
"issue_id" uuid NOT NULL,
|
||||
"parent_issue_id" uuid,
|
||||
"depth" integer DEFAULT 0 NOT NULL,
|
||||
"issue_identifier" text,
|
||||
"issue_title" text NOT NULL,
|
||||
"issue_status" text NOT NULL,
|
||||
"assignee_agent_id" uuid,
|
||||
"assignee_user_id" text,
|
||||
"active_run_id" uuid,
|
||||
"active_run_status" text,
|
||||
"skipped" boolean DEFAULT false NOT NULL,
|
||||
"skip_reason" text,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_holds_company_id_companies_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_holds" ADD CONSTRAINT "issue_tree_holds_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_holds_root_issue_id_issues_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_holds" ADD CONSTRAINT "issue_tree_holds_root_issue_id_issues_id_fk" FOREIGN KEY ("root_issue_id") REFERENCES "public"."issues"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_holds_created_by_agent_id_agents_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_holds" ADD CONSTRAINT "issue_tree_holds_created_by_agent_id_agents_id_fk" FOREIGN KEY ("created_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_holds_created_by_run_id_heartbeat_runs_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_holds" ADD CONSTRAINT "issue_tree_holds_created_by_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("created_by_run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_holds_released_by_agent_id_agents_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_holds" ADD CONSTRAINT "issue_tree_holds_released_by_agent_id_agents_id_fk" FOREIGN KEY ("released_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_holds_released_by_run_id_heartbeat_runs_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_holds" ADD CONSTRAINT "issue_tree_holds_released_by_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("released_by_run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_hold_members_company_id_companies_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_hold_members" ADD CONSTRAINT "issue_tree_hold_members_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_hold_members_hold_id_issue_tree_holds_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_hold_members" ADD CONSTRAINT "issue_tree_hold_members_hold_id_issue_tree_holds_id_fk" FOREIGN KEY ("hold_id") REFERENCES "public"."issue_tree_holds"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_hold_members_issue_id_issues_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_hold_members" ADD CONSTRAINT "issue_tree_hold_members_issue_id_issues_id_fk" FOREIGN KEY ("issue_id") REFERENCES "public"."issues"("id") ON DELETE cascade ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_hold_members_parent_issue_id_issues_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_hold_members" ADD CONSTRAINT "issue_tree_hold_members_parent_issue_id_issues_id_fk" FOREIGN KEY ("parent_issue_id") REFERENCES "public"."issues"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_hold_members_assignee_agent_id_agents_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_hold_members" ADD CONSTRAINT "issue_tree_hold_members_assignee_agent_id_agents_id_fk" FOREIGN KEY ("assignee_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'issue_tree_hold_members_active_run_id_heartbeat_runs_id_fk') THEN
|
||||
ALTER TABLE "issue_tree_hold_members" ADD CONSTRAINT "issue_tree_hold_members_active_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("active_run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE set null ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_tree_holds_company_root_status_idx" ON "issue_tree_holds" USING btree ("company_id","root_issue_id","status");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_tree_holds_company_status_mode_idx" ON "issue_tree_holds" USING btree ("company_id","status","mode");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "issue_tree_hold_members_hold_issue_uq" ON "issue_tree_hold_members" USING btree ("hold_id","issue_id");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_tree_hold_members_company_issue_idx" ON "issue_tree_hold_members" USING btree ("company_id","issue_id");--> statement-breakpoint
|
||||
CREATE INDEX IF NOT EXISTS "issue_tree_hold_members_hold_depth_idx" ON "issue_tree_hold_members" USING btree ("hold_id","depth");
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "agents" ADD COLUMN "default_environment_id" uuid;
|
||||
ALTER TABLE "agents" ADD CONSTRAINT "agents_default_environment_id_environments_id_fk" FOREIGN KEY ("default_environment_id") REFERENCES "public"."environments"("id") ON DELETE set null ON UPDATE no action;
|
||||
CREATE INDEX "agents_company_default_environment_idx" ON "agents" USING btree ("company_id","default_environment_id");
|
||||
@@ -0,0 +1,2 @@
|
||||
DROP INDEX IF EXISTS "environments_company_driver_idx";--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS "environments_company_driver_idx" ON "environments" USING btree ("company_id","driver") WHERE "driver" = 'local';
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user