Compare commits
54 Commits
codex/pap-
...
pap-3598/o
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bab5136645 | ||
|
|
09ed4e54cb | ||
|
|
783f4d2f28 | ||
|
|
433326ffcb | ||
|
|
3c73ed26b5 | ||
|
|
d6bee62f02 | ||
|
|
edbb670c3b | ||
|
|
fd10404374 | ||
|
|
47920f9c47 | ||
|
|
e01ffc18d3 | ||
|
|
ae23e02526 | ||
|
|
29401b231b | ||
|
|
a5430f010d | ||
|
|
6c090f84a9 | ||
|
|
90631b09b3 | ||
|
|
2dce81fbf6 | ||
|
|
0e51fa2b0d | ||
|
|
09eceb952a | ||
|
|
d22e790bd4 | ||
|
|
856c6cb192 | ||
|
|
bb7d040894 | ||
|
|
076067865f | ||
|
|
a7b45938b7 | ||
|
|
15eac43b43 | ||
|
|
57229d0f24 | ||
|
|
76f09c8eb6 | ||
|
|
685ee84e4a | ||
|
|
d7719423e9 | ||
|
|
fe401b7fa9 | ||
|
|
2d72292ad6 | ||
|
|
570a4206da | ||
|
|
3cd26a78fc | ||
|
|
e8275318ba | ||
|
|
e273d621fc | ||
|
|
42a299fb9d | ||
|
|
d2dd759caa | ||
|
|
b02e67cea5 | ||
|
|
6a7cca95ef | ||
|
|
4272c1604d | ||
|
|
ad5432fece | ||
|
|
a3de1d764d | ||
|
|
1fe1067361 | ||
|
|
c4269bab59 | ||
|
|
87f19cd9a6 | ||
|
|
cd606563f6 | ||
|
|
c0ce35d1fb | ||
|
|
a4ac6ff133 | ||
|
|
4cf612a92d | ||
|
|
f9cf1d2f6a | ||
|
|
a0f5cbffd7 | ||
|
|
367d4cab72 | ||
|
|
9b99d30330 | ||
|
|
3494e84a29 | ||
|
|
6b7f6ce4b8 |
2
.github/workflows/docker.yml
vendored
@@ -14,7 +14,7 @@ permissions:
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 60
|
||||
concurrency:
|
||||
group: docker-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
100
.github/workflows/pr.yml
vendored
@@ -23,7 +23,9 @@ jobs:
|
||||
- name: Block manual lockfile edits
|
||||
if: github.head_ref != 'chore/refresh-lockfile'
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
# Diff the PR branch against its merge base so recent base-branch commits
|
||||
# do not masquerade as changes made by the PR itself.
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}...${{ github.event.pull_request.head.sha }}")"
|
||||
if printf '%s\n' "$changed" | grep -qx 'pnpm-lock.yaml'; then
|
||||
echo "Do not commit pnpm-lock.yaml in pull requests. CI owns lockfile updates."
|
||||
exit 1
|
||||
@@ -43,9 +45,18 @@ jobs:
|
||||
- name: Validate Dockerfile deps stage
|
||||
run: node ./scripts/check-docker-deps-stage.mjs
|
||||
|
||||
- name: Validate release package manifest
|
||||
run: node ./scripts/release-package-map.mjs check
|
||||
|
||||
- name: Verify release package bootstrap for changed manifests
|
||||
run: |
|
||||
mapfile -t changed_paths < <(git diff --name-only "${{ github.event.pull_request.base.sha }}...${{ github.event.pull_request.head.sha }}")
|
||||
PAPERCLIP_RELEASE_BOOTSTRAP_BASE_SHA="${{ github.event.pull_request.base.sha }}" \
|
||||
node ./scripts/check-release-package-bootstrap.mjs "${changed_paths[@]}"
|
||||
|
||||
- name: Validate dependency resolution when manifests change
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}...${{ github.event.pull_request.head.sha }}")"
|
||||
manifest_pattern='(^|/)package\.json$|^pnpm-workspace\.yaml$|^\.npmrc$|^pnpmfile\.(cjs|js|mjs)$'
|
||||
if printf '%s\n' "$changed" | grep -Eq "$manifest_pattern"; then
|
||||
pnpm install --lockfile-only --ignore-scripts --no-frozen-lockfile
|
||||
@@ -74,16 +85,88 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Typecheck
|
||||
run: pnpm -r typecheck
|
||||
- name: Typecheck workspaces whose build scripts skip TypeScript
|
||||
run: pnpm run typecheck:build-gaps
|
||||
|
||||
- name: Run tests
|
||||
run: pnpm test:run
|
||||
- name: Run general test suites
|
||||
run: pnpm test:run:general
|
||||
|
||||
- name: Verify release registry test coverage
|
||||
run: pnpm run test:release-registry
|
||||
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
|
||||
- name: Release canary dry run
|
||||
verify_serialized_server:
|
||||
name: Verify serialized server suites (${{ matrix.shard_label }})
|
||||
needs: [policy]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- shard_index: 0
|
||||
shard_count: 4
|
||||
shard_label: 1/4
|
||||
- shard_index: 1
|
||||
shard_count: 4
|
||||
shard_label: 2/4
|
||||
- shard_index: 2
|
||||
shard_count: 4
|
||||
shard_label: 3/4
|
||||
- shard_index: 3
|
||||
shard_count: 4
|
||||
shard_label: 4/4
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Run serialized server test shard
|
||||
run: pnpm test:run:serialized -- --shard-index ${{ matrix.shard_index }} --shard-count ${{ matrix.shard_count }}
|
||||
|
||||
canary_dry_run:
|
||||
name: Canary Dry Run
|
||||
needs: [policy]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
# `release.sh` always executes its Step 2/7 workspace build, even when
|
||||
# `--skip-verify` bypasses the initial verification gate.
|
||||
- name: Release canary dry run via release.sh internal build
|
||||
run: |
|
||||
git checkout -B master HEAD
|
||||
git checkout -- pnpm-lock.yaml
|
||||
@@ -112,9 +195,6 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
|
||||
- name: Install Playwright
|
||||
run: npx playwright install --with-deps chromium
|
||||
|
||||
|
||||
12
.github/workflows/release.yml
vendored
@@ -50,6 +50,9 @@ jobs:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Validate release package manifest
|
||||
run: node ./scripts/release-package-map.mjs check
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --no-frozen-lockfile
|
||||
|
||||
@@ -89,6 +92,9 @@ jobs:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Validate release package manifest
|
||||
run: node ./scripts/release-package-map.mjs check
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --no-frozen-lockfile
|
||||
|
||||
@@ -139,6 +145,9 @@ jobs:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Validate release package manifest
|
||||
run: node ./scripts/release-package-map.mjs check
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --no-frozen-lockfile
|
||||
|
||||
@@ -177,6 +186,9 @@ jobs:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Validate release package manifest
|
||||
run: node ./scripts/release-package-map.mjs check
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --no-frozen-lockfile
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ COPY packages/shared/package.json packages/shared/
|
||||
COPY packages/db/package.json packages/db/
|
||||
COPY packages/adapter-utils/package.json packages/adapter-utils/
|
||||
COPY packages/mcp-server/package.json packages/mcp-server/
|
||||
COPY packages/adapters/acpx-local/package.json packages/adapters/acpx-local/
|
||||
COPY packages/adapters/claude-local/package.json packages/adapters/claude-local/
|
||||
COPY packages/adapters/codex-local/package.json packages/adapters/codex-local/
|
||||
COPY packages/adapters/cursor-local/package.json packages/adapters/cursor-local/
|
||||
|
||||
@@ -37,6 +37,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@clack/prompts": "^0.10.0",
|
||||
"@paperclipai/adapter-acpx-local": "workspace:*",
|
||||
"@paperclipai/adapter-claude-local": "workspace:*",
|
||||
"@paperclipai/adapter-codex-local": "workspace:*",
|
||||
"@paperclipai/adapter-cursor-local": "workspace:*",
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import type { CLIAdapterModule } from "@paperclipai/adapter-utils";
|
||||
import { printAcpxStreamEvent } from "@paperclipai/adapter-acpx-local/cli";
|
||||
import { printClaudeStreamEvent } from "@paperclipai/adapter-claude-local/cli";
|
||||
import { printCodexStreamEvent } from "@paperclipai/adapter-codex-local/cli";
|
||||
import { printCursorStreamEvent } from "@paperclipai/adapter-cursor-local/cli";
|
||||
@@ -14,6 +15,11 @@ const claudeLocalCLIAdapter: CLIAdapterModule = {
|
||||
formatStdoutEvent: printClaudeStreamEvent,
|
||||
};
|
||||
|
||||
const acpxLocalCLIAdapter: CLIAdapterModule = {
|
||||
type: "acpx_local",
|
||||
formatStdoutEvent: printAcpxStreamEvent,
|
||||
};
|
||||
|
||||
const codexLocalCLIAdapter: CLIAdapterModule = {
|
||||
type: "codex_local",
|
||||
formatStdoutEvent: printCodexStreamEvent,
|
||||
@@ -46,6 +52,7 @@ const openclawGatewayCLIAdapter: CLIAdapterModule = {
|
||||
|
||||
const adaptersByType = new Map<string, CLIAdapterModule>(
|
||||
[
|
||||
acpxLocalCLIAdapter,
|
||||
claudeLocalCLIAdapter,
|
||||
codexLocalCLIAdapter,
|
||||
openCodeLocalCLIAdapter,
|
||||
|
||||
@@ -149,7 +149,15 @@ The plugin runtime tracks plugin-owned database namespaces and migrations in `pl
|
||||
|
||||
## Backups
|
||||
|
||||
Paperclip supports automatic and manual database backups. See `doc/DEVELOPING.md` for the current `paperclipai db:backup` / `pnpm db:backup` commands and backup retention configuration.
|
||||
Paperclip supports automatic and manual logical database backups. These dumps include
|
||||
non-system database schemas such as `public`, the Drizzle migration journal, and
|
||||
plugin-owned database schemas. See `doc/DEVELOPING.md` for the current
|
||||
`paperclipai db:backup` / `pnpm db:backup` commands and backup retention
|
||||
configuration.
|
||||
|
||||
Database backups do not include non-database instance files such as local-disk
|
||||
uploads, workspace files, or the local encrypted secrets master key. Back those paths
|
||||
up separately when you need full instance disaster recovery.
|
||||
|
||||
## Secret storage
|
||||
|
||||
|
||||
@@ -421,7 +421,9 @@ If you set `DATABASE_URL`, the server will use that instead of embedded PostgreS
|
||||
|
||||
## Automatic DB Backups
|
||||
|
||||
Paperclip can run automatic DB backups on a timer. Defaults:
|
||||
Paperclip can run automatic logical database backups on a timer. These backups cover
|
||||
non-system database schemas, including migration history and plugin-owned database
|
||||
schemas. Defaults:
|
||||
|
||||
- enabled
|
||||
- every 60 minutes
|
||||
@@ -449,6 +451,10 @@ Environment overrides:
|
||||
- `PAPERCLIP_DB_BACKUP_RETENTION_DAYS=<days>`
|
||||
- `PAPERCLIP_DB_BACKUP_DIR=/absolute/or/~/path`
|
||||
|
||||
DB backups are not full instance filesystem backups. For full local disaster
|
||||
recovery, also back up local storage files and the local encrypted secrets key if
|
||||
those providers are enabled.
|
||||
|
||||
## Secrets in Dev
|
||||
|
||||
Agent env vars now support secret references. By default, secret values are stored with local encryption and only secret refs are persisted in agent config.
|
||||
|
||||
@@ -143,6 +143,13 @@ This keeps the default install path unchanged while allowing explicit installs w
|
||||
npx paperclipai@canary onboard
|
||||
```
|
||||
|
||||
The release script now verifies two things after a canary publish:
|
||||
|
||||
- the `canary` dist-tag resolves to the version that was just published
|
||||
- every published internal `@paperclipai/*` dependency referenced by that manifest exists on npm
|
||||
|
||||
It also treats `latest -> canary` as a failure by default, because npm metadata can otherwise leave the default install path pointing at an unreleased canary dependency graph. Only pass `./scripts/release.sh canary --allow-canary-latest` when that `latest` behavior is explicitly intended.
|
||||
|
||||
### Stable
|
||||
|
||||
Stable publishes use the npm dist-tag `latest`.
|
||||
@@ -169,6 +176,58 @@ That means:
|
||||
|
||||
See [doc/RELEASE-AUTOMATION-SETUP.md](RELEASE-AUTOMATION-SETUP.md) for the GitHub/npm setup steps.
|
||||
|
||||
## Release enrollment for new public packages
|
||||
|
||||
Paperclip does not auto-publish every non-private workspace package anymore.
|
||||
CI publishing is controlled by [`scripts/release-package-manifest.json`](../scripts/release-package-manifest.json).
|
||||
|
||||
When you add a new public package:
|
||||
|
||||
1. add it to the manifest and decide whether CI should publish it immediately
|
||||
2. if CI should publish it, bootstrap the package on npm before merge
|
||||
3. if CI should not publish it yet, keep `"publishFromCi": false`
|
||||
4. only enable `"publishFromCi": true` after npm trusted publishing is configured for that package
|
||||
|
||||
PR CI now checks changed release-enabled package manifests against npm. That catches a missing first-publish bootstrap before the change reaches `master`.
|
||||
|
||||
### One-time bootstrap sequence for a new package
|
||||
|
||||
The first publish of a brand-new package still needs one human maintainer with npm write access.
|
||||
After that, trusted publishing can take over.
|
||||
|
||||
Example for `@paperclipai/adapter-acpx-local` from the repo root:
|
||||
|
||||
```bash
|
||||
# safe preview
|
||||
pnpm run release:bootstrap-package -- @paperclipai/adapter-acpx-local
|
||||
|
||||
# one-time first publish from an authenticated maintainer machine
|
||||
pnpm run release:bootstrap-package -- @paperclipai/adapter-acpx-local --publish --otp 123456
|
||||
```
|
||||
|
||||
The helper script:
|
||||
|
||||
- checks that the package does not already exist on npm
|
||||
- builds the target package unless `--skip-build` is passed
|
||||
- runs `npm pack --dry-run` in the package directory
|
||||
- only runs the real `npm publish --access public` when `--publish --otp <code>` is provided
|
||||
|
||||
For the real `--publish` step, the maintainer machine must already be authenticated to npm.
|
||||
If `npm whoami` returns `401`, first run `npm logout --registry=https://registry.npmjs.org/` to clear any stale local auth, then run `npm login` or `npm adduser` locally as an npm org member, and finally rerun the helper.
|
||||
That local human auth is fine for the one-time bootstrap publish; we just do not want the same auth model inside CI.
|
||||
The helper now requires `--otp <code>` up front for `--publish`, so it fails before the real publish attempt if the one-time password is missing.
|
||||
|
||||
After that first publish succeeds:
|
||||
|
||||
1. open `https://www.npmjs.com/package/@paperclipai/adapter-acpx-local`
|
||||
2. go to `Settings` → `Trusted publishing`
|
||||
3. add repository `paperclipai/paperclip`
|
||||
4. set workflow filename to `release.yml`
|
||||
5. optionally go to `Settings` → `Publishing access` and enable `Require two-factor authentication and disallow tokens`
|
||||
6. keep `publishFromCi: true` in [`scripts/release-package-manifest.json`](../scripts/release-package-manifest.json)
|
||||
|
||||
Once those steps are done, future canary and stable publishes for that package are automated through GitHub OIDC. The manual step is only the first package creation on npm.
|
||||
|
||||
## Rollback model
|
||||
|
||||
Rollback does not unpublish anything.
|
||||
|
||||
@@ -67,6 +67,27 @@ Why:
|
||||
- the single `release.yml` workflow handles both canary and stable publishing
|
||||
- GitHub environments `npm-canary` and `npm-stable` still enforce different approval rules on the GitHub side
|
||||
|
||||
### 2.2.1. Newly added public packages need a bootstrap phase
|
||||
|
||||
Trusted publishing is configured on the npm package itself, not at the repo scope.
|
||||
That means a brand-new public package must not be auto-enrolled into CI publishing until its npm package exists and its trusted publisher has been configured.
|
||||
|
||||
Repo policy:
|
||||
|
||||
1. add every non-private package to [`scripts/release-package-manifest.json`](../scripts/release-package-manifest.json)
|
||||
2. set `"publishFromCi": true` only when CI is expected to publish that package
|
||||
3. if the package is not ready for CI publishing yet, keep `"publishFromCi": false`
|
||||
4. complete the package bootstrap before merging any PR that changes a release-enabled new package
|
||||
|
||||
Bootstrap sequence for a new package:
|
||||
|
||||
1. publish the package once from a trusted maintainer machine using normal npm auth
|
||||
2. open that package on npm and add the `paperclipai/paperclip` trusted publisher for `.github/workflows/release.yml`
|
||||
3. rerun or dry-run the release flow as needed to confirm CI publishing now works
|
||||
4. only then enable `"publishFromCi": true`
|
||||
|
||||
PR CI enforces this by checking changed release-enabled package manifests against npm. That keeps `master` canary publishing healthy while preserving the no-long-lived-token model for normal CI releases.
|
||||
|
||||
### 2.3. Verify trusted publishing before removing old auth
|
||||
|
||||
After the workflows are live:
|
||||
|
||||
@@ -63,6 +63,8 @@ It:
|
||||
- verifies the pushed commit
|
||||
- computes the canary version for the current UTC date
|
||||
- publishes under npm dist-tag `canary`
|
||||
- verifies that `canary` resolves to the just-published version and that published internal dependencies exist on npm
|
||||
- fails by default if npm leaves `latest` pointing at a canary; use `--allow-canary-latest` only when that state is intentional
|
||||
- creates a git tag `canary/vYYYY.MDD.P-canary.N`
|
||||
|
||||
Users install canaries with:
|
||||
|
||||
@@ -150,7 +150,7 @@ Invariant: every business record belongs to exactly one company.
|
||||
- `capabilities` text null
|
||||
- `adapter_type` text; built-ins include `process`, `http`, `claude_local`, `codex_local`, `gemini_local`, `opencode_local`, `pi_local`, `cursor`, and `openclaw_gateway`
|
||||
- `adapter_config` jsonb not null
|
||||
- `runtime_config` jsonb not null default `{}`
|
||||
- `runtime_config` jsonb not null default `{}`; may include Paperclip runtime policy such as `modelProfiles.cheap.adapterConfig` for an optional low-cost model lane that does not change the primary adapter config
|
||||
- `default_environment_id` uuid fk `environments.id` null
|
||||
- `context_mode` enum: `thin | fat` default `thin`
|
||||
- `budget_monthly_cents` int not null default 0
|
||||
@@ -676,7 +676,7 @@ Per-agent schedule fields in `adapter_config`:
|
||||
|
||||
- `enabled` boolean
|
||||
- `intervalSec` integer (minimum 30)
|
||||
- `maxConcurrentRuns` integer; new agents default to `5`
|
||||
- `maxConcurrentRuns` integer; new agents default to `20`; scheduler clamps configured values to `1..50`
|
||||
|
||||
Scheduler must skip invocation when:
|
||||
|
||||
|
||||
@@ -67,13 +67,15 @@ This is the right state for:
|
||||
|
||||
- waiting on another issue
|
||||
- waiting on a human decision
|
||||
- waiting on an external dependency or system
|
||||
- waiting on an external dependency or system when Paperclip does not own a scheduled re-check
|
||||
- work that automatic recovery could not safely continue
|
||||
|
||||
### `in_review`
|
||||
|
||||
Execution work is paused because the next move belongs to a reviewer or approver, not the current executor.
|
||||
|
||||
An external review service can also be a valid review path when the issue keeps an agent assignee and has an active one-shot monitor that will wake that assignee to check the service later.
|
||||
|
||||
### `done`
|
||||
|
||||
The work is complete and terminal.
|
||||
@@ -164,6 +166,7 @@ The valid action-path primitives are:
|
||||
- a queued wake or continuation that can be delivered to the responsible agent
|
||||
- a typed execution-policy participant, such as `executionState.currentParticipant`
|
||||
- a pending issue-thread interaction or linked approval that is waiting for a specific responder
|
||||
- a one-shot issue monitor (`executionPolicy.monitor.nextCheckAt`) that will wake the assignee for a future check
|
||||
- a human owner via `assigneeUserId`
|
||||
- a first-class blocker chain whose unresolved leaf issues are themselves healthy
|
||||
- an open explicit recovery issue that names the owner and action needed to restore liveness
|
||||
@@ -188,6 +191,7 @@ A healthy active-work state means at least one of these is true:
|
||||
|
||||
- there is an active run for the issue
|
||||
- there is already a queued continuation wake
|
||||
- there is an active one-shot monitor that will wake the assignee for a future check
|
||||
- there is an open explicit recovery issue for the lost execution path
|
||||
|
||||
An agent-owned `in_progress` issue is stalled when it has no active run, no queued continuation, and no explicit recovery surface. A still-running but silent process is not automatically stalled; it is handled by the active-run watchdog contract.
|
||||
@@ -202,11 +206,34 @@ A healthy `in_review` issue has at least one valid action path:
|
||||
- a pending issue-thread interaction or linked approval waiting for a named responder
|
||||
- a human owner via `assigneeUserId`
|
||||
- an active run or queued wake that is expected to process the review state
|
||||
- an active one-shot monitor for an external service or async review loop that the assignee owns
|
||||
- an open explicit recovery issue for an ambiguous review handoff
|
||||
|
||||
Agent-assigned `in_review` with no typed participant is only healthy when one of the other paths exists. Assignment to the same agent that produced the handoff is not, by itself, a review path.
|
||||
|
||||
An `in_review` issue is stalled when it has no typed participant, no pending interaction or approval, no user owner, no active run, no queued wake, and no explicit recovery issue. Paperclip should surface that state as recovery work rather than silently completing the issue or leaving blocker chains parked indefinitely.
|
||||
An `in_review` issue is stalled when it has no typed participant, no pending interaction or approval, no user owner, no active monitor, no active run, no queued wake, and no explicit recovery issue. Paperclip should surface that state as recovery work rather than silently completing the issue or leaving blocker chains parked indefinitely.
|
||||
|
||||
### Issue monitors
|
||||
|
||||
An issue monitor is a one-shot deferred action path for agent-owned issues in `in_progress` or `in_review`.
|
||||
|
||||
Use a monitor when the current assignee owns a future check against an async system or external service. Examples include Greptile review loops, GitHub checks, Vercel deployments, or provider jobs where the agent should come back later and decide what happens next.
|
||||
|
||||
Monitor policy lives under `executionPolicy.monitor` and includes:
|
||||
|
||||
- `nextCheckAt`: when Paperclip should wake the assignee
|
||||
- `notes`: non-secret instructions for what the assignee should check
|
||||
- `serviceName`: optional non-secret external-service context
|
||||
- `externalRef`: optional external-service reference input; Paperclip treats it as secret-adjacent, redacts it before persistence/visibility, and omits it from activity and wake payloads
|
||||
- `timeoutAt`, `maxAttempts`, and `recoveryPolicy`: optional recovery hints for bounded waits
|
||||
|
||||
Monitors are not recurring intervals. When a monitor fires, Paperclip clears the scheduled monitor and queues an `issue_monitor_due` wake for the assignee. If the external service is still pending, the assignee must explicitly re-arm the monitor with a new `nextCheckAt`. If the issue moves to `done`, `cancelled`, an invalid status, or a human/unassigned owner, the monitor is cleared.
|
||||
|
||||
Because `serviceName` and `notes` remain visible in issue activity and wake context, operators should keep them short and non-secret. Put enough context for the assignee to know what to inspect, but do not include signed URLs, bearer tokens, customer secrets, tenant-private identifiers, or provider links with embedded credentials.
|
||||
|
||||
Monitor bounds are enforced. Paperclip rejects attempts to re-arm a monitor whose `timeoutAt` or `maxAttempts` is already exhausted. When a scheduled monitor reaches an exhausted bound at trigger time, Paperclip clears it and follows `recoveryPolicy`: `wake_owner` queues a bounded recovery wake for the assignee, `create_recovery_issue` opens visible recovery work, and `escalate_to_board` records a board-visible escalation comment/activity.
|
||||
|
||||
Use `blocked` instead of a monitor when no Paperclip assignee owns a responsible polling path. In that case, name the external owner/action or create first-class recovery/blocker work.
|
||||
|
||||
### `blocked`
|
||||
|
||||
|
||||
@@ -13,7 +13,9 @@ It is intentionally narrower than [PLUGIN_SPEC.md](./PLUGIN_SPEC.md). The spec i
|
||||
- Plugin database migrations are restricted to a host-derived plugin namespace.
|
||||
- Plugin-owned JSON API routes must be declared in the manifest and are mounted
|
||||
only under `/api/plugins/:pluginId/api/*`.
|
||||
- There is no host-provided shared React component kit for plugins yet.
|
||||
- The host provides a small shared React component kit through
|
||||
`@paperclipai/plugin-sdk/ui`; use it for common Paperclip controls before
|
||||
building custom versions.
|
||||
- `ctx.assets` is not supported in the current runtime.
|
||||
|
||||
## Scaffold a plugin
|
||||
@@ -168,6 +170,187 @@ Mount surfaces currently wired in the host include:
|
||||
- `commentAnnotation`
|
||||
- `commentContextMenuItem`
|
||||
|
||||
## Shared host components
|
||||
|
||||
Use shared components from `@paperclipai/plugin-sdk/ui` when the plugin needs a
|
||||
Paperclip-native control. The host owns the implementation, so plugins inherit
|
||||
the board's current styling, ordering, recent selections, and dark-mode behavior
|
||||
without importing `ui/src` internals.
|
||||
|
||||
Currently exposed components include:
|
||||
|
||||
- `MarkdownBlock` and `MarkdownEditor` for rendered and editable markdown.
|
||||
- `FileTree` for serializable file and directory trees.
|
||||
- `IssuesList` for a native company-scoped issue table.
|
||||
- `AssigneePicker` for the same agent/user selector used in the new issue pane.
|
||||
Use the controlled `value` format `agent:<id>`, `user:<id>`, or `""`.
|
||||
- `ProjectPicker` for the same project selector used in the new issue pane.
|
||||
Use the controlled project id value, or `""` for no project.
|
||||
- `ManagedRoutinesList` for plugin-owned routine settings pages.
|
||||
|
||||
```tsx
|
||||
import { AssigneePicker, ProjectPicker } from "@paperclipai/plugin-sdk/ui";
|
||||
|
||||
export function PluginAssignmentControls({ companyId }: { companyId: string }) {
|
||||
const [assignee, setAssignee] = useState("");
|
||||
const [projectId, setProjectId] = useState("");
|
||||
|
||||
return (
|
||||
<>
|
||||
<AssigneePicker
|
||||
companyId={companyId}
|
||||
value={assignee}
|
||||
onChange={(value) => setAssignee(value)}
|
||||
/>
|
||||
<ProjectPicker
|
||||
companyId={companyId}
|
||||
value={projectId}
|
||||
onChange={setProjectId}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## File and path UI
|
||||
|
||||
Plugin UI often needs to render a file tree, accept a folder path, or browse a
|
||||
project workspace. There are three different surfaces for that, and they map to
|
||||
different trust and data-flow boundaries. Pick the surface that matches the
|
||||
data the plugin actually has.
|
||||
|
||||
### When to use the shared `FileTree`
|
||||
|
||||
Use `FileTree` from `@paperclipai/plugin-sdk/ui` whenever the plugin only needs
|
||||
to render a serializable file/directory list and react to selection or
|
||||
expand/collapse. The host owns the implementation, so plugin UI inherits the
|
||||
board's icons, indent, focus ring, and dark-mode styling without importing host
|
||||
internals.
|
||||
|
||||
```tsx
|
||||
import {
|
||||
FileTree,
|
||||
type FileTreeNode,
|
||||
} from "@paperclipai/plugin-sdk/ui";
|
||||
|
||||
const nodes: FileTreeNode[] = [
|
||||
{ name: "AGENTS.md", path: "AGENTS.md", kind: "file", children: [] },
|
||||
{
|
||||
name: "wiki",
|
||||
path: "wiki",
|
||||
kind: "dir",
|
||||
children: [
|
||||
{ name: "index.md", path: "wiki/index.md", kind: "file", children: [] },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
export function WikiTree() {
|
||||
const [expanded, setExpanded] = useState<Set<string>>(() => new Set(["wiki"]));
|
||||
const [selected, setSelected] = useState<string | null>(null);
|
||||
|
||||
return (
|
||||
<FileTree
|
||||
nodes={nodes}
|
||||
selectedFile={selected}
|
||||
expandedPaths={expanded}
|
||||
onSelectFile={(path) => setSelected(path)}
|
||||
onToggleDir={(path) =>
|
||||
setExpanded((current) => {
|
||||
const next = new Set(current);
|
||||
next.has(path) ? next.delete(path) : next.add(path);
|
||||
return next;
|
||||
})
|
||||
}
|
||||
/>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
Good fits:
|
||||
|
||||
- LLM Wiki page navigation in `packages/plugins/plugin-llm-wiki` builds a
|
||||
`FileTreeNode[]` from worker query results and renders it through `FileTree`.
|
||||
- The example `plugin-file-browser-example` lazily fetches a directory's
|
||||
children through a `loadFileList` action when `onToggleDir` fires, then
|
||||
merges the children into the local tree state — letting the shared component
|
||||
handle rendering and selection.
|
||||
|
||||
Boundary rules:
|
||||
|
||||
- Keep the prop surface serializable (`nodes`, `expandedPaths`, `checkedPaths`,
|
||||
`fileBadges`, `fileTones`). Do not pass arbitrary render functions across the
|
||||
plugin/host boundary in v1; the supported escape hatches are
|
||||
`fileBadges` (status pill keyed by path) and `fileTones` (row tone keyed by
|
||||
path).
|
||||
- Do not import the host's `FileTree.tsx` or any `ui/src/*` module. The SDK
|
||||
declaration is the only supported import path for plugin UI.
|
||||
- The shared `FileTree` is for rendering and selection. Plugin-specific editors,
|
||||
ingest flows, query forms, and lint runs stay inside the plugin and do not
|
||||
belong as `FileTree` props.
|
||||
|
||||
### When to declare `localFolders`
|
||||
|
||||
When the plugin needs operator-configured filesystem roots — typically for
|
||||
trusted local plugins like wiki tooling — declare `localFolders[]` on the
|
||||
manifest and add the `local.folders` capability. The host renders a settings
|
||||
surface for the operator to set the absolute path, validates the path
|
||||
server-side (containment, symlinks, required files/directories), and exposes
|
||||
`ctx.localFolders.readText()` and `ctx.localFolders.writeTextAtomic()` in the
|
||||
worker.
|
||||
|
||||
```ts
|
||||
export const manifest = {
|
||||
capabilities: ["local.folders"],
|
||||
localFolders: [
|
||||
{
|
||||
folderKey: "content-root",
|
||||
displayName: "Content root",
|
||||
access: "readWrite",
|
||||
requiredDirectories: ["sources", "pages"],
|
||||
requiredFiles: ["schema.md"],
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
|
||||
Use this when:
|
||||
|
||||
- The data lives outside any project workspace.
|
||||
- Reads and writes need company-scoped configuration.
|
||||
- The operator picks the path once in plugin settings and the worker resolves
|
||||
files relative to that root.
|
||||
|
||||
Do not use `localFolders` to grant the UI direct browser-side access to the
|
||||
filesystem — there is no such capability. The browser still goes through the
|
||||
worker via `getData` / `performAction`, and the worker only exposes paths it
|
||||
chose to expose.
|
||||
|
||||
### When to keep worker-mediated project workspace browsing
|
||||
|
||||
When the data lives inside an existing project workspace, keep the browsing
|
||||
flow worker-mediated:
|
||||
|
||||
- The worker uses `ctx.projects.listWorkspaces()` to resolve the workspace
|
||||
path, then reads its filesystem with normal Node APIs.
|
||||
- The plugin UI calls a `getData` handler for the root listing and an action
|
||||
for lazy children, then renders them through `FileTree`.
|
||||
- The worker is the only side that touches the disk. The browser receives a
|
||||
serializable tree and never sees raw absolute paths it can replay.
|
||||
|
||||
The example `plugin-file-browser-example` is the reference for this pattern:
|
||||
the worker registers `fileList` (data) and `loadFileList` (action) over the
|
||||
same handler, and the UI uses the action for on-toggle directory loading so the
|
||||
shared `FileTree` stays the rendering surface.
|
||||
|
||||
### Mixing surfaces
|
||||
|
||||
A single plugin can use more than one of these. The LLM Wiki uses
|
||||
`localFolders` for its content root, then renders the resulting page list
|
||||
through `FileTree`. The file browser example uses `ctx.projects.listWorkspaces`
|
||||
to pick a workspace and renders its on-disk tree through `FileTree` with lazy
|
||||
loading. Pick the boundary per data source, not per plugin.
|
||||
|
||||
## Company routes
|
||||
|
||||
Plugins may declare a `page` slot with `routePath` to own a company route like:
|
||||
|
||||
@@ -27,7 +27,7 @@ Current limitations to keep in mind:
|
||||
- Published npm packages are the intended install artifact for deployed plugins.
|
||||
- The repo example plugins under `packages/plugins/examples/` are development conveniences. They work from a source checkout and should not be assumed to exist in a generic published build unless they are explicitly shipped with that build.
|
||||
- Dynamic plugin install is not yet cloud-ready for horizontally scaled or ephemeral deployments. There is no shared artifact store, install coordination, or cross-node distribution layer yet.
|
||||
- The current runtime does not yet ship a real host-provided plugin UI component kit, and it does not support plugin asset uploads/reads. Treat those as future-scope ideas in this spec, not current implementation promises.
|
||||
- The current runtime ships a small host-provided plugin UI component kit through `@paperclipai/plugin-sdk/ui`, but does not support plugin asset uploads/reads yet. Treat plugin asset APIs as future-scope ideas, not current implementation promises.
|
||||
- Scoped plugin API routes are JSON-only and must be declared in `apiRoutes`.
|
||||
They mount under `/api/plugins/:pluginId/api/*`; plugins cannot shadow core
|
||||
API routes.
|
||||
@@ -976,13 +976,23 @@ export function DashboardWidget({ context }: PluginWidgetProps) {
|
||||
|
||||
The SDK includes a `ui` subpath export that plugin frontends import. This subpath provides:
|
||||
|
||||
- **Bridge hooks**: `usePluginData(key, params)`, `usePluginAction(key)`, `useHostContext()`
|
||||
- **Bridge hooks**: `usePluginData(key, params)`, `usePluginAction(key)`, `useHostContext()`, `useHostNavigation()`
|
||||
- **Design tokens**: colors, spacing, typography, shadows matching the host theme
|
||||
- **Shared components**: `MetricCard`, `StatusBadge`, `DataTable`, `LogView`, `ActionBar`, `Spinner`, etc.
|
||||
- **Type definitions**: `PluginPageProps`, `PluginWidgetProps`, `PluginDetailTabProps`
|
||||
|
||||
Plugins are encouraged but not required to use the shared components. A plugin may render entirely custom UI as long as it communicates through the bridge.
|
||||
|
||||
`useHostNavigation()` is the supported way for plugin UI to navigate to
|
||||
Paperclip-internal pages. It exposes `resolveHref(to)`, `navigate(to,
|
||||
options?)`, and `linkProps(to, options?)`. Plugin links should prefer
|
||||
`linkProps()` so anchors keep real `href` values for copy-link, modifier-click,
|
||||
middle-click, and open-in-new-tab behavior while plain left-clicks route through
|
||||
the host SPA router. The host resolves company-scoped paths against the active
|
||||
company prefix without double-prefixing already-prefixed paths. Plugin UI should
|
||||
not use raw same-origin `href`s or `window.location.assign()` for internal
|
||||
Paperclip navigation because those can force a full document reload.
|
||||
|
||||
### 19.0.2 Bundle Isolation
|
||||
|
||||
Plugin UI bundles are loaded as standard ES modules, not iframed. This gives plugins full rendering performance and access to the host's design tokens.
|
||||
@@ -1062,6 +1072,11 @@ The host SDK ships shared components that plugins can import to quickly build UI
|
||||
| `LogView` | Scrollable log output with timestamps | Webhook deliveries, job output, process logs |
|
||||
| `JsonTree` | Collapsible JSON tree for debugging | Raw API responses, plugin state inspection |
|
||||
| `Spinner` | Loading indicator | Data fetch states |
|
||||
| `FileTree` | Host-styled file/directory tree | Wiki pages, workspace files, import previews |
|
||||
| `IssuesList` | Host issue list | Plugin pages that need a native issue view |
|
||||
| `AssigneePicker` | Host assignee picker for agents and board users | Creating issues, assigning routines, filtering work |
|
||||
| `ProjectPicker` | Host project picker | Creating issues, scoping dashboards, filtering work |
|
||||
| `ManagedRoutinesList` | Host routine list | Plugin settings pages that manage routines |
|
||||
|
||||
Plugins may also use entirely custom components. The shared components exist to reduce boilerplate and keep visual consistency, not to limit what plugins can render.
|
||||
|
||||
|
||||
BIN
docs/pr-screenshots/pap-2837/newissue-cheap-desktop.png
Normal file
|
After Width: | Height: | Size: 182 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-cheap-mobile.png
Normal file
|
After Width: | Height: | Size: 108 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-custom-desktop.png
Normal file
|
After Width: | Height: | Size: 191 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-custom-mobile.png
Normal file
|
After Width: | Height: | Size: 121 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-primary-desktop.png
Normal file
|
After Width: | Height: | Size: 183 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-primary-mobile.png
Normal file
|
After Width: | Height: | Size: 105 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-unsupported-desktop.png
Normal file
|
After Width: | Height: | Size: 188 KiB |
BIN
docs/pr-screenshots/pap-2837/newissue-unsupported-mobile.png
Normal file
|
After Width: | Height: | Size: 106 KiB |
|
After Width: | Height: | Size: 335 KiB |
BIN
docs/pr-screenshots/pap-2837/runledger-profile-badges-mobile.png
Normal file
|
After Width: | Height: | Size: 151 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-claude-dark.png
Normal file
|
After Width: | Height: | Size: 74 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-claude-light.png
Normal file
|
After Width: | Height: | Size: 74 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-codex-dark.png
Normal file
|
After Width: | Height: | Size: 74 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-codex-light.png
Normal file
|
After Width: | Height: | Size: 74 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-custom-dark.png
Normal file
|
After Width: | Height: | Size: 88 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-custom-light.png
Normal file
|
After Width: | Height: | Size: 87 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-empty-library-dark.png
Normal file
|
After Width: | Height: | Size: 41 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-empty-library-light.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-loading-dark.png
Normal file
|
After Width: | Height: | Size: 36 KiB |
BIN
docs/pr-screenshots/pap-2944/skills-loading-light.png
Normal file
|
After Width: | Height: | Size: 36 KiB |
BIN
docs/pr-screenshots/pap-2945/monitor-surfaces.png
Normal file
|
After Width: | Height: | Size: 180 KiB |
BIN
docs/pr-screenshots/pr-5291/after-issue-management.png
Normal file
|
After Width: | Height: | Size: 701 KiB |
BIN
docs/pr-screenshots/pr-5291/after-navigation-layout.png
Normal file
|
After Width: | Height: | Size: 316 KiB |
BIN
docs/pr-screenshots/pr-5291/after-projects-workspaces.png
Normal file
|
After Width: | Height: | Size: 694 KiB |
BIN
docs/pr-screenshots/pr-5291/after-status-language.png
Normal file
|
After Width: | Height: | Size: 546 KiB |
BIN
docs/pr-screenshots/pr-5291/before-issue-management.png
Normal file
|
After Width: | Height: | Size: 701 KiB |
BIN
docs/pr-screenshots/pr-5291/before-navigation-layout.png
Normal file
|
After Width: | Height: | Size: 316 KiB |
BIN
docs/pr-screenshots/pr-5291/before-projects-workspaces.png
Normal file
|
After Width: | Height: | Size: 694 KiB |
@@ -15,9 +15,12 @@
|
||||
"build-storybook": "pnpm --filter @paperclipai/ui build-storybook",
|
||||
"build": "pnpm run preflight:workspace-links && pnpm -r build",
|
||||
"typecheck": "pnpm run preflight:workspace-links && pnpm -r typecheck",
|
||||
"typecheck:build-gaps": "pnpm run preflight:workspace-links && node scripts/run-typecheck-build-gaps.mjs",
|
||||
"test": "pnpm run test:run",
|
||||
"test:watch": "pnpm run preflight:workspace-links && vitest",
|
||||
"test:run": "pnpm run preflight:workspace-links && node scripts/run-vitest-stable.mjs",
|
||||
"test:run:general": "pnpm run preflight:workspace-links && pnpm --filter @paperclipai/plugin-sdk build && node scripts/run-vitest-stable.mjs --mode general",
|
||||
"test:run:serialized": "pnpm run preflight:workspace-links && pnpm --filter @paperclipai/plugin-sdk build && node scripts/run-vitest-stable.mjs --mode serialized",
|
||||
"db:generate": "pnpm --filter @paperclipai/db generate",
|
||||
"db:migrate": "pnpm --filter @paperclipai/db migrate",
|
||||
"issue-references:backfill": "pnpm run preflight:workspace-links && tsx scripts/backfill-issue-reference-mentions.ts",
|
||||
@@ -30,18 +33,22 @@
|
||||
"release:stable": "./scripts/release.sh stable",
|
||||
"release:github": "./scripts/create-github-release.sh",
|
||||
"release:rollback": "./scripts/rollback-latest.sh",
|
||||
"release:bootstrap-package": "node scripts/bootstrap-npm-package.mjs",
|
||||
"check:tokens": "node scripts/check-forbidden-tokens.mjs",
|
||||
"docs:dev": "cd docs && npx mintlify dev",
|
||||
"smoke:openclaw-join": "./scripts/smoke/openclaw-join.sh",
|
||||
"smoke:openclaw-docker-ui": "./scripts/smoke/openclaw-docker-ui.sh",
|
||||
"smoke:openclaw-sse-standalone": "./scripts/smoke/openclaw-sse-standalone.sh",
|
||||
"smoke:terminal-bench-loop-skill": "node scripts/smoke/terminal-bench-loop-skill-smoke.mjs",
|
||||
"test:release-registry": "node --test scripts/verify-release-registry-state.test.mjs scripts/release-package-map.test.mjs scripts/check-release-package-bootstrap.test.mjs",
|
||||
"test:e2e": "npx playwright test --config tests/e2e/playwright.config.ts",
|
||||
"test:e2e:headed": "npx playwright test --config tests/e2e/playwright.config.ts --headed",
|
||||
"test:e2e:multiuser-authenticated": "npx playwright test --config tests/e2e/playwright-multiuser-authenticated.config.ts",
|
||||
"evals:smoke": "cd evals/promptfoo && npx promptfoo@0.103.3 eval",
|
||||
"test:release-smoke": "npx playwright test --config tests/release-smoke/playwright.config.ts",
|
||||
"test:release-smoke:headed": "npx playwright test --config tests/release-smoke/playwright.config.ts --headed",
|
||||
"metrics:paperclip-commits": "tsx scripts/paperclip-commit-metrics.ts"
|
||||
"metrics:paperclip-commits": "tsx scripts/paperclip-commit-metrics.ts",
|
||||
"perf:issue-chat-long-thread": "node scripts/measure-issue-chat-long-thread.mjs"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.58.2",
|
||||
|
||||
134
packages/adapter-utils/src/command-managed-runtime.test.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import { mkdir, mkdtemp, readFile, rm, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execFile as execFileCallback } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { prepareCommandManagedRuntime } from "./command-managed-runtime.js";
|
||||
import type { RunProcessResult } from "./server-utils.js";
|
||||
|
||||
const execFile = promisify(execFileCallback);
|
||||
|
||||
describe("command managed runtime", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
it("keeps the runtime overlay out of sandbox workspace sync by default", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-command-runtime-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
|
||||
const localWorkspaceDir = path.join(rootDir, "local-workspace");
|
||||
const remoteWorkspaceDir = path.join(rootDir, "remote-workspace");
|
||||
await mkdir(path.join(localWorkspaceDir, ".paperclip-runtime"), { recursive: true });
|
||||
await mkdir(remoteWorkspaceDir, { recursive: true });
|
||||
await writeFile(path.join(localWorkspaceDir, "README.md"), "local workspace\n", "utf8");
|
||||
await writeFile(path.join(localWorkspaceDir, ".paperclip-runtime", "state.json"), "{\"keep\":true}\n", "utf8");
|
||||
|
||||
const calls: Array<{
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string>;
|
||||
stdin?: string;
|
||||
timeoutMs?: number;
|
||||
}> = [];
|
||||
const runner = {
|
||||
execute: async (input: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string>;
|
||||
stdin?: string;
|
||||
timeoutMs?: number;
|
||||
}): Promise<RunProcessResult> => {
|
||||
calls.push({ ...input });
|
||||
const startedAt = new Date().toISOString();
|
||||
const env = {
|
||||
...process.env,
|
||||
...input.env,
|
||||
};
|
||||
const command =
|
||||
input.command === "sh" ? "/bin/sh" : input.command === "bash" ? "/bin/bash" : input.command;
|
||||
const args = [...(input.args ?? [])];
|
||||
if (
|
||||
input.stdin != null &&
|
||||
(input.command === "sh" || input.command === "bash") &&
|
||||
args[0] === "-lc" &&
|
||||
typeof args[1] === "string"
|
||||
) {
|
||||
env.PAPERCLIP_TEST_STDIN = input.stdin;
|
||||
args[1] = `printf '%s' \"$PAPERCLIP_TEST_STDIN\" | (${args[1]})`;
|
||||
}
|
||||
try {
|
||||
const result = await execFile(command, args, {
|
||||
cwd: input.cwd,
|
||||
env,
|
||||
maxBuffer: 32 * 1024 * 1024,
|
||||
timeout: input.timeoutMs,
|
||||
});
|
||||
return {
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
pid: null,
|
||||
startedAt,
|
||||
};
|
||||
} catch (error) {
|
||||
const err = error as NodeJS.ErrnoException & {
|
||||
stdout?: string;
|
||||
stderr?: string;
|
||||
code?: string | number | null;
|
||||
signal?: NodeJS.Signals | null;
|
||||
killed?: boolean;
|
||||
};
|
||||
return {
|
||||
exitCode: typeof err.code === "number" ? err.code : null,
|
||||
signal: err.signal ?? null,
|
||||
timedOut: Boolean(err.killed && input.timeoutMs),
|
||||
stdout: err.stdout ?? "",
|
||||
stderr: err.stderr ?? "",
|
||||
pid: null,
|
||||
startedAt,
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const prepared = await prepareCommandManagedRuntime({
|
||||
runner,
|
||||
spec: {
|
||||
remoteCwd: remoteWorkspaceDir,
|
||||
timeoutMs: 30_000,
|
||||
},
|
||||
adapterKey: "claude",
|
||||
workspaceLocalDir: localWorkspaceDir,
|
||||
});
|
||||
|
||||
await expect(readFile(path.join(remoteWorkspaceDir, "README.md"), "utf8")).resolves.toBe("local workspace\n");
|
||||
await expect(readFile(path.join(remoteWorkspaceDir, ".paperclip-runtime", "state.json"), "utf8")).rejects
|
||||
.toMatchObject({ code: "ENOENT" });
|
||||
expect(calls.every((call) => call.stdin == null)).toBe(true);
|
||||
|
||||
await mkdir(path.join(remoteWorkspaceDir, ".paperclip-runtime"), { recursive: true });
|
||||
await writeFile(path.join(remoteWorkspaceDir, "README.md"), "remote workspace\n", "utf8");
|
||||
await writeFile(path.join(remoteWorkspaceDir, ".paperclip-runtime", "remote-state.json"), "{\"remote\":true}\n", "utf8");
|
||||
await prepared.restoreWorkspace();
|
||||
|
||||
await expect(readFile(path.join(localWorkspaceDir, "README.md"), "utf8")).resolves.toBe("remote workspace\n");
|
||||
await expect(readFile(path.join(localWorkspaceDir, ".paperclip-runtime", "state.json"), "utf8")).resolves
|
||||
.toBe("{\"keep\":true}\n");
|
||||
await expect(readFile(path.join(localWorkspaceDir, ".paperclip-runtime", "remote-state.json"), "utf8")).rejects
|
||||
.toMatchObject({ code: "ENOENT" });
|
||||
expect(calls.every((call) => call.stdin == null)).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
type SandboxManagedRuntimeClient,
|
||||
type SandboxRemoteExecutionSpec,
|
||||
} from "./sandbox-managed-runtime.js";
|
||||
import { preferredShellForSandbox } from "./sandbox-shell.js";
|
||||
import type { RunProcessResult } from "./server-utils.js";
|
||||
|
||||
export interface CommandManagedRuntimeRunner {
|
||||
@@ -23,10 +24,10 @@ export interface CommandManagedRuntimeRunner {
|
||||
|
||||
export interface CommandManagedRuntimeSpec {
|
||||
providerKey?: string | null;
|
||||
shellCommand?: "bash" | "sh" | null;
|
||||
leaseId?: string | null;
|
||||
remoteCwd: string;
|
||||
timeoutMs?: number | null;
|
||||
paperclipApiUrl?: string | null;
|
||||
}
|
||||
|
||||
export type CommandManagedRuntimeAsset = SandboxManagedRuntimeAsset;
|
||||
@@ -35,6 +36,12 @@ function shellQuote(value: string) {
|
||||
return `'${value.replace(/'/g, `'"'"'`)}'`;
|
||||
}
|
||||
|
||||
function mergeRuntimeExcludes(entries: string[] | undefined): string[] {
|
||||
return [...new Set([".paperclip-runtime", ...(entries ?? [])])];
|
||||
}
|
||||
|
||||
const REMOTE_WRITE_BASE64_CHUNK_SIZE = 32 * 1024;
|
||||
|
||||
function toBuffer(bytes: Buffer | Uint8Array | ArrayBuffer): Buffer {
|
||||
if (Buffer.isBuffer(bytes)) return bytes;
|
||||
if (bytes instanceof ArrayBuffer) return Buffer.from(bytes);
|
||||
@@ -48,14 +55,16 @@ function requireSuccessfulResult(result: RunProcessResult, action: string): void
|
||||
throw new Error(`${action} failed with exit code ${result.exitCode ?? "null"}${detail}`);
|
||||
}
|
||||
|
||||
function createCommandManagedRuntimeClient(input: {
|
||||
export function createCommandManagedRuntimeClient(input: {
|
||||
runner: CommandManagedRuntimeRunner;
|
||||
remoteCwd: string;
|
||||
timeoutMs: number;
|
||||
shellCommand?: "bash" | "sh" | null;
|
||||
}): SandboxManagedRuntimeClient {
|
||||
const shellCommand = preferredShellForSandbox(input.shellCommand);
|
||||
const runShell = async (script: string, opts: { stdin?: string; timeoutMs?: number } = {}) => {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
command: shellCommand,
|
||||
args: ["-lc", script],
|
||||
cwd: input.remoteCwd,
|
||||
stdin: opts.stdin,
|
||||
@@ -71,18 +80,42 @@ function createCommandManagedRuntimeClient(input: {
|
||||
},
|
||||
writeFile: async (remotePath, bytes) => {
|
||||
const body = toBuffer(bytes).toString("base64");
|
||||
const remoteDir = path.posix.dirname(remotePath);
|
||||
const remoteTempPath = `${remotePath}.paperclip-upload.b64`;
|
||||
|
||||
await runShell(
|
||||
`mkdir -p ${shellQuote(path.posix.dirname(remotePath))} && base64 -d > ${shellQuote(remotePath)}`,
|
||||
{ stdin: body },
|
||||
`mkdir -p ${shellQuote(remoteDir)} && rm -f ${shellQuote(remoteTempPath)} && : > ${shellQuote(remoteTempPath)}`,
|
||||
);
|
||||
for (let offset = 0; offset < body.length; offset += REMOTE_WRITE_BASE64_CHUNK_SIZE) {
|
||||
const chunk = body.slice(offset, offset + REMOTE_WRITE_BASE64_CHUNK_SIZE);
|
||||
await runShell(`printf '%s' ${shellQuote(chunk)} >> ${shellQuote(remoteTempPath)}`);
|
||||
}
|
||||
await runShell(
|
||||
`base64 -d < ${shellQuote(remoteTempPath)} > ${shellQuote(remotePath)} && rm -f ${shellQuote(remoteTempPath)}`,
|
||||
);
|
||||
},
|
||||
readFile: async (remotePath) => {
|
||||
const result = await runShell(`base64 < ${shellQuote(remotePath)}`);
|
||||
return Buffer.from(result.stdout.replace(/\s+/g, ""), "base64");
|
||||
},
|
||||
listFiles: async (remotePath) => {
|
||||
const result = await runShell(
|
||||
`if [ -d ${shellQuote(remotePath)} ]; then ` +
|
||||
`for entry in ${shellQuote(remotePath)}/*; do ` +
|
||||
`[ -f "$entry" ] || continue; ` +
|
||||
`basename "$entry"; ` +
|
||||
`done; ` +
|
||||
`fi`,
|
||||
);
|
||||
return result.stdout
|
||||
.split(/\r?\n/)
|
||||
.map((entry) => entry.trim())
|
||||
.filter((entry) => entry.length > 0)
|
||||
.sort((left, right) => left.localeCompare(right));
|
||||
},
|
||||
remove: async (remotePath) => {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
command: shellCommand,
|
||||
args: ["-lc", `rm -rf ${shellQuote(remotePath)}`],
|
||||
cwd: input.remoteCwd,
|
||||
timeoutMs: input.timeoutMs,
|
||||
@@ -91,7 +124,7 @@ function createCommandManagedRuntimeClient(input: {
|
||||
},
|
||||
run: async (command, options) => {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
command: shellCommand,
|
||||
args: ["-lc", command],
|
||||
cwd: input.remoteCwd,
|
||||
timeoutMs: options.timeoutMs,
|
||||
@@ -121,17 +154,18 @@ export async function prepareCommandManagedRuntime(input: {
|
||||
remoteCwd: workspaceRemoteDir,
|
||||
timeoutMs,
|
||||
apiKey: null,
|
||||
paperclipApiUrl: input.spec.paperclipApiUrl ?? null,
|
||||
};
|
||||
const client = createCommandManagedRuntimeClient({
|
||||
runner: input.runner,
|
||||
remoteCwd: workspaceRemoteDir,
|
||||
timeoutMs,
|
||||
shellCommand: input.spec.shellCommand,
|
||||
});
|
||||
const shellCommand = preferredShellForSandbox(input.spec.shellCommand);
|
||||
|
||||
if (input.installCommand?.trim()) {
|
||||
const result = await input.runner.execute({
|
||||
command: "sh",
|
||||
command: shellCommand,
|
||||
args: ["-lc", input.installCommand.trim()],
|
||||
cwd: workspaceRemoteDir,
|
||||
timeoutMs,
|
||||
@@ -145,7 +179,7 @@ export async function prepareCommandManagedRuntime(input: {
|
||||
adapterKey: input.adapterKey,
|
||||
workspaceLocalDir: input.workspaceLocalDir,
|
||||
workspaceRemoteDir,
|
||||
workspaceExclude: input.workspaceExclude,
|
||||
workspaceExclude: mergeRuntimeExcludes(input.workspaceExclude),
|
||||
preserveAbsentOnRestore: input.preserveAbsentOnRestore,
|
||||
assets: input.assets,
|
||||
});
|
||||
|
||||
21
packages/adapter-utils/src/command-redaction.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
export const REDACTED_COMMAND_TEXT_VALUE = "***REDACTED***";
|
||||
|
||||
const COMMAND_CLI_SECRET_OPTION_RE =
|
||||
/(\B-{1,2}(?:api[-_]?key|(?:access[-_]?|auth[-_]?)?token|token|authorization|bearer|secret|passwd|password|credential|jwt|private[-_]?key|cookie|connectionstring)(?:\s+|=)(["']?))[^\s"'`]+(\2)/gi;
|
||||
const COMMAND_ENV_SECRET_ASSIGNMENT_RE =
|
||||
/(\b[A-Za-z0-9_]*(?:TOKEN|KEY|SECRET|PASSWORD|PASSWD|AUTHORIZATION|JWT)[A-Za-z0-9_]*\s*=\s*)[^\s"'`]+/gi;
|
||||
const COMMAND_AUTHORIZATION_BEARER_RE = /(\bAuthorization\s*:\s*Bearer\s+)[^\s"'`]+/gi;
|
||||
const COMMAND_OPENAI_KEY_RE = /\bsk-[A-Za-z0-9_-]{12,}\b/g;
|
||||
const COMMAND_GITHUB_TOKEN_RE = /\bgh[pousr]_[A-Za-z0-9_]{20,}\b/g;
|
||||
const COMMAND_JWT_RE =
|
||||
/\b[A-Za-z0-9_-]{8,}\.[A-Za-z0-9_-]{8,}\.[A-Za-z0-9_-]{8,}(?:\.[A-Za-z0-9_-]{8,})?\b/g;
|
||||
|
||||
export function redactCommandText(command: string, redactedValue = REDACTED_COMMAND_TEXT_VALUE): string {
|
||||
return command
|
||||
.replace(COMMAND_AUTHORIZATION_BEARER_RE, `$1${redactedValue}`)
|
||||
.replace(COMMAND_CLI_SECRET_OPTION_RE, `$1${redactedValue}$3`)
|
||||
.replace(COMMAND_ENV_SECRET_ASSIGNMENT_RE, `$1${redactedValue}`)
|
||||
.replace(COMMAND_OPENAI_KEY_RE, redactedValue)
|
||||
.replace(COMMAND_GITHUB_TOKEN_RE, redactedValue)
|
||||
.replace(COMMAND_JWT_RE, redactedValue);
|
||||
}
|
||||
@@ -1,14 +1,61 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import { createServer } from "node:http";
|
||||
import { mkdir, mkdtemp, rm } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import {
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetToRemoteSpec,
|
||||
adapterExecutionTargetUsesPaperclipBridge,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
type AdapterSandboxExecutionTarget,
|
||||
} from "./execution-target.js";
|
||||
import { runChildProcess } from "./server-utils.js";
|
||||
|
||||
describe("sandbox adapter execution targets", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
function createLocalSandboxRunner() {
|
||||
let counter = 0;
|
||||
return {
|
||||
execute: async (input: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string>;
|
||||
stdin?: string;
|
||||
timeoutMs?: number;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
onSpawn?: (meta: { pid: number; startedAt: string }) => Promise<void>;
|
||||
}) => {
|
||||
counter += 1;
|
||||
const command = input.command === "bash" ? "/bin/bash" : input.command;
|
||||
return runChildProcess(`sandbox-run-${counter}`, command, input.args ?? [], {
|
||||
cwd: input.cwd ?? process.cwd(),
|
||||
env: input.env ?? {},
|
||||
stdin: input.stdin,
|
||||
timeoutSec: Math.max(1, Math.ceil((input.timeoutMs ?? 30_000) / 1000)),
|
||||
graceSec: 5,
|
||||
onLog: input.onLog ?? (async () => {}),
|
||||
onSpawn: input.onSpawn
|
||||
? async (meta) => input.onSpawn?.({ pid: meta.pid, startedAt: meta.startedAt })
|
||||
: undefined,
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
it("executes through the provider-neutral runner without a remote spec", async () => {
|
||||
const runner = {
|
||||
execute: vi.fn(async () => ({
|
||||
@@ -93,4 +140,214 @@ describe("sandbox adapter execution targets", () => {
|
||||
timeoutMs: 7000,
|
||||
}));
|
||||
});
|
||||
|
||||
it("treats SSH targets as bridge-only", () => {
|
||||
const target = {
|
||||
kind: "remote" as const,
|
||||
transport: "ssh" as const,
|
||||
remoteCwd: "/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "paperclip",
|
||||
remoteWorkspacePath: "/workspace",
|
||||
remoteCwd: "/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
};
|
||||
|
||||
expect(adapterExecutionTargetUsesPaperclipBridge(target)).toBe(true);
|
||||
expect(adapterExecutionTargetSessionIdentity(target)).toEqual({
|
||||
transport: "ssh",
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "paperclip",
|
||||
remoteCwd: "/workspace",
|
||||
});
|
||||
});
|
||||
|
||||
it("uses the provider-declared shell for sandbox helper commands", async () => {
|
||||
const runner = {
|
||||
execute: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: "/home/sandbox",
|
||||
stderr: "",
|
||||
pid: null,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
};
|
||||
const target: AdapterSandboxExecutionTarget = {
|
||||
kind: "remote",
|
||||
transport: "sandbox",
|
||||
providerKey: "custom-provider",
|
||||
shellCommand: "bash",
|
||||
remoteCwd: "/workspace",
|
||||
runner,
|
||||
};
|
||||
|
||||
await runAdapterExecutionTargetShellCommand("run-2b", target, 'printf %s "$HOME"', {
|
||||
cwd: "/local/workspace",
|
||||
env: {},
|
||||
timeoutSec: 7,
|
||||
});
|
||||
|
||||
expect(runner.execute).toHaveBeenCalledWith(expect.objectContaining({
|
||||
command: "bash",
|
||||
args: ["-lc", 'printf %s "$HOME"'],
|
||||
cwd: "/workspace",
|
||||
timeoutMs: 7000,
|
||||
}));
|
||||
});
|
||||
|
||||
it("starts a localhost Paperclip bridge for sandbox targets in bridge mode", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-execution-target-bridge-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const remoteCwd = path.join(rootDir, "workspace");
|
||||
const runtimeRootDir = path.join(remoteCwd, ".paperclip-runtime", "codex");
|
||||
await mkdir(runtimeRootDir, { recursive: true });
|
||||
|
||||
const requests: Array<{ method: string; url: string; auth: string | null; runId: string | null }> = [];
|
||||
const apiServer = createServer((req, res) => {
|
||||
requests.push({
|
||||
method: req.method ?? "GET",
|
||||
url: req.url ?? "/",
|
||||
auth: req.headers.authorization ?? null,
|
||||
runId: typeof req.headers["x-paperclip-run-id"] === "string" ? req.headers["x-paperclip-run-id"] : null,
|
||||
});
|
||||
res.writeHead(200, { "content-type": "application/json" });
|
||||
res.end(JSON.stringify({ ok: true }));
|
||||
});
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
apiServer.once("error", reject);
|
||||
apiServer.listen(0, "127.0.0.1", () => resolve());
|
||||
});
|
||||
const address = apiServer.address();
|
||||
if (!address || typeof address === "string") {
|
||||
throw new Error("Expected the bridge test API server to listen on a TCP port.");
|
||||
}
|
||||
|
||||
const target: AdapterSandboxExecutionTarget = {
|
||||
kind: "remote",
|
||||
transport: "sandbox",
|
||||
providerKey: "e2b",
|
||||
environmentId: "env-1",
|
||||
leaseId: "lease-1",
|
||||
remoteCwd,
|
||||
runner: createLocalSandboxRunner(),
|
||||
timeoutMs: 30_000,
|
||||
};
|
||||
|
||||
const bridge = await startAdapterExecutionTargetPaperclipBridge({
|
||||
runId: "run-bridge",
|
||||
target,
|
||||
runtimeRootDir,
|
||||
adapterKey: "codex",
|
||||
hostApiToken: "real-run-jwt",
|
||||
hostApiUrl: `http://127.0.0.1:${address.port}`,
|
||||
});
|
||||
try {
|
||||
expect(bridge).not.toBeNull();
|
||||
expect(bridge?.env.PAPERCLIP_API_URL).toMatch(/^http:\/\/127\.0\.0\.1:\d+$/);
|
||||
expect(bridge?.env.PAPERCLIP_API_KEY).not.toBe("real-run-jwt");
|
||||
expect(bridge?.env.PAPERCLIP_API_BRIDGE_MODE).toBe("queue_v1");
|
||||
|
||||
const response = await fetch(`${bridge!.env.PAPERCLIP_API_URL}/api/agents/me`, {
|
||||
headers: {
|
||||
authorization: `Bearer ${bridge!.env.PAPERCLIP_API_KEY}`,
|
||||
accept: "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(await response.json()).toEqual({ ok: true });
|
||||
expect(requests).toEqual([{
|
||||
method: "GET",
|
||||
url: "/api/agents/me",
|
||||
auth: "Bearer real-run-jwt",
|
||||
runId: "run-bridge",
|
||||
}]);
|
||||
} finally {
|
||||
await bridge?.stop();
|
||||
await new Promise<void>((resolve) => apiServer.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it("fails oversized host responses with a 502 before returning them to the sandbox client", async () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-execution-target-bridge-limit-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const remoteCwd = path.join(rootDir, "workspace");
|
||||
const runtimeRootDir = path.join(remoteCwd, ".paperclip-runtime", "codex");
|
||||
await mkdir(runtimeRootDir, { recursive: true });
|
||||
|
||||
const requests: Array<{ method: string; url: string; auth: string | null; runId: string | null }> = [];
|
||||
const largeBody = "x".repeat(64);
|
||||
const apiServer = createServer((req, res) => {
|
||||
requests.push({
|
||||
method: req.method ?? "GET",
|
||||
url: req.url ?? "/",
|
||||
auth: req.headers.authorization ?? null,
|
||||
runId: typeof req.headers["x-paperclip-run-id"] === "string" ? req.headers["x-paperclip-run-id"] : null,
|
||||
});
|
||||
res.writeHead(200, {
|
||||
"content-type": "application/json",
|
||||
"content-length": String(Buffer.byteLength(largeBody, "utf8")),
|
||||
});
|
||||
res.end(largeBody);
|
||||
});
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
apiServer.once("error", reject);
|
||||
apiServer.listen(0, "127.0.0.1", () => resolve());
|
||||
});
|
||||
const address = apiServer.address();
|
||||
if (!address || typeof address === "string") {
|
||||
throw new Error("Expected the bridge test API server to listen on a TCP port.");
|
||||
}
|
||||
|
||||
const target: AdapterSandboxExecutionTarget = {
|
||||
kind: "remote",
|
||||
transport: "sandbox",
|
||||
providerKey: "e2b",
|
||||
environmentId: "env-1",
|
||||
leaseId: "lease-1",
|
||||
remoteCwd,
|
||||
runner: createLocalSandboxRunner(),
|
||||
timeoutMs: 30_000,
|
||||
};
|
||||
|
||||
const bridge = await startAdapterExecutionTargetPaperclipBridge({
|
||||
runId: "run-bridge-limit",
|
||||
target,
|
||||
runtimeRootDir,
|
||||
adapterKey: "codex",
|
||||
hostApiToken: "real-run-jwt",
|
||||
hostApiUrl: `http://127.0.0.1:${address.port}`,
|
||||
maxBodyBytes: 32,
|
||||
});
|
||||
try {
|
||||
const response = await fetch(`${bridge!.env.PAPERCLIP_API_URL}/api/agents/me`, {
|
||||
headers: {
|
||||
authorization: `Bearer ${bridge!.env.PAPERCLIP_API_KEY}`,
|
||||
accept: "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.status).toBe(502);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "Bridge response body exceeded the configured size limit of 32 bytes.",
|
||||
});
|
||||
expect(requests).toEqual([{
|
||||
method: "GET",
|
||||
url: "/api/agents/me",
|
||||
auth: "Bearer real-run-jwt",
|
||||
runId: "run-bridge-limit",
|
||||
}]);
|
||||
} finally {
|
||||
await bridge?.stop();
|
||||
await new Promise<void>((resolve) => apiServer.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,8 @@ import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import * as ssh from "./ssh.js";
|
||||
import {
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
ensureAdapterExecutionTargetRuntimeCommandInstalled,
|
||||
resolveAdapterExecutionTargetCwd,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
} from "./execution-target.js";
|
||||
|
||||
@@ -159,3 +161,123 @@ describe("runAdapterExecutionTargetShellCommand", () => {
|
||||
})).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ensureAdapterExecutionTargetRuntimeCommandInstalled", () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("runs install commands for sandbox targets", async () => {
|
||||
const runner = {
|
||||
execute: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: "",
|
||||
stderr: "",
|
||||
pid: null,
|
||||
startedAt: new Date().toISOString(),
|
||||
})),
|
||||
};
|
||||
|
||||
await ensureAdapterExecutionTargetRuntimeCommandInstalled({
|
||||
runId: "run-install",
|
||||
target: {
|
||||
kind: "remote",
|
||||
transport: "sandbox",
|
||||
providerKey: "e2b",
|
||||
remoteCwd: "/remote/workspace",
|
||||
runner,
|
||||
},
|
||||
installCommand: "npm install -g @google/gemini-cli",
|
||||
cwd: "/local/workspace",
|
||||
env: { PATH: "/usr/bin" },
|
||||
timeoutSec: 30,
|
||||
});
|
||||
|
||||
expect(runner.execute).toHaveBeenCalledWith(expect.objectContaining({
|
||||
command: "sh",
|
||||
args: ["-lc", "npm install -g @google/gemini-cli"],
|
||||
cwd: "/remote/workspace",
|
||||
env: { PATH: "/usr/bin" },
|
||||
timeoutMs: 30_000,
|
||||
}));
|
||||
});
|
||||
|
||||
it("skips install commands for SSH targets", async () => {
|
||||
const runSshCommandSpy = vi.spyOn(ssh, "runSshCommand").mockResolvedValue({
|
||||
stdout: "",
|
||||
stderr: "",
|
||||
});
|
||||
|
||||
await ensureAdapterExecutionTargetRuntimeCommandInstalled({
|
||||
runId: "run-skip",
|
||||
target: {
|
||||
kind: "remote",
|
||||
transport: "ssh",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
installCommand: "npm install -g @google/gemini-cli",
|
||||
cwd: "/tmp/local",
|
||||
env: {},
|
||||
});
|
||||
|
||||
expect(runSshCommandSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveAdapterExecutionTargetCwd", () => {
|
||||
const sshTarget = {
|
||||
kind: "remote" as const,
|
||||
transport: "ssh" as const,
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
spec: {
|
||||
host: "ssh.example.test",
|
||||
port: 22,
|
||||
username: "ssh-user",
|
||||
remoteCwd: "/srv/paperclip/workspace",
|
||||
remoteWorkspacePath: "/srv/paperclip/workspace",
|
||||
privateKey: null,
|
||||
knownHosts: null,
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
};
|
||||
|
||||
it("falls back to the remote cwd when no adapter cwd is configured", () => {
|
||||
expect(resolveAdapterExecutionTargetCwd(sshTarget, "", "/Users/host/repo/server")).toBe(
|
||||
"/srv/paperclip/workspace",
|
||||
);
|
||||
expect(resolveAdapterExecutionTargetCwd(sshTarget, " ", "/Users/host/repo/server")).toBe(
|
||||
"/srv/paperclip/workspace",
|
||||
);
|
||||
expect(resolveAdapterExecutionTargetCwd(sshTarget, null, "/Users/host/repo/server")).toBe(
|
||||
"/srv/paperclip/workspace",
|
||||
);
|
||||
});
|
||||
|
||||
it("preserves an explicit adapter cwd when one is configured", () => {
|
||||
expect(
|
||||
resolveAdapterExecutionTargetCwd(
|
||||
sshTarget,
|
||||
"/srv/paperclip/custom-agent-dir",
|
||||
"/Users/host/repo/server",
|
||||
),
|
||||
).toBe("/srv/paperclip/custom-agent-dir");
|
||||
});
|
||||
|
||||
it("keeps the local fallback cwd for local targets", () => {
|
||||
expect(resolveAdapterExecutionTargetCwd(null, "", "/Users/host/repo/server")).toBe(
|
||||
"/Users/host/repo/server",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -10,7 +10,15 @@ import {
|
||||
remoteExecutionSessionMatches,
|
||||
type RemoteManagedRuntimeAsset,
|
||||
} from "./remote-managed-runtime.js";
|
||||
import { parseSshRemoteExecutionSpec, runSshCommand, shellQuote } from "./ssh.js";
|
||||
import {
|
||||
createCommandManagedSandboxCallbackBridgeQueueClient,
|
||||
createSandboxCallbackBridgeAsset,
|
||||
createSandboxCallbackBridgeToken,
|
||||
DEFAULT_SANDBOX_CALLBACK_BRIDGE_MAX_BODY_BYTES,
|
||||
startSandboxCallbackBridgeServer,
|
||||
startSandboxCallbackBridgeWorker,
|
||||
} from "./sandbox-callback-bridge.js";
|
||||
import { createSshCommandManagedRuntimeRunner, parseSshRemoteExecutionSpec, runSshCommand, shellQuote } from "./ssh.js";
|
||||
import {
|
||||
ensureCommandResolvable,
|
||||
resolveCommandForLogs,
|
||||
@@ -18,6 +26,7 @@ import {
|
||||
type RunProcessResult,
|
||||
type TerminalResultCleanupOptions,
|
||||
} from "./server-utils.js";
|
||||
import { preferredShellForSandbox } from "./sandbox-shell.js";
|
||||
|
||||
export interface AdapterLocalExecutionTarget {
|
||||
kind: "local";
|
||||
@@ -31,7 +40,6 @@ export interface AdapterSshExecutionTarget {
|
||||
environmentId?: string | null;
|
||||
leaseId?: string | null;
|
||||
remoteCwd: string;
|
||||
paperclipApiUrl?: string | null;
|
||||
spec: SshRemoteExecutionSpec;
|
||||
}
|
||||
|
||||
@@ -39,10 +47,10 @@ export interface AdapterSandboxExecutionTarget {
|
||||
kind: "remote";
|
||||
transport: "sandbox";
|
||||
providerKey?: string | null;
|
||||
shellCommand?: "bash" | "sh" | null;
|
||||
environmentId?: string | null;
|
||||
leaseId?: string | null;
|
||||
remoteCwd: string;
|
||||
paperclipApiUrl?: string | null;
|
||||
timeoutMs?: number | null;
|
||||
runner?: CommandManagedRuntimeRunner;
|
||||
}
|
||||
@@ -82,6 +90,11 @@ export interface AdapterExecutionTargetShellOptions {
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
}
|
||||
|
||||
export interface AdapterExecutionTargetPaperclipBridgeHandle {
|
||||
env: Record<string, string>;
|
||||
stop(): Promise<void>;
|
||||
}
|
||||
|
||||
function parseObject(value: unknown): Record<string, unknown> {
|
||||
return value && typeof value === "object" && !Array.isArray(value)
|
||||
? (value as Record<string, unknown>)
|
||||
@@ -96,6 +109,27 @@ function readStringMeta(parsed: Record<string, unknown>, key: string): string |
|
||||
return readString(parsed[key]);
|
||||
}
|
||||
|
||||
function resolveHostForUrl(rawHost: string): string {
|
||||
const host = rawHost.trim();
|
||||
if (!host || host === "0.0.0.0" || host === "::") return "localhost";
|
||||
if (host.includes(":") && !host.startsWith("[") && !host.endsWith("]")) return `[${host}]`;
|
||||
return host;
|
||||
}
|
||||
|
||||
function resolveDefaultPaperclipApiUrl(): string {
|
||||
const runtimeHost = resolveHostForUrl(
|
||||
process.env.PAPERCLIP_LISTEN_HOST ?? process.env.HOST ?? "localhost",
|
||||
);
|
||||
// 3100 matches the default Paperclip dev server port when the runtime does not provide one.
|
||||
const runtimePort = process.env.PAPERCLIP_LISTEN_PORT ?? process.env.PORT ?? "3100";
|
||||
return `http://${runtimeHost}:${runtimePort}`;
|
||||
}
|
||||
|
||||
function isBridgeDebugEnabled(env: NodeJS.ProcessEnv): boolean {
|
||||
const value = env.PAPERCLIP_BRIDGE_DEBUG?.trim().toLowerCase();
|
||||
return value === "1" || value === "true" || value === "yes";
|
||||
}
|
||||
|
||||
function isAdapterExecutionTargetInstance(value: unknown): value is AdapterExecutionTarget {
|
||||
const parsed = parseObject(value);
|
||||
if (parsed.kind === "local") return true;
|
||||
@@ -130,12 +164,21 @@ export function adapterExecutionTargetRemoteCwd(
|
||||
return target?.kind === "remote" ? target.remoteCwd : localCwd;
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetPaperclipApiUrl(
|
||||
export function resolveAdapterExecutionTargetCwd(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): string | null {
|
||||
if (target?.kind !== "remote") return null;
|
||||
if (target.transport === "ssh") return target.paperclipApiUrl ?? target.spec.paperclipApiUrl ?? null;
|
||||
return target.paperclipApiUrl ?? null;
|
||||
configuredCwd: string | null | undefined,
|
||||
localFallbackCwd: string,
|
||||
): string {
|
||||
if (typeof configuredCwd === "string" && configuredCwd.trim().length > 0) {
|
||||
return configuredCwd;
|
||||
}
|
||||
return adapterExecutionTargetRemoteCwd(target, localFallbackCwd);
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetUsesPaperclipBridge(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): boolean {
|
||||
return target?.kind === "remote";
|
||||
}
|
||||
|
||||
export function describeAdapterExecutionTarget(
|
||||
@@ -155,6 +198,33 @@ function requireSandboxRunner(target: AdapterSandboxExecutionTarget): CommandMan
|
||||
);
|
||||
}
|
||||
|
||||
function preferredSandboxShell(target: AdapterSandboxExecutionTarget): "bash" | "sh" {
|
||||
return preferredShellForSandbox(target.shellCommand);
|
||||
}
|
||||
|
||||
type AdapterCommandCapableExecutionTarget = AdapterSshExecutionTarget | AdapterSandboxExecutionTarget;
|
||||
|
||||
function adapterExecutionTargetCommandRunner(target: AdapterCommandCapableExecutionTarget): CommandManagedRuntimeRunner {
|
||||
if (target.transport === "ssh") {
|
||||
return createSshCommandManagedRuntimeRunner({
|
||||
spec: target.spec,
|
||||
defaultCwd: target.remoteCwd,
|
||||
maxBufferBytes: DEFAULT_SANDBOX_CALLBACK_BRIDGE_MAX_BODY_BYTES * 4,
|
||||
});
|
||||
}
|
||||
return requireSandboxRunner(target);
|
||||
}
|
||||
|
||||
function adapterExecutionTargetShellCommand(target: AdapterCommandCapableExecutionTarget): "bash" | "sh" {
|
||||
return target.transport === "ssh" ? "sh" : preferredSandboxShell(target);
|
||||
}
|
||||
|
||||
function adapterExecutionTargetTimeoutMs(
|
||||
target: AdapterCommandCapableExecutionTarget,
|
||||
): number | null | undefined {
|
||||
return target.transport === "sandbox" ? target.timeoutMs : undefined;
|
||||
}
|
||||
|
||||
export async function ensureAdapterExecutionTargetCommandResolvable(
|
||||
command: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
@@ -282,8 +352,9 @@ export async function runAdapterExecutionTargetShellCommand(
|
||||
}
|
||||
}
|
||||
|
||||
const shellCommand = preferredSandboxShell(target);
|
||||
return await requireSandboxRunner(target).execute({
|
||||
command: "sh",
|
||||
command: shellCommand,
|
||||
args: ["-lc", command],
|
||||
cwd: target.remoteCwd,
|
||||
env: options.env,
|
||||
@@ -322,6 +393,60 @@ export async function readAdapterExecutionTargetHomeDir(
|
||||
return homeDir.length > 0 ? homeDir : null;
|
||||
}
|
||||
|
||||
export async function ensureAdapterExecutionTargetRuntimeCommandInstalled(input: {
|
||||
runId: string;
|
||||
target: AdapterExecutionTarget | null | undefined;
|
||||
installCommand?: string | null;
|
||||
detectCommand?: string | null;
|
||||
cwd: string;
|
||||
env: Record<string, string>;
|
||||
timeoutSec?: number;
|
||||
graceSec?: number;
|
||||
onLog?: AdapterExecutionTargetShellOptions["onLog"];
|
||||
}): Promise<void> {
|
||||
const installCommand = input.installCommand?.trim();
|
||||
if (!installCommand || input.target?.kind !== "remote" || input.target.transport !== "sandbox") {
|
||||
return;
|
||||
}
|
||||
|
||||
const detectCommand = input.detectCommand?.trim();
|
||||
if (detectCommand) {
|
||||
const probe = await runAdapterExecutionTargetShellCommand(
|
||||
input.runId,
|
||||
input.target,
|
||||
`command -v ${shellQuote(detectCommand)} >/dev/null 2>&1`,
|
||||
{
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
timeoutSec: input.timeoutSec,
|
||||
graceSec: input.graceSec,
|
||||
},
|
||||
);
|
||||
if (!probe.timedOut && probe.exitCode === 0) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const result = await runAdapterExecutionTargetShellCommand(
|
||||
input.runId,
|
||||
input.target,
|
||||
installCommand,
|
||||
{
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
timeoutSec: input.timeoutSec,
|
||||
graceSec: input.graceSec,
|
||||
onLog: input.onLog,
|
||||
},
|
||||
);
|
||||
if (result.timedOut) {
|
||||
throw new Error(`Timed out while installing the adapter runtime command via: ${installCommand}`);
|
||||
}
|
||||
if ((result.exitCode ?? 0) !== 0) {
|
||||
throw new Error(`Failed to install the adapter runtime command via: ${installCommand}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureAdapterExecutionTargetFile(
|
||||
runId: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
@@ -336,6 +461,64 @@ export async function ensureAdapterExecutionTargetFile(
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure a working directory exists (and is a directory) on the execution target.
|
||||
*
|
||||
* For local targets this delegates to the local `ensureAbsoluteDirectory` helper
|
||||
* (Node fs). For remote (SSH/sandbox) targets it shells out and runs
|
||||
* `mkdir -p` (when allowed) followed by a `[ -d ]` check so the result reflects
|
||||
* the directory state inside the environment, not on the Paperclip host.
|
||||
*
|
||||
* Throws an Error with a human-readable message on failure.
|
||||
*/
|
||||
export async function ensureAdapterExecutionTargetDirectory(
|
||||
runId: string,
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
cwd: string,
|
||||
options: AdapterExecutionTargetShellOptions & { createIfMissing?: boolean },
|
||||
): Promise<void> {
|
||||
const createIfMissing = options.createIfMissing ?? false;
|
||||
|
||||
if (!target || target.kind === "local") {
|
||||
const { ensureAbsoluteDirectory } = await import("./server-utils.js");
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing });
|
||||
return;
|
||||
}
|
||||
|
||||
// Remote (SSH or sandbox): both expect POSIX absolute paths inside the env.
|
||||
if (!cwd.startsWith("/")) {
|
||||
throw new Error(`Working directory must be an absolute POSIX path on the remote target: "${cwd}"`);
|
||||
}
|
||||
|
||||
const quoted = shellQuote(cwd);
|
||||
const script = createIfMissing
|
||||
? `mkdir -p ${quoted} && [ -d ${quoted} ]`
|
||||
: `[ -d ${quoted} ]`;
|
||||
|
||||
const result = await runAdapterExecutionTargetShellCommand(runId, target, script, {
|
||||
cwd: target.kind === "remote" ? target.remoteCwd : cwd,
|
||||
env: options.env,
|
||||
timeoutSec: options.timeoutSec ?? 15,
|
||||
graceSec: options.graceSec ?? 5,
|
||||
onLog: options.onLog,
|
||||
});
|
||||
|
||||
if (result.timedOut) {
|
||||
throw new Error(`Timed out checking working directory on remote target: "${cwd}"`);
|
||||
}
|
||||
if ((result.exitCode ?? 1) !== 0) {
|
||||
const detail = (result.stderr || result.stdout || "").trim();
|
||||
if (createIfMissing) {
|
||||
throw new Error(
|
||||
`Could not create working directory "${cwd}" on remote target${detail ? `: ${detail}` : "."}`,
|
||||
);
|
||||
}
|
||||
throw new Error(
|
||||
`Working directory does not exist on remote target: "${cwd}"${detail ? ` (${detail})` : ""}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function adapterExecutionTargetSessionIdentity(
|
||||
target: AdapterExecutionTarget | null | undefined,
|
||||
): Record<string, unknown> | null {
|
||||
@@ -347,7 +530,6 @@ export function adapterExecutionTargetSessionIdentity(
|
||||
environmentId: target.environmentId ?? null,
|
||||
leaseId: target.leaseId ?? null,
|
||||
remoteCwd: target.remoteCwd,
|
||||
...(target.paperclipApiUrl ? { paperclipApiUrl: target.paperclipApiUrl } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -366,8 +548,7 @@ export function adapterExecutionTargetSessionMatches(
|
||||
readStringMeta(parsedSaved, "providerKey") === current?.providerKey &&
|
||||
readStringMeta(parsedSaved, "environmentId") === current?.environmentId &&
|
||||
readStringMeta(parsedSaved, "leaseId") === current?.leaseId &&
|
||||
readStringMeta(parsedSaved, "remoteCwd") === current?.remoteCwd &&
|
||||
readStringMeta(parsedSaved, "paperclipApiUrl") === (current?.paperclipApiUrl ?? null)
|
||||
readStringMeta(parsedSaved, "remoteCwd") === current?.remoteCwd
|
||||
);
|
||||
}
|
||||
|
||||
@@ -392,7 +573,6 @@ export function parseAdapterExecutionTarget(value: unknown): AdapterExecutionTar
|
||||
environmentId: readStringMeta(parsed, "environmentId"),
|
||||
leaseId: readStringMeta(parsed, "leaseId"),
|
||||
remoteCwd: spec.remoteCwd,
|
||||
paperclipApiUrl: readStringMeta(parsed, "paperclipApiUrl") ?? spec.paperclipApiUrl ?? null,
|
||||
spec,
|
||||
};
|
||||
}
|
||||
@@ -407,7 +587,6 @@ export function parseAdapterExecutionTarget(value: unknown): AdapterExecutionTar
|
||||
environmentId: readStringMeta(parsed, "environmentId"),
|
||||
leaseId: readStringMeta(parsed, "leaseId"),
|
||||
remoteCwd,
|
||||
paperclipApiUrl: readStringMeta(parsed, "paperclipApiUrl"),
|
||||
timeoutMs: typeof parsed.timeoutMs === "number" ? parsed.timeoutMs : null,
|
||||
};
|
||||
}
|
||||
@@ -428,7 +607,6 @@ export function adapterExecutionTargetFromRemoteExecution(
|
||||
environmentId: metadata.environmentId ?? null,
|
||||
leaseId: metadata.leaseId ?? null,
|
||||
remoteCwd: ssh.remoteCwd,
|
||||
paperclipApiUrl: ssh.paperclipApiUrl ?? null,
|
||||
spec: ssh,
|
||||
};
|
||||
}
|
||||
@@ -487,10 +665,10 @@ export async function prepareAdapterExecutionTargetRuntime(input: {
|
||||
runner: requireSandboxRunner(target),
|
||||
spec: {
|
||||
providerKey: target.providerKey,
|
||||
shellCommand: target.shellCommand,
|
||||
leaseId: target.leaseId,
|
||||
remoteCwd: target.remoteCwd,
|
||||
timeoutMs: target.timeoutMs,
|
||||
paperclipApiUrl: target.paperclipApiUrl,
|
||||
},
|
||||
adapterKey: input.adapterKey,
|
||||
workspaceLocalDir: input.workspaceLocalDir,
|
||||
@@ -514,3 +692,195 @@ export function runtimeAssetDir(
|
||||
): string {
|
||||
return prepared.assetDirs[key] ?? path.posix.join(fallbackRemoteCwd, ".paperclip-runtime", key);
|
||||
}
|
||||
|
||||
function buildBridgeResponseHeaders(response: Response): Record<string, string> {
|
||||
const out: Record<string, string> = {};
|
||||
for (const key of ["content-type", "etag", "last-modified"]) {
|
||||
const value = response.headers.get(key);
|
||||
if (value && value.trim().length > 0) out[key] = value.trim();
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function buildBridgeForwardUrl(baseUrl: string, request: { path: string; query: string }): URL {
|
||||
const url = new URL(request.path, baseUrl);
|
||||
const query = request.query.trim();
|
||||
url.search = query.startsWith("?") ? query.slice(1) : query;
|
||||
return url;
|
||||
}
|
||||
|
||||
function bridgeResponseBodyLimitError(maxBodyBytes: number): Error {
|
||||
return new Error(`Bridge response body exceeded the configured size limit of ${maxBodyBytes} bytes.`);
|
||||
}
|
||||
|
||||
async function readBridgeForwardResponseBody(response: Response, maxBodyBytes: number): Promise<string> {
|
||||
const rawContentLength = response.headers.get("content-length");
|
||||
if (rawContentLength) {
|
||||
const contentLength = Number.parseInt(rawContentLength, 10);
|
||||
if (Number.isFinite(contentLength) && contentLength > maxBodyBytes) {
|
||||
throw bridgeResponseBodyLimitError(maxBodyBytes);
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
return "";
|
||||
}
|
||||
|
||||
const reader = response.body.getReader();
|
||||
const chunks: Buffer[] = [];
|
||||
let totalBytes = 0;
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
if (!value) continue;
|
||||
totalBytes += value.byteLength;
|
||||
if (totalBytes > maxBodyBytes) {
|
||||
await reader.cancel().catch(() => undefined);
|
||||
throw bridgeResponseBodyLimitError(maxBodyBytes);
|
||||
}
|
||||
chunks.push(Buffer.from(value));
|
||||
}
|
||||
return Buffer.concat(chunks, totalBytes).toString("utf8");
|
||||
}
|
||||
|
||||
export async function startAdapterExecutionTargetPaperclipBridge(input: {
|
||||
runId: string;
|
||||
target: AdapterExecutionTarget | null | undefined;
|
||||
runtimeRootDir: string | null | undefined;
|
||||
adapterKey: string;
|
||||
hostApiToken: string | null | undefined;
|
||||
hostApiUrl?: string | null;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
maxBodyBytes?: number | null;
|
||||
}): Promise<AdapterExecutionTargetPaperclipBridgeHandle | null> {
|
||||
if (!adapterExecutionTargetUsesPaperclipBridge(input.target)) {
|
||||
return null;
|
||||
}
|
||||
if (!input.target || input.target.kind !== "remote") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const target = input.target;
|
||||
const onLog = input.onLog ?? (async () => {});
|
||||
const hostApiToken = input.hostApiToken?.trim() ?? "";
|
||||
if (hostApiToken.length === 0) {
|
||||
throw new Error("Sandbox bridge mode requires a host-side Paperclip API token.");
|
||||
}
|
||||
|
||||
const runtimeRootDir =
|
||||
input.runtimeRootDir?.trim().length
|
||||
? input.runtimeRootDir.trim()
|
||||
: path.posix.join(target.remoteCwd, ".paperclip-runtime", input.adapterKey);
|
||||
const bridgeRuntimeDir = path.posix.join(runtimeRootDir, "paperclip-bridge");
|
||||
const queueDir = path.posix.join(bridgeRuntimeDir, "queue");
|
||||
const assetRemoteDir = path.posix.join(bridgeRuntimeDir, "server");
|
||||
const bridgeToken = createSandboxCallbackBridgeToken();
|
||||
const maxBodyBytes =
|
||||
typeof input.maxBodyBytes === "number" && Number.isFinite(input.maxBodyBytes) && input.maxBodyBytes > 0
|
||||
? Math.trunc(input.maxBodyBytes)
|
||||
: DEFAULT_SANDBOX_CALLBACK_BRIDGE_MAX_BODY_BYTES;
|
||||
const hostApiUrl =
|
||||
input.hostApiUrl?.trim() ||
|
||||
process.env.PAPERCLIP_RUNTIME_API_URL?.trim() ||
|
||||
process.env.PAPERCLIP_API_URL?.trim() ||
|
||||
resolveDefaultPaperclipApiUrl();
|
||||
const shellCommand = adapterExecutionTargetShellCommand(target);
|
||||
const runner = adapterExecutionTargetCommandRunner(target);
|
||||
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Starting sandbox callback bridge for ${input.adapterKey} in ${bridgeRuntimeDir}.\n`,
|
||||
);
|
||||
|
||||
const bridgeAsset = await createSandboxCallbackBridgeAsset();
|
||||
let server: Awaited<ReturnType<typeof startSandboxCallbackBridgeServer>> | null = null;
|
||||
let worker: Awaited<ReturnType<typeof startSandboxCallbackBridgeWorker>> | null = null;
|
||||
try {
|
||||
const client = createCommandManagedSandboxCallbackBridgeQueueClient({
|
||||
runner,
|
||||
remoteCwd: target.remoteCwd,
|
||||
timeoutMs: adapterExecutionTargetTimeoutMs(target),
|
||||
shellCommand,
|
||||
});
|
||||
// PAPERCLIP_BRIDGE_DEBUG opts into verbose stdout logs of every bridge
|
||||
// proxy request/response. The query string is logged verbatim, so callers
|
||||
// who pass auth tokens or other sensitive values as query parameters
|
||||
// should be aware those values appear in the host process's stdout when
|
||||
// this flag is enabled. Only intended for active debugging in trusted
|
||||
// environments.
|
||||
const bridgeDebugEnabled = isBridgeDebugEnabled(process.env);
|
||||
worker = await startSandboxCallbackBridgeWorker({
|
||||
client,
|
||||
queueDir,
|
||||
maxBodyBytes,
|
||||
handleRequest: async (request) => {
|
||||
const method = request.method.trim().toUpperCase() || "GET";
|
||||
if (bridgeDebugEnabled) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Bridge proxy ${method} ${request.path}${request.query ? `?${request.query}` : ""}\n`,
|
||||
);
|
||||
}
|
||||
const headers = new Headers();
|
||||
for (const [key, value] of Object.entries(request.headers)) {
|
||||
if (value.trim().length === 0) continue;
|
||||
headers.set(key, value);
|
||||
}
|
||||
headers.set("authorization", `Bearer ${hostApiToken}`);
|
||||
headers.set("x-paperclip-run-id", input.runId);
|
||||
const response = await fetch(buildBridgeForwardUrl(hostApiUrl, request), {
|
||||
method,
|
||||
headers,
|
||||
...(method === "GET" || method === "HEAD" ? {} : { body: request.body }),
|
||||
signal: AbortSignal.timeout(30_000),
|
||||
});
|
||||
if (bridgeDebugEnabled) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Bridge proxy response ${response.status} for ${method} ${request.path}${request.query ? `?${request.query}` : ""}\n`,
|
||||
);
|
||||
}
|
||||
return {
|
||||
status: response.status,
|
||||
headers: buildBridgeResponseHeaders(response),
|
||||
body: await readBridgeForwardResponseBody(response, maxBodyBytes),
|
||||
};
|
||||
},
|
||||
});
|
||||
server = await startSandboxCallbackBridgeServer({
|
||||
runner,
|
||||
remoteCwd: target.remoteCwd,
|
||||
assetRemoteDir,
|
||||
queueDir,
|
||||
bridgeToken,
|
||||
bridgeAsset,
|
||||
timeoutMs: adapterExecutionTargetTimeoutMs(target),
|
||||
maxBodyBytes,
|
||||
shellCommand,
|
||||
});
|
||||
} catch (error) {
|
||||
await Promise.allSettled([
|
||||
server?.stop(),
|
||||
worker?.stop(),
|
||||
bridgeAsset.cleanup(),
|
||||
]);
|
||||
throw error;
|
||||
}
|
||||
|
||||
return {
|
||||
env: {
|
||||
PAPERCLIP_API_URL: server.baseUrl,
|
||||
PAPERCLIP_API_KEY: bridgeToken,
|
||||
PAPERCLIP_API_BRIDGE_MODE: "queue_v1",
|
||||
},
|
||||
stop: async () => {
|
||||
await Promise.allSettled([
|
||||
server?.stop(),
|
||||
]);
|
||||
await Promise.allSettled([
|
||||
worker?.stop(),
|
||||
bridgeAsset.cleanup(),
|
||||
]);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -20,11 +20,14 @@ export type {
|
||||
AdapterSkillContext,
|
||||
AdapterSessionCodec,
|
||||
AdapterModel,
|
||||
AdapterModelProfileKey,
|
||||
AdapterModelProfileDefinition,
|
||||
HireApprovedPayload,
|
||||
HireApprovedHookResult,
|
||||
ConfigFieldOption,
|
||||
ConfigFieldSchema,
|
||||
AdapterConfigSchema,
|
||||
AdapterRuntimeCommandSpec,
|
||||
ServerAdapterModule,
|
||||
QuotaWindow,
|
||||
ProviderQuotaResult,
|
||||
@@ -53,4 +56,20 @@ export {
|
||||
redactHomePathUserSegmentsInValue,
|
||||
redactTranscriptEntryPaths,
|
||||
} from "./log-redaction.js";
|
||||
export {
|
||||
REDACTED_COMMAND_TEXT_VALUE,
|
||||
redactCommandText,
|
||||
} from "./command-redaction.js";
|
||||
export { inferOpenAiCompatibleBiller } from "./billing.js";
|
||||
// Keep the root adapter-utils entry browser-safe because the UI imports it.
|
||||
// The sandbox callback bridge stays available via its dedicated subpath export.
|
||||
export type {
|
||||
SandboxCallbackBridgeRequest,
|
||||
SandboxCallbackBridgeResponse,
|
||||
SandboxCallbackBridgeAsset,
|
||||
SandboxCallbackBridgeDirectories,
|
||||
SandboxCallbackBridgeRouteRule,
|
||||
SandboxCallbackBridgeQueueClient,
|
||||
SandboxCallbackBridgeWorkerHandle,
|
||||
StartedSandboxCallbackBridgeServer,
|
||||
} from "./sandbox-callback-bridge.js";
|
||||
|
||||
@@ -44,7 +44,6 @@ export function buildRemoteExecutionSessionIdentity(spec: SshRemoteExecutionSpec
|
||||
port: spec.port,
|
||||
username: spec.username,
|
||||
remoteCwd: spec.remoteCwd,
|
||||
...(spec.paperclipApiUrl ? { paperclipApiUrl: spec.paperclipApiUrl } : {}),
|
||||
} as const;
|
||||
}
|
||||
|
||||
@@ -58,8 +57,7 @@ export function remoteExecutionSessionMatches(saved: unknown, current: SshRemote
|
||||
asString(parsedSaved.host) === currentIdentity.host &&
|
||||
asNumber(parsedSaved.port) === currentIdentity.port &&
|
||||
asString(parsedSaved.username) === currentIdentity.username &&
|
||||
asString(parsedSaved.remoteCwd) === currentIdentity.remoteCwd &&
|
||||
asString(parsedSaved.paperclipApiUrl) === asString(currentIdentity.paperclipApiUrl)
|
||||
asString(parsedSaved.remoteCwd) === currentIdentity.remoteCwd
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
616
packages/adapter-utils/src/sandbox-callback-bridge.test.ts
Normal file
@@ -0,0 +1,616 @@
|
||||
import { execFile as execFileCallback } from "node:child_process";
|
||||
import { mkdir, mkdtemp, readFile, readdir, rm, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { prepareCommandManagedRuntime } from "./command-managed-runtime.js";
|
||||
import {
|
||||
createFileSystemSandboxCallbackBridgeQueueClient,
|
||||
createSandboxCallbackBridgeAsset,
|
||||
createSandboxCallbackBridgeToken,
|
||||
sandboxCallbackBridgeDirectories,
|
||||
startSandboxCallbackBridgeServer,
|
||||
startSandboxCallbackBridgeWorker,
|
||||
} from "./sandbox-callback-bridge.js";
|
||||
import type { RunProcessResult } from "./server-utils.js";
|
||||
|
||||
const execFile = promisify(execFileCallback);
|
||||
|
||||
describe("sandbox callback bridge", () => {
  // Temp directories and async teardown hooks registered by each test.
  // afterEach drains both stacks so a failing test cannot leak servers,
  // workers, or tmp dirs into the next test.
  const cleanupDirs: string[] = [];
  const cleanupFns: Array<() => Promise<void>> = [];

  // Builds a minimal in-process runner that satisfies the runtime's runner
  // interface by exec'ing commands locally via node:child_process.
  function createExecRunner() {
    return {
      execute: async (input: {
        command: string;
        args?: string[];
        cwd?: string;
        env?: Record<string, string>;
        stdin?: string;
        timeoutMs?: number;
      }): Promise<RunProcessResult> => {
        const startedAt = new Date().toISOString();
        const env = {
          ...process.env,
          ...input.env,
        };
        // Resolve bare shell names to absolute paths so execFile does not
        // depend on PATH lookup inside the test environment.
        const command =
          input.command === "sh" ? "/bin/sh" : input.command === "bash" ? "/bin/bash" : input.command;
        const args = [...(input.args ?? [])];
        // execFile has no stdin plumbing here, so for `sh/bash -lc <script>`
        // invocations the stdin payload is smuggled in through an env var and
        // piped into the original script.
        if (
          input.stdin != null &&
          (input.command === "sh" || input.command === "bash") &&
          args[0] === "-lc" &&
          typeof args[1] === "string"
        ) {
          env.PAPERCLIP_TEST_STDIN = input.stdin;
          args[1] = `printf '%s' \"$PAPERCLIP_TEST_STDIN\" | (${args[1]})`;
        }
        try {
          const result = await execFile(command, args, {
            cwd: input.cwd,
            env,
            maxBuffer: 32 * 1024 * 1024,
            timeout: input.timeoutMs,
          });
          return {
            exitCode: 0,
            signal: null,
            timedOut: false,
            stdout: result.stdout,
            stderr: result.stderr,
            pid: null,
            startedAt,
          };
        } catch (error) {
          // Promisified execFile rejects with stdout/stderr attached; map the
          // failure back into the RunProcessResult shape instead of throwing.
          const err = error as NodeJS.ErrnoException & {
            stdout?: string;
            stderr?: string;
            code?: string | number | null;
            signal?: NodeJS.Signals | null;
            killed?: boolean;
          };
          return {
            exitCode: typeof err.code === "number" ? err.code : null,
            signal: err.signal ?? null,
            timedOut: Boolean(err.killed && input.timeoutMs),
            stdout: err.stdout ?? "",
            stderr: err.stderr ?? "",
            pid: null,
            startedAt,
          };
        }
      },
    };
  }

  // Polls `directory` until a *.json file appears and returns its basename.
  // Throws after `timeoutMs` so a hung bridge fails the test promptly.
  async function waitForJsonFile(directory: string, timeoutMs = 2_000): Promise<string> {
    const deadline = Date.now() + timeoutMs;
    while (Date.now() < deadline) {
      const entries = await readdir(directory).catch(() => []);
      const match = entries.find((entry) => entry.endsWith(".json"));
      if (match) return match;
      await new Promise((resolve) => setTimeout(resolve, 10));
    }
    throw new Error(`Timed out waiting for a JSON file in ${directory}.`);
  }

  afterEach(async () => {
    // Run teardown hooks LIFO (server stopped before worker, etc.), ignoring
    // individual failures so one broken cleanup does not mask the test result.
    while (cleanupFns.length > 0) {
      const cleanup = cleanupFns.pop();
      if (!cleanup) continue;
      await cleanup().catch(() => undefined);
    }
    while (cleanupDirs.length > 0) {
      const dir = cleanupDirs.pop();
      if (!dir) continue;
      await rm(dir, { recursive: true, force: true }).catch(() => undefined);
    }
  });

  // End-to-end happy path: sandbox HTTP request -> queue file -> host worker
  // -> response file -> sandbox HTTP response. Also verifies that the bridge
  // token and non-allowlisted headers never reach the host handler.
  it("round-trips localhost bridge requests over the sandbox queue without forwarding the bridge token", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-runtime-"));
    cleanupDirs.push(rootDir);

    const localWorkspaceDir = path.join(rootDir, "local-workspace");
    const remoteWorkspaceDir = path.join(rootDir, "remote-workspace");
    await mkdir(localWorkspaceDir, { recursive: true });
    await mkdir(remoteWorkspaceDir, { recursive: true });
    await writeFile(path.join(localWorkspaceDir, "README.md"), "bridge test\n", "utf8");

    const runner = createExecRunner();

    const bridgeAsset = await createSandboxCallbackBridgeAsset();
    cleanupFns.push(bridgeAsset.cleanup);

    const prepared = await prepareCommandManagedRuntime({
      runner,
      spec: {
        remoteCwd: remoteWorkspaceDir,
        timeoutMs: 30_000,
      },
      adapterKey: "codex",
      workspaceLocalDir: localWorkspaceDir,
      assets: [
        {
          key: "bridge",
          localDir: bridgeAsset.localDir,
        },
      ],
    });

    const queueDir = path.posix.join(prepared.runtimeRootDir, "paperclip-bridge");
    // NOTE(review): `directories` is unused in this test; consider dropping it.
    const directories = sandboxCallbackBridgeDirectories(queueDir);
    const bridgeToken = createSandboxCallbackBridgeToken();
    const seenRequests: Array<{
      method: string;
      path: string;
      query: string;
      headers: Record<string, string>;
      body: string;
    }> = [];

    const worker = await startSandboxCallbackBridgeWorker({
      client: createFileSystemSandboxCallbackBridgeQueueClient(),
      queueDir,
      authorizeRequest: async (request) =>
        request.path === "/api/agents/me" ? null : `Route not allowed: ${request.method} ${request.path}`,
      handleRequest: async (request) => {
        // Record exactly what the host saw so header filtering can be asserted.
        seenRequests.push({
          method: request.method,
          path: request.path,
          query: request.query,
          headers: request.headers,
          body: request.body,
        });
        return {
          status: 200,
          headers: {
            "content-type": "application/json",
            etag: '"bridge-rev-1"',
            "last-modified": "Tue, 01 Apr 2025 00:00:00 GMT",
          },
          body: JSON.stringify({
            ok: true,
            method: request.method,
            path: request.path,
          }),
        };
      },
    });
    cleanupFns.push(async () => {
      await worker.stop();
    });

    const bridge = await startSandboxCallbackBridgeServer({
      runner,
      remoteCwd: remoteWorkspaceDir,
      assetRemoteDir: prepared.assetDirs.bridge,
      queueDir,
      bridgeToken,
      timeoutMs: 30_000,
    });
    cleanupFns.push(async () => {
      await bridge.stop();
    });

    // Authorized request: extra x-* headers are sent but must not be forwarded.
    const okResponse = await fetch(`${bridge.baseUrl}/api/agents/me?view=compact`, {
      headers: {
        authorization: `Bearer ${bridgeToken}`,
        accept: "application/json",
        "if-none-match": '"client-cache-key"',
        "x-paperclip-run-id": "run-bridge-1",
        "x-bridge-debug": "drop-me",
      },
    });
    expect(okResponse.status).toBe(200);
    expect(okResponse.headers.get("content-type")).toContain("application/json");
    expect(okResponse.headers.get("etag")).toBe('"bridge-rev-1"');
    expect(okResponse.headers.get("last-modified")).toBe("Tue, 01 Apr 2025 00:00:00 GMT");
    await expect(okResponse.json()).resolves.toMatchObject({
      ok: true,
      method: "GET",
      path: "/api/agents/me",
    });

    // Request denied by the authorizeRequest policy -> 403 with the policy's message.
    const deniedResponse = await fetch(`${bridge.baseUrl}/api/issues/issue-1`, {
      method: "PATCH",
      headers: {
        authorization: `Bearer ${bridgeToken}`,
        "content-type": "application/json",
      },
      body: JSON.stringify({ status: "in_progress" }),
    });
    expect(deniedResponse.status).toBe(403);
    await expect(deniedResponse.json()).resolves.toMatchObject({
      error: "Route not allowed: PATCH /api/issues/issue-1",
    });

    // Wrong bearer token -> rejected by the in-sandbox server itself (401).
    const unauthorizedResponse = await fetch(`${bridge.baseUrl}/api/agents/me`, {
      headers: {
        authorization: "Bearer wrong-token",
      },
    });
    expect(unauthorizedResponse.status).toBe(401);
    await expect(unauthorizedResponse.json()).resolves.toMatchObject({
      error: "Invalid bridge token.",
    });

    // Only the single authorized request reached the host handler, with the
    // authorization and x-paperclip-run-id headers stripped.
    expect(seenRequests).toHaveLength(1);
    expect(seenRequests[0]).toMatchObject({
      method: "GET",
      path: "/api/agents/me",
      query: "?view=compact",
      body: "",
      headers: {
        accept: "application/json",
        "if-none-match": '"client-cache-key"',
      },
    });
    expect(seenRequests[0]?.headers.authorization).toBeUndefined();
    expect(seenRequests[0]?.headers["x-paperclip-run-id"]).toBeUndefined();
  });

  // With no authorizeRequest override, the built-in route allowlist applies
  // and a non-allowlisted request is rejected without invoking handleRequest.
  it("denies non-allowlisted requests by default", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-default-policy-"));
    cleanupDirs.push(rootDir);

    const queueDir = path.posix.join(rootDir, "queue");
    const directories = sandboxCallbackBridgeDirectories(queueDir);
    let handled = 0;

    const worker = await startSandboxCallbackBridgeWorker({
      client: createFileSystemSandboxCallbackBridgeQueueClient(),
      queueDir,
      handleRequest: async () => {
        handled += 1;
        return {
          status: 200,
          body: "should not happen",
        };
      },
    });

    // Drop a pre-serialized request file straight into the queue, bypassing
    // the in-sandbox HTTP server.
    await writeFile(
      path.posix.join(directories.requestsDir, "req-1.json"),
      `${JSON.stringify({
        id: "req-1",
        method: "DELETE",
        path: "/api/secrets",
        query: "",
        headers: {},
        body: "",
        createdAt: new Date().toISOString(),
      })}\n`,
      "utf8",
    );

    await worker.stop({ drainTimeoutMs: 1_000 });

    const response = JSON.parse(
      await readFile(path.posix.join(directories.responsesDir, "req-1.json"), "utf8"),
    ) as { status: number; body: string };
    expect(handled).toBe(0);
    expect(response.status).toBe(403);
    expect(JSON.parse(response.body)).toEqual({
      error: "Route not allowed: DELETE /api/secrets",
    });
  });

  // stop() must finish processing requests that were already queued before the
  // stop call, writing a response file for each.
  it("drains already-queued requests on stop", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-drain-"));
    cleanupDirs.push(rootDir);

    const queueDir = path.posix.join(rootDir, "queue");
    const directories = sandboxCallbackBridgeDirectories(queueDir);
    const processed: string[] = [];

    const worker = await startSandboxCallbackBridgeWorker({
      client: createFileSystemSandboxCallbackBridgeQueueClient(),
      queueDir,
      authorizeRequest: async () => null,
      handleRequest: async (request) => {
        processed.push(request.id);
        // Small delay so both requests are still pending when stop() begins.
        await new Promise((resolve) => setTimeout(resolve, 25));
        return {
          status: 200,
          body: request.id,
        };
      },
    });

    await writeFile(
      path.posix.join(directories.requestsDir, "req-a.json"),
      `${JSON.stringify({
        id: "req-a",
        method: "GET",
        path: "/api/agents/me",
        query: "",
        headers: {},
        body: "",
        createdAt: new Date().toISOString(),
      })}\n`,
      "utf8",
    );
    await writeFile(
      path.posix.join(directories.requestsDir, "req-b.json"),
      `${JSON.stringify({
        id: "req-b",
        method: "GET",
        path: "/api/agents/me",
        query: "",
        headers: {},
        body: "",
        createdAt: new Date().toISOString(),
      })}\n`,
      "utf8",
    );

    await worker.stop({ drainTimeoutMs: 1_000 });

    expect(processed).toEqual(["req-a", "req-b"]);
    await expect(readFile(path.posix.join(directories.responsesDir, "req-a.json"), "utf8")).resolves.toContain("\"req-a\"");
    await expect(readFile(path.posix.join(directories.responsesDir, "req-b.json"), "utf8")).resolves.toContain("\"req-b\"");
  });

  // When stop()'s drain deadline expires, any still-queued request gets a
  // synthesized 503 response file instead of being silently dropped.
  it("writes fast 503 responses for queued requests that miss the drain deadline", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-drain-timeout-"));
    cleanupDirs.push(rootDir);

    const queueDir = path.posix.join(rootDir, "queue");
    const directories = sandboxCallbackBridgeDirectories(queueDir);
    const processed: string[] = [];

    const worker = await startSandboxCallbackBridgeWorker({
      client: createFileSystemSandboxCallbackBridgeQueueClient(),
      queueDir,
      authorizeRequest: async () => null,
      handleRequest: async (request) => {
        processed.push(request.id);
        // Handler is deliberately slower (100ms) than the drain deadline (10ms)
        // so only the first request completes.
        await new Promise((resolve) => setTimeout(resolve, 100));
        return {
          status: 200,
          body: request.id,
        };
      },
    });

    await writeFile(
      path.posix.join(directories.requestsDir, "req-a.json"),
      `${JSON.stringify({
        id: "req-a",
        method: "GET",
        path: "/api/agents/me",
        query: "",
        headers: {},
        body: "",
        createdAt: new Date().toISOString(),
      })}\n`,
      "utf8",
    );
    await writeFile(
      path.posix.join(directories.requestsDir, "req-b.json"),
      `${JSON.stringify({
        id: "req-b",
        method: "GET",
        path: "/api/agents/me",
        query: "",
        headers: {},
        body: "",
        createdAt: new Date().toISOString(),
      })}\n`,
      "utf8",
    );

    // Wait (up to 250ms) until the worker has actually picked up req-a so the
    // stop() call races only the in-flight handler, not queue discovery.
    for (let attempt = 0; attempt < 50 && processed.length === 0; attempt += 1) {
      await new Promise((resolve) => setTimeout(resolve, 5));
    }

    await worker.stop({ drainTimeoutMs: 10 });

    expect(processed).toEqual(["req-a"]);
    await expect(readFile(path.posix.join(directories.responsesDir, "req-a.json"), "utf8")).resolves.toContain("\"req-a\"");
    await expect(readFile(path.posix.join(directories.responsesDir, "req-b.json"), "utf8")).resolves.toContain(
      "Bridge worker stopped before request could be handled.",
    );
  });

  // The in-sandbox server itself enforces the queue-depth cap (503) and the
  // JSON-only body policy (415) before anything reaches the host.
  it("rejects non-JSON request bodies and full queues at the bridge server", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-server-guards-"));
    cleanupDirs.push(rootDir);

    const localWorkspaceDir = path.join(rootDir, "local-workspace");
    const remoteWorkspaceDir = path.join(rootDir, "remote-workspace");
    await mkdir(localWorkspaceDir, { recursive: true });
    await mkdir(remoteWorkspaceDir, { recursive: true });
    await writeFile(path.join(localWorkspaceDir, "README.md"), "bridge guard test\n", "utf8");

    const runner = createExecRunner();

    const bridgeAsset = await createSandboxCallbackBridgeAsset();
    cleanupFns.push(bridgeAsset.cleanup);
    const prepared = await prepareCommandManagedRuntime({
      runner,
      spec: {
        remoteCwd: remoteWorkspaceDir,
        timeoutMs: 30_000,
      },
      adapterKey: "codex",
      workspaceLocalDir: localWorkspaceDir,
      assets: [{ key: "bridge", localDir: bridgeAsset.localDir }],
    });

    const queueDir = path.posix.join(prepared.runtimeRootDir, "paperclip-bridge");
    const directories = sandboxCallbackBridgeDirectories(queueDir);
    const bridgeToken = createSandboxCallbackBridgeToken();

    const bridge = await startSandboxCallbackBridgeServer({
      runner,
      remoteCwd: remoteWorkspaceDir,
      assetRemoteDir: prepared.assetDirs.bridge,
      queueDir,
      bridgeToken,
      timeoutMs: 30_000,
      maxQueueDepth: 1,
    });
    cleanupFns.push(async () => {
      await bridge.stop();
    });

    // Pre-fill the queue to its depth of 1 so the next request sees it full.
    await writeFile(
      path.posix.join(directories.requestsDir, "existing.json"),
      `${JSON.stringify({
        id: "existing",
        method: "GET",
        path: "/api/agents/me",
        query: "",
        headers: {},
        body: "",
        createdAt: new Date().toISOString(),
      })}\n`,
      "utf8",
    );

    const queueFullResponse = await fetch(`${bridge.baseUrl}/api/agents/me`, {
      headers: {
        authorization: `Bearer ${bridgeToken}`,
      },
    });
    expect(queueFullResponse.status).toBe(503);
    await expect(queueFullResponse.json()).resolves.toEqual({
      error: "Bridge request queue is full.",
    });

    await rm(path.posix.join(directories.requestsDir, "existing.json"), { force: true });

    const nonJsonResponse = await fetch(`${bridge.baseUrl}/api/issues/issue-1/comments`, {
      method: "POST",
      headers: {
        authorization: `Bearer ${bridgeToken}`,
        "content-type": "text/plain",
      },
      body: "not json",
    });
    expect(nonJsonResponse.status).toBe(415);
    await expect(nonJsonResponse.json()).resolves.toEqual({
      error: "Bridge only accepts JSON request bodies.",
    });
  });

  // With no worker running, the server's responseTimeoutMs elapses and the
  // sandbox caller gets a 502 instead of hanging.
  it("returns a 502 when the host response times out", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-timeout-"));
    cleanupDirs.push(rootDir);

    const localWorkspaceDir = path.join(rootDir, "local-workspace");
    const remoteWorkspaceDir = path.join(rootDir, "remote-workspace");
    await mkdir(localWorkspaceDir, { recursive: true });
    await mkdir(remoteWorkspaceDir, { recursive: true });
    await writeFile(path.join(localWorkspaceDir, "README.md"), "bridge timeout test\n", "utf8");

    const runner = createExecRunner();
    const bridgeAsset = await createSandboxCallbackBridgeAsset();
    cleanupFns.push(bridgeAsset.cleanup);
    const prepared = await prepareCommandManagedRuntime({
      runner,
      spec: {
        remoteCwd: remoteWorkspaceDir,
        timeoutMs: 30_000,
      },
      adapterKey: "codex",
      workspaceLocalDir: localWorkspaceDir,
      assets: [{ key: "bridge", localDir: bridgeAsset.localDir }],
    });

    const queueDir = path.posix.join(prepared.runtimeRootDir, "paperclip-bridge");
    const bridgeToken = createSandboxCallbackBridgeToken();
    const bridge = await startSandboxCallbackBridgeServer({
      runner,
      remoteCwd: remoteWorkspaceDir,
      assetRemoteDir: prepared.assetDirs.bridge,
      queueDir,
      bridgeToken,
      timeoutMs: 30_000,
      pollIntervalMs: 10,
      responseTimeoutMs: 75,
    });
    cleanupFns.push(async () => {
      await bridge.stop();
    });

    const response = await fetch(`${bridge.baseUrl}/api/agents/me`, {
      headers: {
        authorization: `Bearer ${bridgeToken}`,
      },
    });

    expect(response.status).toBe(502);
    await expect(response.json()).resolves.toEqual({
      error: "Timed out waiting for host bridge response.",
    });
  });

  // A truncated/invalid response file from the host must surface as a 502
  // carrying the JSON parse error, not crash the in-sandbox server.
  it("returns a 502 for malformed host response files", async () => {
    const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-malformed-response-"));
    cleanupDirs.push(rootDir);

    const localWorkspaceDir = path.join(rootDir, "local-workspace");
    const remoteWorkspaceDir = path.join(rootDir, "remote-workspace");
    await mkdir(localWorkspaceDir, { recursive: true });
    await mkdir(remoteWorkspaceDir, { recursive: true });
    await writeFile(path.join(localWorkspaceDir, "README.md"), "bridge malformed response test\n", "utf8");

    const runner = createExecRunner();
    const bridgeAsset = await createSandboxCallbackBridgeAsset();
    cleanupFns.push(bridgeAsset.cleanup);
    const prepared = await prepareCommandManagedRuntime({
      runner,
      spec: {
        remoteCwd: remoteWorkspaceDir,
        timeoutMs: 30_000,
      },
      adapterKey: "codex",
      workspaceLocalDir: localWorkspaceDir,
      assets: [{ key: "bridge", localDir: bridgeAsset.localDir }],
    });

    const queueDir = path.posix.join(prepared.runtimeRootDir, "paperclip-bridge");
    const directories = sandboxCallbackBridgeDirectories(queueDir);
    const bridgeToken = createSandboxCallbackBridgeToken();
    const bridge = await startSandboxCallbackBridgeServer({
      runner,
      remoteCwd: remoteWorkspaceDir,
      assetRemoteDir: prepared.assetDirs.bridge,
      queueDir,
      bridgeToken,
      timeoutMs: 30_000,
      pollIntervalMs: 10,
      responseTimeoutMs: 1_000,
    });
    cleanupFns.push(async () => {
      await bridge.stop();
    });

    const responsePromise = fetch(`${bridge.baseUrl}/api/agents/me`, {
      headers: {
        authorization: `Bearer ${bridgeToken}`,
      },
    });

    // Impersonate the host worker: once the request lands in the queue, write
    // a deliberately truncated JSON document as its response.
    const requestFile = await waitForJsonFile(directories.requestsDir);
    await writeFile(
      path.posix.join(directories.responsesDir, requestFile),
      '{"status":200,"headers":{"content-type":"application/json"},"body"',
      "utf8",
    );

    const response = await responsePromise;
    expect(response.status).toBe(502);
    await expect(response.json()).resolves.toMatchObject({
      // Exact wording of JSON parse errors differs across runtimes.
      error: expect.stringMatching(/JSON|Unexpected|Unterminated/i),
    });
  });
});
|
||||
New file: packages/adapter-utils/src/sandbox-callback-bridge.ts (831 lines added).
|
||||
import { randomBytes, randomUUID } from "node:crypto";
|
||||
import { promises as fs } from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
|
||||
import type { CommandManagedRuntimeRunner } from "./command-managed-runtime.js";
|
||||
import { preferredShellForSandbox } from "./sandbox-shell.js";
|
||||
import type { RunProcessResult } from "./server-utils.js";
|
||||
|
||||
// Tunable defaults for the queue-backed bridge protocol; each has a
// corresponding override in the start/build option objects below.
const DEFAULT_BRIDGE_TOKEN_BYTES = 24;
const DEFAULT_BRIDGE_POLL_INTERVAL_MS = 100;
const DEFAULT_BRIDGE_RESPONSE_TIMEOUT_MS = 30_000;
// NOTE(review): not referenced in this chunk — presumably used by the
// server/worker stop paths further down; confirm before removing.
const DEFAULT_BRIDGE_STOP_TIMEOUT_MS = 2_000;
const DEFAULT_BRIDGE_MAX_QUEUE_DEPTH = 64;
const DEFAULT_BRIDGE_MAX_BODY_BYTES = 256 * 1024;
// Base64 payloads are appended to remote files in chunks of this many
// characters; 32 KiB is a multiple of 4, so base64 quads are never split.
const REMOTE_WRITE_BASE64_CHUNK_SIZE = 32 * 1024;
// Filename of the bridge server script materialized into the asset dir.
const SANDBOX_CALLBACK_BRIDGE_ENTRYPOINT = "paperclip-bridge-server.mjs";

/** Public alias for the default request-body size cap enforced by the bridge. */
export const DEFAULT_SANDBOX_CALLBACK_BRIDGE_MAX_BODY_BYTES = DEFAULT_BRIDGE_MAX_BODY_BYTES;

/** One route-allowlist entry: an exact (upper-case) HTTP method plus a path pattern. */
export interface SandboxCallbackBridgeRouteRule {
  method: string;
  path: RegExp;
}

/**
 * Routes the sandbox may call back into when no custom `authorizeRequest`
 * policy is supplied; anything unmatched is rejected with a 403.
 */
export const DEFAULT_SANDBOX_CALLBACK_BRIDGE_ROUTE_ALLOWLIST: readonly SandboxCallbackBridgeRouteRule[] = [
  { method: "GET", path: /^\/api\/agents\/me$/ },
  { method: "GET", path: /^\/api\/issues\/[^/]+\/heartbeat-context$/ },
  { method: "GET", path: /^\/api\/issues\/[^/]+\/comments(?:\/[^/]+)?$/ },
  { method: "GET", path: /^\/api\/issues\/[^/]+\/documents(?:\/[^/]+)?$/ },
  { method: "POST", path: /^\/api\/issues\/[^/]+\/checkout$/ },
  { method: "POST", path: /^\/api\/issues\/[^/]+\/comments$/ },
  { method: "POST", path: /^\/api\/issues\/[^/]+\/interactions(?:\/[^/]+)?$/ },
  { method: "PATCH", path: /^\/api\/issues\/[^/]+$/ },
] as const;

/**
 * Request headers (lower-case) forwarded from the sandbox to the host; all
 * other headers — notably `authorization` — are stripped before queuing.
 */
export const DEFAULT_SANDBOX_CALLBACK_BRIDGE_HEADER_ALLOWLIST = [
  "accept",
  "content-type",
  "if-match",
  "if-none-match",
] as const;

/** One HTTP request serialized into the queue by the in-sandbox server. */
export interface SandboxCallbackBridgeRequest {
  id: string;
  method: string;
  path: string;
  // Query string including the leading "?" when present, "" otherwise.
  query: string;
  headers: Record<string, string>;
  /**
   * UTF-8 body contents. The bridge rejects non-JSON request bodies; binary
   * payloads are intentionally out of scope for this queue protocol.
   */
  body: string;
  createdAt: string;
}

/** The host's serialized reply for a queued request, keyed by the request id. */
export interface SandboxCallbackBridgeResponse {
  id: string;
  status: number;
  headers: Record<string, string>;
  body: string;
  completedAt: string;
}

/** Local on-disk copy of the bridge server script to ship into the sandbox. */
export interface SandboxCallbackBridgeAsset {
  localDir: string;
  entrypoint: string;
  // Removes the temporary asset directory; safe to call more than once.
  cleanup(): Promise<void>;
}

/** Well-known file layout under the bridge queue root directory. */
export interface SandboxCallbackBridgeDirectories {
  rootDir: string;
  requestsDir: string;
  responsesDir: string;
  logsDir: string;
  readyFile: string;
  pidFile: string;
  logFile: string;
}

/**
 * Minimal file operations the host worker needs to drive the queue,
 * abstracted so the queue can live on the local filesystem or behind a
 * command-managed remote runner.
 */
export interface SandboxCallbackBridgeQueueClient {
  makeDir(remotePath: string): Promise<void>;
  listJsonFiles(remotePath: string): Promise<string[]>;
  readTextFile(remotePath: string): Promise<string>;
  writeTextFile(remotePath: string, body: string): Promise<void>;
  rename(fromPath: string, toPath: string): Promise<void>;
  remove(remotePath: string): Promise<void>;
}

/** Handle to the host-side worker; stop() drains queued requests first. */
export interface SandboxCallbackBridgeWorkerHandle {
  stop(options?: { drainTimeoutMs?: number }): Promise<void>;
}

/** A running in-sandbox HTTP server plus metadata to reach and stop it. */
export interface StartedSandboxCallbackBridgeServer {
  baseUrl: string;
  host: string;
  port: number;
  pid: number;
  directories: SandboxCallbackBridgeDirectories;
  stop(): Promise<void>;
}
|
||||
|
||||
function shellQuote(value: string) {
|
||||
return `'${value.replace(/'/g, `'"'"'`)}'`;
|
||||
}
|
||||
|
||||
function normalizeMethod(value: string | null | undefined): string {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim().toUpperCase() : "GET";
|
||||
}
|
||||
|
||||
function normalizeTimeoutMs(value: number | null | undefined, fallback: number): number {
|
||||
return typeof value === "number" && Number.isFinite(value) && value > 0 ? Math.trunc(value) : fallback;
|
||||
}
|
||||
|
||||
function toBuffer(bytes: Buffer | Uint8Array | ArrayBuffer): Buffer {
|
||||
if (Buffer.isBuffer(bytes)) return bytes;
|
||||
if (bytes instanceof ArrayBuffer) return Buffer.from(bytes);
|
||||
return Buffer.from(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
||||
}
|
||||
|
||||
function buildRunnerFailureMessage(action: string, result: RunProcessResult): string {
|
||||
const stderr = result.stderr.trim();
|
||||
const stdout = result.stdout.trim();
|
||||
const detail = stderr || stdout;
|
||||
if (result.timedOut) {
|
||||
return `${action} timed out${detail ? `: ${detail}` : ""}`;
|
||||
}
|
||||
return `${action} failed with exit code ${result.exitCode ?? "null"}${detail ? `: ${detail}` : ""}`;
|
||||
}
|
||||
|
||||
async function runShell(
|
||||
runner: CommandManagedRuntimeRunner,
|
||||
cwd: string,
|
||||
script: string,
|
||||
timeoutMs: number,
|
||||
shellCommand: "bash" | "sh" = "sh",
|
||||
): Promise<RunProcessResult> {
|
||||
return await runner.execute({
|
||||
command: shellCommand,
|
||||
args: ["-lc", script],
|
||||
cwd,
|
||||
timeoutMs,
|
||||
});
|
||||
}
|
||||
|
||||
function requireSuccessfulResult(action: string, result: RunProcessResult): RunProcessResult {
|
||||
if (!result.timedOut && result.exitCode === 0) return result;
|
||||
throw new Error(buildRunnerFailureMessage(action, result));
|
||||
}
|
||||
|
||||
function base64Chunks(body: string): string[] {
|
||||
const out: string[] = [];
|
||||
for (let offset = 0; offset < body.length; offset += REMOTE_WRITE_BASE64_CHUNK_SIZE) {
|
||||
out.push(body.slice(offset, offset + REMOTE_WRITE_BASE64_CHUNK_SIZE));
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
export function createSandboxCallbackBridgeToken(bytes = DEFAULT_BRIDGE_TOKEN_BYTES): string {
|
||||
return randomBytes(bytes).toString("base64url");
|
||||
}
|
||||
|
||||
export function authorizeSandboxCallbackBridgeRequestWithRoutes(
|
||||
request: Pick<SandboxCallbackBridgeRequest, "method" | "path">,
|
||||
routes: readonly SandboxCallbackBridgeRouteRule[] = DEFAULT_SANDBOX_CALLBACK_BRIDGE_ROUTE_ALLOWLIST,
|
||||
): string | null {
|
||||
const method = normalizeMethod(request.method);
|
||||
return routes.some((route) => route.method === method && route.path.test(request.path))
|
||||
? null
|
||||
: `Route not allowed: ${method} ${request.path}`;
|
||||
}
|
||||
|
||||
export function sanitizeSandboxCallbackBridgeHeaders(
|
||||
headers: Record<string, string>,
|
||||
allowlist: readonly string[] = DEFAULT_SANDBOX_CALLBACK_BRIDGE_HEADER_ALLOWLIST,
|
||||
): Record<string, string> {
|
||||
const allowed = new Set(allowlist.map((header) => header.toLowerCase()));
|
||||
return Object.fromEntries(
|
||||
Object.entries(headers).filter(([key]) => allowed.has(key.toLowerCase())),
|
||||
);
|
||||
}
|
||||
|
||||
export function sandboxCallbackBridgeDirectories(rootDir: string): SandboxCallbackBridgeDirectories {
|
||||
return {
|
||||
rootDir,
|
||||
requestsDir: path.posix.join(rootDir, "requests"),
|
||||
responsesDir: path.posix.join(rootDir, "responses"),
|
||||
logsDir: path.posix.join(rootDir, "logs"),
|
||||
readyFile: path.posix.join(rootDir, "ready.json"),
|
||||
pidFile: path.posix.join(rootDir, "server.pid"),
|
||||
logFile: path.posix.join(rootDir, "logs", "bridge.log"),
|
||||
};
|
||||
}
|
||||
|
||||
export function buildSandboxCallbackBridgeEnv(input: {
|
||||
queueDir: string;
|
||||
bridgeToken: string;
|
||||
host?: string;
|
||||
port?: number | null;
|
||||
pollIntervalMs?: number | null;
|
||||
responseTimeoutMs?: number | null;
|
||||
maxQueueDepth?: number | null;
|
||||
maxBodyBytes?: number | null;
|
||||
}): Record<string, string> {
|
||||
return {
|
||||
PAPERCLIP_API_BRIDGE_MODE: "queue_v1",
|
||||
PAPERCLIP_BRIDGE_QUEUE_DIR: input.queueDir,
|
||||
PAPERCLIP_BRIDGE_TOKEN: input.bridgeToken,
|
||||
PAPERCLIP_BRIDGE_HOST: input.host?.trim() || "127.0.0.1",
|
||||
PAPERCLIP_BRIDGE_PORT: String(input.port && input.port > 0 ? Math.trunc(input.port) : 0),
|
||||
PAPERCLIP_BRIDGE_POLL_INTERVAL_MS: String(
|
||||
normalizeTimeoutMs(input.pollIntervalMs, DEFAULT_BRIDGE_POLL_INTERVAL_MS),
|
||||
),
|
||||
PAPERCLIP_BRIDGE_RESPONSE_TIMEOUT_MS: String(
|
||||
normalizeTimeoutMs(input.responseTimeoutMs, DEFAULT_BRIDGE_RESPONSE_TIMEOUT_MS),
|
||||
),
|
||||
PAPERCLIP_BRIDGE_MAX_QUEUE_DEPTH: String(
|
||||
normalizeTimeoutMs(input.maxQueueDepth, DEFAULT_BRIDGE_MAX_QUEUE_DEPTH),
|
||||
),
|
||||
PAPERCLIP_BRIDGE_MAX_BODY_BYTES: String(
|
||||
normalizeTimeoutMs(input.maxBodyBytes, DEFAULT_BRIDGE_MAX_BODY_BYTES),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
export async function createSandboxCallbackBridgeAsset(): Promise<SandboxCallbackBridgeAsset> {
|
||||
const localDir = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-bridge-asset-"));
|
||||
const entrypoint = path.join(localDir, SANDBOX_CALLBACK_BRIDGE_ENTRYPOINT);
|
||||
await fs.writeFile(entrypoint, getSandboxCallbackBridgeServerSource(), "utf8");
|
||||
return {
|
||||
localDir,
|
||||
entrypoint,
|
||||
cleanup: async () => {
|
||||
await fs.rm(localDir, { recursive: true, force: true }).catch(() => undefined);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function createFileSystemSandboxCallbackBridgeQueueClient(): SandboxCallbackBridgeQueueClient {
|
||||
return {
|
||||
makeDir: async (remotePath) => {
|
||||
await fs.mkdir(remotePath, { recursive: true });
|
||||
},
|
||||
listJsonFiles: async (remotePath) => {
|
||||
const entries = await fs.readdir(remotePath, { withFileTypes: true }).catch(() => []);
|
||||
return entries
|
||||
.filter((entry) => entry.isFile() && entry.name.endsWith(".json"))
|
||||
.map((entry) => entry.name)
|
||||
.sort((left, right) => left.localeCompare(right));
|
||||
},
|
||||
readTextFile: async (remotePath) => await fs.readFile(remotePath, "utf8"),
|
||||
writeTextFile: async (remotePath, body) => {
|
||||
await fs.mkdir(path.posix.dirname(remotePath), { recursive: true });
|
||||
await fs.writeFile(remotePath, body, "utf8");
|
||||
},
|
||||
rename: async (fromPath, toPath) => {
|
||||
await fs.mkdir(path.posix.dirname(toPath), { recursive: true });
|
||||
await fs.rename(fromPath, toPath);
|
||||
},
|
||||
remove: async (remotePath) => {
|
||||
await fs.rm(remotePath, { recursive: true, force: true }).catch(() => undefined);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Queue client that manipulates the bridge queue inside a sandbox by running
 * POSIX shell commands through the provided runner. Every operation shells
 * out via `runShell`, and `runChecked` throws (via `requireSuccessfulResult`)
 * unless the command succeeds.
 *
 * File contents cross the shell boundary base64-encoded — uploaded in chunks
 * and decoded remotely, downloaded via `base64` and decoded locally — so
 * arbitrary bytes survive quoting and transport.
 *
 * @param input.runner command executor for the sandbox.
 * @param input.remoteCwd working directory for every shell invocation.
 * @param input.timeoutMs per-command timeout; defaults to
 *   DEFAULT_BRIDGE_RESPONSE_TIMEOUT_MS.
 * @param input.shellCommand "bash" or "sh"; normalized by
 *   `preferredShellForSandbox`.
 */
export function createCommandManagedSandboxCallbackBridgeQueueClient(input: {
  runner: CommandManagedRuntimeRunner;
  remoteCwd: string;
  timeoutMs?: number | null;
  shellCommand?: "bash" | "sh" | null;
}): SandboxCallbackBridgeQueueClient {
  const timeoutMs = normalizeTimeoutMs(input.timeoutMs, DEFAULT_BRIDGE_RESPONSE_TIMEOUT_MS);
  const shellCommand = preferredShellForSandbox(input.shellCommand);
  // Runs a script and fails fast: requireSuccessfulResult is expected to throw
  // on a non-success result (its exact contract lives elsewhere in this file).
  const runChecked = async (action: string, script: string) =>
    requireSuccessfulResult(action, await runShell(input.runner, input.remoteCwd, script, timeoutMs, shellCommand));

  return {
    makeDir: async (remotePath) => {
      await runChecked(`mkdir ${remotePath}`, `mkdir -p ${shellQuote(remotePath)}`);
    },
    listJsonFiles: async (remotePath) => {
      // Prints one basename per line for each regular *.json file. A missing
      // directory produces no output; the unmatched-glob case (the literal
      // pattern string) is filtered by the `[ -f "$file" ]` guard.
      const result = await runShell(
        input.runner,
        input.remoteCwd,
        [
          `if [ -d ${shellQuote(remotePath)} ]; then`,
          ` for file in ${shellQuote(remotePath)}/*.json; do`,
          ` [ -f "$file" ] || continue`,
          " basename \"$file\"",
          " done",
          "fi",
        ].join("\n"),
        timeoutMs,
        shellCommand,
      );
      requireSuccessfulResult(`list ${remotePath}`, result);
      // Deterministic ordering: trim, drop blanks, sort by locale compare
      // (mirrors the filesystem-backed client).
      return result.stdout
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => line.length > 0)
        .sort((left, right) => left.localeCompare(right));
    },
    readTextFile: async (remotePath) => {
      // Download via base64 so transport/newline mangling cannot corrupt the
      // payload; strip all whitespace before decoding (base64 may wrap lines).
      const result = await runChecked(`read ${remotePath}`, `base64 < ${shellQuote(remotePath)}`);
      return Buffer.from(result.stdout.replace(/\s+/g, ""), "base64").toString("utf8");
    },
    writeTextFile: async (remotePath, body) => {
      const remoteDir = path.posix.dirname(remotePath);
      const tempPath = `${remotePath}.paperclip-upload.b64`;
      // Create the parent dir and truncate any stale staging file.
      await runChecked(
        `prepare upload ${remotePath}`,
        `mkdir -p ${shellQuote(remoteDir)} && rm -f ${shellQuote(tempPath)} && : > ${shellQuote(tempPath)}`,
      );
      const base64Body = toBuffer(Buffer.from(body, "utf8")).toString("base64");
      // Append the base64 payload in chunks to stay under command-line length
      // limits; chunk sizing is delegated to base64Chunks.
      for (const chunk of base64Chunks(base64Body)) {
        await runChecked(
          `append upload chunk ${remotePath}`,
          `printf '%s' ${shellQuote(chunk)} >> ${shellQuote(tempPath)}`,
        );
      }
      // Decode into the final path, then remove the staging file.
      await runChecked(
        `finalize upload ${remotePath}`,
        `base64 -d < ${shellQuote(tempPath)} > ${shellQuote(remotePath)} && rm -f ${shellQuote(tempPath)}`,
      );
    },
    rename: async (fromPath, toPath) => {
      // Ensure the destination's parent exists, then move.
      await runChecked(
        `rename ${fromPath}`,
        `mkdir -p ${shellQuote(path.posix.dirname(toPath))} && mv ${shellQuote(fromPath)} ${shellQuote(toPath)}`,
      );
    },
    remove: async (remotePath) => {
      // rm -rf is idempotent; a missing path is not an error.
      await runChecked(`remove ${remotePath}`, `rm -rf ${shellQuote(remotePath)}`);
    },
  };
}
|
||||
|
||||
async function writeBridgeResponse(
|
||||
client: SandboxCallbackBridgeQueueClient,
|
||||
responsePath: string,
|
||||
response: SandboxCallbackBridgeResponse,
|
||||
) {
|
||||
const tempPath = `${responsePath}.tmp`;
|
||||
await client.writeTextFile(tempPath, `${JSON.stringify(response)}\n`);
|
||||
await client.rename(tempPath, responsePath);
|
||||
}
|
||||
|
||||
/**
 * Starts the host-side worker that drains the sandbox callback bridge queue.
 *
 * The worker polls `<queueDir>/requests` for `*.json` files dropped by the
 * in-sandbox bridge server, authorizes and dispatches each one to
 * `input.handleRequest`, and publishes the outcome into `<queueDir>/responses`
 * atomically via `writeBridgeResponse`. The processed request file is always
 * removed, whatever the outcome (400 on unparsable JSON, 403 on denial,
 * handler result on success, 502 on handler failure).
 *
 * @param input.client queue transport (filesystem- or shell-backed).
 * @param input.queueDir root of the bridge queue directory layout.
 * @param input.pollIntervalMs idle polling delay; defaults to
 *   DEFAULT_BRIDGE_POLL_INTERVAL_MS.
 * @param input.authorizeRequest returns a denial-reason string to reject a
 *   request (answered 403) or null to allow it; defaults to the route-based
 *   authorizer defined elsewhere in this module.
 * @param input.handleRequest produces the HTTP-like response for a request.
 * @param input.maxBodyBytes response-size cap; exceeding it turns the result
 *   into a 502. (Normalized via normalizeTimeoutMs — reused here as a generic
 *   "positive number or default" coercion.)
 * @returns a handle whose `stop` lets in-flight work drain (bounded by
 *   `drainTimeoutMs`), then answers every still-pending request with 503.
 */
export async function startSandboxCallbackBridgeWorker(input: {
  client: SandboxCallbackBridgeQueueClient;
  queueDir: string;
  pollIntervalMs?: number | null;
  authorizeRequest?: (request: SandboxCallbackBridgeRequest) => string | null | Promise<string | null>;
  handleRequest: (request: SandboxCallbackBridgeRequest) => Promise<{
    status: number;
    headers?: Record<string, string>;
    body?: string;
  }>;
  maxBodyBytes?: number | null;
}): Promise<SandboxCallbackBridgeWorkerHandle> {
  const pollIntervalMs = normalizeTimeoutMs(input.pollIntervalMs, DEFAULT_BRIDGE_POLL_INTERVAL_MS);
  const maxBodyBytes = normalizeTimeoutMs(input.maxBodyBytes, DEFAULT_BRIDGE_MAX_BODY_BYTES);
  const directories = sandboxCallbackBridgeDirectories(input.queueDir);
  // Ensure the whole queue layout exists before the poll loop starts.
  await input.client.makeDir(directories.rootDir);
  await input.client.makeDir(directories.requestsDir);
  await input.client.makeDir(directories.responsesDir);
  await input.client.makeDir(directories.logsDir);

  // Shared state between the poll loop and the returned stop() handle.
  let stopping = false;
  // Count of requests currently being processed; written but not read in this
  // function (kept, presumably, for future drain accounting — verify).
  let inFlight = 0;
  let settled = false;
  // stop() moves this deadline forward; the loop breaks once it passes.
  let stopDeadline = Number.POSITIVE_INFINITY;
  let settleResolve: (() => void) | null = null;
  // Resolved exactly once, when the poll loop exits (see the loop's finally).
  const settledPromise = new Promise<void>((resolve) => {
    settleResolve = resolve;
  });
  const authorizeRequest = input.authorizeRequest ??
    ((request: SandboxCallbackBridgeRequest) => authorizeSandboxCallbackBridgeRequestWithRoutes(request));

  // Handles one request file end-to-end: parse -> authorize -> dispatch ->
  // publish response -> delete the request file.
  const processRequestFile = async (fileName: string) => {
    const requestPath = path.posix.join(directories.requestsDir, fileName);
    const responsePath = path.posix.join(directories.responsesDir, fileName);
    const raw = await input.client.readTextFile(requestPath);
    let request: SandboxCallbackBridgeRequest;
    try {
      request = JSON.parse(raw) as SandboxCallbackBridgeRequest;
    } catch {
      // Unparsable payload: still answer (400) under the file-derived id so
      // the sandbox-side waiter is unblocked, then drop the request file.
      const requestId = fileName.replace(/\.json$/i, "") || randomUUID();
      await writeBridgeResponse(input.client, responsePath, {
        id: requestId,
        status: 400,
        headers: { "content-type": "application/json" },
        body: JSON.stringify({ error: "Invalid bridge request payload." }),
        completedAt: new Date().toISOString(),
      });
      await input.client.remove(requestPath);
      return;
    }

    const denialReason = await authorizeRequest(request);
    if (denialReason) {
      // Authorization denied: answer 403 with the reason and stop here.
      await writeBridgeResponse(input.client, responsePath, {
        id: request.id,
        status: 403,
        headers: { "content-type": "application/json" },
        body: JSON.stringify({ error: denialReason }),
        completedAt: new Date().toISOString(),
      });
      await input.client.remove(requestPath);
      return;
    }

    try {
      const result = await input.handleRequest(request);
      const responseBody = result.body ?? "";
      // Enforce the body cap here so the catch below converts it to a 502.
      if (Buffer.byteLength(responseBody, "utf8") > maxBodyBytes) {
        throw new Error(`Bridge response body exceeded the configured size limit of ${maxBodyBytes} bytes.`);
      }
      await writeBridgeResponse(input.client, responsePath, {
        id: request.id,
        status: result.status,
        headers: result.headers ?? {},
        body: responseBody,
        completedAt: new Date().toISOString(),
      });
    } catch (error) {
      // Handler failure (or oversized body): log and answer 502 so the
      // sandbox-side caller is never left waiting.
      console.warn(
        `[paperclip] sandbox callback bridge handler failed for ${request.id}: ${error instanceof Error ? error.message : String(error)}`,
      );
      await writeBridgeResponse(input.client, responsePath, {
        id: request.id,
        status: 502,
        headers: { "content-type": "application/json" },
        body: JSON.stringify({
          error: error instanceof Error ? error.message : String(error),
        }),
        completedAt: new Date().toISOString(),
      });
    } finally {
      // The request file is consumed no matter how processing ended.
      await input.client.remove(requestPath);
    }
  };

  // After shutdown: answer every request still sitting in the queue with 503
  // (best effort), preferring the request's own id when it parses.
  const failPendingRequests = async (message: string) => {
    const fileNames = await input.client.listJsonFiles(directories.requestsDir).catch(() => []);
    for (const fileName of fileNames) {
      const requestPath = path.posix.join(directories.requestsDir, fileName);
      const responsePath = path.posix.join(directories.responsesDir, fileName);
      const requestId = fileName.replace(/\.json$/i, "") || randomUUID();
      try {
        const raw = await input.client.readTextFile(requestPath);
        const parsed = JSON.parse(raw) as Partial<SandboxCallbackBridgeRequest>;
        await writeBridgeResponse(input.client, responsePath, {
          id: typeof parsed.id === "string" && parsed.id.length > 0 ? parsed.id : requestId,
          status: 503,
          headers: { "content-type": "application/json" },
          body: JSON.stringify({ error: message }),
          completedAt: new Date().toISOString(),
        });
      } catch (error) {
        console.warn(
          `[paperclip] sandbox callback bridge failed to abort pending request ${requestId}: ${error instanceof Error ? error.message : String(error)}`,
        );
      } finally {
        await input.client.remove(requestPath).catch(() => undefined);
      }
    }
  };

  // Background poll loop. Requests are processed sequentially, in sorted
  // filename order, one directory listing at a time.
  const loop = (async () => {
    try {
      while (true) {
        const fileNames = await input.client.listJsonFiles(directories.requestsDir);
        if (fileNames.length === 0) {
          if (stopping) {
            break;
          }
          // Idle: sleep one poll interval before listing again.
          await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
          continue;
        }
        for (const fileName of fileNames) {
          // Once stop() has been called, only keep draining until the
          // drain deadline; abandon the rest of this batch after that.
          if (stopping && Date.now() >= stopDeadline) break;
          inFlight += 1;
          try {
            await processRequestFile(fileName);
          } finally {
            inFlight -= 1;
          }
        }
        if (stopping && Date.now() >= stopDeadline) {
          break;
        }
      }
    } finally {
      // Signal stop() that the loop has fully exited.
      settled = true;
      if (settleResolve) {
        settleResolve();
      }
    }
  })();

  // Intentionally fire-and-forget: the loop's lifetime is controlled by
  // stop(); `void` documents that we are not awaiting it here.
  void loop;

  return {
    stop: async (options = {}) => {
      stopping = true;
      const drainMs = normalizeTimeoutMs(options.drainTimeoutMs, DEFAULT_BRIDGE_STOP_TIMEOUT_MS);
      stopDeadline = Date.now() + drainMs;
      if (!settled) {
        // Wait for the loop to exit, but no longer than the drain window.
        await Promise.race([
          settledPromise,
          new Promise<void>((resolve) => setTimeout(resolve, drainMs)),
        ]);
      }
      // Whatever is still queued gets an explicit 503 instead of silence.
      await failPendingRequests("Bridge worker stopped before request could be handled.");
    },
  };
}
|
||||
|
||||
/**
 * Launches the callback bridge HTTP server inside the sandbox and waits for
 * it to report readiness. All remote actions go through `input.runner`.
 *
 * Sequence:
 *  1. If `bridgeAsset` is provided, upload its entrypoint into
 *     `assetRemoteDir` using the command-managed queue client.
 *  2. Start the server detached (`nohup … &`) with its configuration passed
 *     as environment variables (see buildSandboxCallbackBridgeEnv), stdout
 *     and stderr appended to the queue's log file, and the pid recorded in
 *     the pid file.
 *  3. Poll up to 200 times (sleep 0.05 each, ~10s) for a non-empty ready
 *     file; if the process dies first, dump its log to stderr and fail.
 *  4. Parse the readiness JSON ({host, port, baseUrl, pid}) and return a
 *     handle whose `stop` kills the recorded pid (waiting up to ~2s for it
 *     to exit) and removes the pid/ready marker files.
 *
 * @throws if any remote step fails, if the readiness output is not valid
 *   JSON, or if no listening port is reported.
 */
export async function startSandboxCallbackBridgeServer(input: {
  runner: CommandManagedRuntimeRunner;
  remoteCwd: string;
  assetRemoteDir: string;
  queueDir: string;
  bridgeToken: string;
  bridgeAsset?: SandboxCallbackBridgeAsset | null;
  host?: string;
  port?: number | null;
  pollIntervalMs?: number | null;
  responseTimeoutMs?: number | null;
  timeoutMs?: number | null;
  nodeCommand?: string;
  shellCommand?: "bash" | "sh" | null;
  maxQueueDepth?: number | null;
  maxBodyBytes?: number | null;
}): Promise<StartedSandboxCallbackBridgeServer> {
  const timeoutMs = normalizeTimeoutMs(input.timeoutMs, DEFAULT_BRIDGE_RESPONSE_TIMEOUT_MS);
  const shellCommand = preferredShellForSandbox(input.shellCommand);
  const directories = sandboxCallbackBridgeDirectories(input.queueDir);
  const remoteEntrypoint = path.posix.join(input.assetRemoteDir, SANDBOX_CALLBACK_BRIDGE_ENTRYPOINT);
  if (input.bridgeAsset) {
    // Upload the server script into the sandbox before trying to run it.
    const assetClient = createCommandManagedSandboxCallbackBridgeQueueClient({
      runner: input.runner,
      remoteCwd: input.remoteCwd,
      timeoutMs,
      shellCommand,
    });
    await assetClient.makeDir(input.assetRemoteDir);
    const entrypointSource = await fs.readFile(input.bridgeAsset.entrypoint, "utf8");
    await assetClient.writeTextFile(remoteEntrypoint, entrypointSource);
  }
  // All server configuration travels as PAPERCLIP_BRIDGE_* env vars.
  const env = buildSandboxCallbackBridgeEnv({
    queueDir: input.queueDir,
    bridgeToken: input.bridgeToken,
    host: input.host,
    port: input.port,
    pollIntervalMs: input.pollIntervalMs,
    responseTimeoutMs: input.responseTimeoutMs,
    maxQueueDepth: input.maxQueueDepth,
    maxBodyBytes: input.maxBodyBytes,
  });
  const nodeCommand = input.nodeCommand?.trim() || "node";
  // Start detached: nohup + background + redirected stdio so the server
  // survives this shell; stale ready/pid markers are removed first.
  const startResult = await input.runner.execute({
    command: shellCommand,
    args: [
      "-lc",
      [
        `mkdir -p ${shellQuote(directories.requestsDir)} ${shellQuote(directories.responsesDir)} ${shellQuote(directories.logsDir)}`,
        `rm -f ${shellQuote(directories.readyFile)} ${shellQuote(directories.pidFile)}`,
        `nohup env ${Object.entries(env).map(([key, value]) => `${key}=${shellQuote(value)}`).join(" ")} ` +
          `${shellQuote(nodeCommand)} ${shellQuote(remoteEntrypoint)} ` +
          `>> ${shellQuote(directories.logFile)} 2>&1 < /dev/null &`,
        "pid=$!",
        `printf '%s\\n' \"$pid\" > ${shellQuote(directories.pidFile)}`,
        "printf '{\"pid\":%s}\\n' \"$pid\"",
      ].join("\n"),
    ],
    cwd: input.remoteCwd,
    timeoutMs,
  });
  requireSuccessfulResult("start sandbox callback bridge", startResult);

  // Poll for the ready file the server writes once it is listening. If the
  // process has already died (kill -0 on the recorded pid fails), surface its
  // log on stderr and exit non-zero so the failure is diagnosable.
  const readyResult = await runShell(
    input.runner,
    input.remoteCwd,
    [
      "i=0",
      `while [ \"$i\" -lt 200 ]; do`,
      ` if [ -s ${shellQuote(directories.readyFile)} ]; then`,
      ` cat ${shellQuote(directories.readyFile)}`,
      " exit 0",
      " fi",
      ` if [ -s ${shellQuote(directories.logFile)} ] && ! kill -0 \"$(cat ${shellQuote(directories.pidFile)} 2>/dev/null)\" 2>/dev/null; then`,
      ` cat ${shellQuote(directories.logFile)} >&2`,
      " exit 1",
      " fi",
      " i=$((i + 1))",
      " sleep 0.05",
      "done",
      `echo "Timed out waiting for bridge readiness." >&2`,
      `if [ -s ${shellQuote(directories.logFile)} ]; then cat ${shellQuote(directories.logFile)} >&2; fi`,
      "exit 1",
    ].join("\n"),
    timeoutMs,
    shellCommand,
  );
  requireSuccessfulResult("wait for sandbox callback bridge readiness", readyResult);

  // The ready file holds a single JSON object produced by the server.
  let readyData: { host?: string; port?: number; baseUrl?: string; pid?: number };
  try {
    readyData = JSON.parse(readyResult.stdout.trim()) as { host?: string; port?: number; baseUrl?: string; pid?: number };
  } catch (error) {
    throw new Error(
      `Sandbox callback bridge wrote invalid readiness JSON: ${error instanceof Error ? error.message : String(error)}`,
    );
  }

  // Fall back to loopback if the server did not report a usable host.
  const host = typeof readyData.host === "string" && readyData.host.trim().length > 0
    ? readyData.host.trim()
    : "127.0.0.1";
  const port = typeof readyData.port === "number" && Number.isFinite(readyData.port) ? readyData.port : 0;
  if (!port) {
    throw new Error("Sandbox callback bridge did not report a listening port.");
  }
  const baseUrl =
    typeof readyData.baseUrl === "string" && readyData.baseUrl.trim().length > 0
      ? readyData.baseUrl.trim()
      : `http://${host}:${port}`;

  return {
    baseUrl,
    host,
    port,
    pid: typeof readyData.pid === "number" && Number.isFinite(readyData.pid) ? readyData.pid : 0,
    directories,
    stop: async () => {
      // Graceful-ish shutdown: kill the recorded pid, wait up to 40 × 0.05s
      // (~2s) for it to exit, then clear the pid/ready marker files.
      const stopResult = await input.runner.execute({
        command: shellCommand,
        args: [
          "-lc",
          [
            `if [ -s ${shellQuote(directories.pidFile)} ]; then`,
            ` pid="$(cat ${shellQuote(directories.pidFile)})"`,
            " kill \"$pid\" 2>/dev/null || true",
            " i=0",
            " while kill -0 \"$pid\" 2>/dev/null && [ \"$i\" -lt 40 ]; do",
            " i=$((i + 1))",
            " sleep 0.05",
            " done",
            "fi",
            `rm -f ${shellQuote(directories.pidFile)} ${shellQuote(directories.readyFile)}`,
          ].join("\n"),
        ],
        cwd: input.remoteCwd,
        timeoutMs,
      });
      // Only a timeout is treated as a stop failure; a non-zero exit from the
      // script itself (e.g. process already gone) is tolerated.
      if (stopResult.timedOut) {
        throw new Error(buildRunnerFailureMessage("stop sandbox callback bridge", stopResult));
      }
    },
  };
}
|
||||
|
||||
/**
 * Returns the JavaScript source of the in-sandbox bridge server as one
 * ES-module string (written to disk by `createSandboxCallbackBridgeAsset`).
 *
 * The generated server reads its configuration from PAPERCLIP_BRIDGE_* env
 * vars, serves HTTP with Bearer-token auth (constant-time compare via
 * `timingSafeEqual`), rejects non-JSON bodies for mutating methods, enforces
 * queue-depth and body-size limits, writes each accepted request atomically
 * as `<queueDir>/requests/<uuid>.json` (tmp-file + rename), then polls
 * `<queueDir>/responses/<uuid>.json` for the host worker's reply before
 * answering the HTTP caller. On listen it publishes {pid, host, port,
 * baseUrl, startedAt} to the ready file, also atomically.
 *
 * Note on interpolation: `${…}` expressions below are evaluated HERE on the
 * host (baking in the default limits and the header allowlist), while
 * `\`…\`` / `\${…}` sequences are escaped so they remain template literals
 * in the generated code.
 */
function getSandboxCallbackBridgeServerSource(): string {
  return `import { randomUUID, timingSafeEqual } from "node:crypto";
import { createServer } from "node:http";
import { promises as fs } from "node:fs";
import path from "node:path";

const queueDir = process.env.PAPERCLIP_BRIDGE_QUEUE_DIR;
const bridgeToken = process.env.PAPERCLIP_BRIDGE_TOKEN;
const host = process.env.PAPERCLIP_BRIDGE_HOST || "127.0.0.1";
const port = Number(process.env.PAPERCLIP_BRIDGE_PORT || "0");
const pollIntervalMs = Number(process.env.PAPERCLIP_BRIDGE_POLL_INTERVAL_MS || "100");
const responseTimeoutMs = Number(process.env.PAPERCLIP_BRIDGE_RESPONSE_TIMEOUT_MS || "30000");
const maxQueueDepth = Number(process.env.PAPERCLIP_BRIDGE_MAX_QUEUE_DEPTH || "${DEFAULT_BRIDGE_MAX_QUEUE_DEPTH}");
const maxBodyBytes = Number(process.env.PAPERCLIP_BRIDGE_MAX_BODY_BYTES || "${DEFAULT_BRIDGE_MAX_BODY_BYTES}");
const allowedHeaders = new Set(${JSON.stringify([...DEFAULT_SANDBOX_CALLBACK_BRIDGE_HEADER_ALLOWLIST])});

if (!queueDir || !bridgeToken) {
  throw new Error("PAPERCLIP_BRIDGE_QUEUE_DIR and PAPERCLIP_BRIDGE_TOKEN are required.");
}

const requestsDir = path.posix.join(queueDir, "requests");
const responsesDir = path.posix.join(queueDir, "responses");
const logsDir = path.posix.join(queueDir, "logs");
const readyFile = path.posix.join(queueDir, "ready.json");

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

function normalizeHeaders(headers) {
  const out = {};
  for (const [key, value] of Object.entries(headers)) {
    if (value == null) continue;
    const normalizedKey = key.toLowerCase();
    if (!allowedHeaders.has(normalizedKey)) {
      continue;
    }
    out[normalizedKey] = Array.isArray(value) ? value.join(", ") : String(value);
  }
  return out;
}

async function readBody(req) {
  const chunks = [];
  let totalBytes = 0;
  for await (const chunk of req) {
    const nextChunk = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
    chunks.push(nextChunk);
    totalBytes += nextChunk.byteLength;
    if (totalBytes > maxBodyBytes) {
      throw new Error("Bridge request body exceeded the configured size limit.");
    }
  }
  return Buffer.concat(chunks).toString("utf8");
}

async function queueDepth() {
  const entries = await fs.readdir(requestsDir, { withFileTypes: true }).catch(() => []);
  return entries.filter((entry) => entry.isFile() && entry.name.endsWith(".json")).length;
}

function tokensMatch(received) {
  const expected = Buffer.from(bridgeToken, "utf8");
  const actual = Buffer.from(typeof received === "string" ? received : "", "utf8");
  if (expected.length !== actual.length) return false;
  return timingSafeEqual(expected, actual);
}

async function waitForResponse(requestId) {
  const responsePath = path.posix.join(responsesDir, \`\${requestId}.json\`);
  const deadline = Date.now() + responseTimeoutMs;
  while (Date.now() < deadline) {
    const body = await fs.readFile(responsePath, "utf8").catch(() => null);
    if (body != null) {
      await fs.rm(responsePath, { force: true }).catch(() => undefined);
      return JSON.parse(body);
    }
    await sleep(pollIntervalMs);
  }
  throw new Error("Timed out waiting for host bridge response.");
}

const server = createServer(async (req, res) => {
  try {
    const auth = req.headers.authorization || "";
    const receivedToken = auth.startsWith("Bearer ") ? auth.slice("Bearer ".length) : "";
    if (!tokensMatch(receivedToken)) {
      res.statusCode = 401;
      res.setHeader("content-type", "application/json");
      res.end(JSON.stringify({ error: "Invalid bridge token." }));
      return;
    }

    if (await queueDepth() >= maxQueueDepth) {
      res.statusCode = 503;
      res.setHeader("content-type", "application/json");
      res.end(JSON.stringify({ error: "Bridge request queue is full." }));
      return;
    }

    const url = new URL(req.url || "/", "http://127.0.0.1");
    const contentType = typeof req.headers["content-type"] === "string" ? req.headers["content-type"] : "";
    if (req.method && req.method !== "GET" && req.method !== "HEAD" && !/json/i.test(contentType)) {
      res.statusCode = 415;
      res.setHeader("content-type", "application/json");
      res.end(JSON.stringify({ error: "Bridge only accepts JSON request bodies." }));
      return;
    }
    const requestId = randomUUID();
    const requestBody = await readBody(req);
    const payload = {
      id: requestId,
      method: req.method || "GET",
      path: url.pathname,
      query: url.search,
      headers: normalizeHeaders(req.headers),
      body: requestBody,
      createdAt: new Date().toISOString(),
    };
    const requestPath = path.posix.join(requestsDir, \`\${requestId}.json\`);
    const tempPath = \`\${requestPath}.tmp\`;
    await fs.writeFile(tempPath, \`\${JSON.stringify(payload)}\\n\`, "utf8");
    await fs.rename(tempPath, requestPath);

    const response = await waitForResponse(requestId);
    res.statusCode = typeof response.status === "number" ? response.status : 200;
    for (const [key, value] of Object.entries(response.headers || {})) {
      if (typeof value !== "string" || key.toLowerCase() === "content-length") continue;
      res.setHeader(key, value);
    }
    res.end(typeof response.body === "string" ? response.body : "");
  } catch (error) {
    res.statusCode = 502;
    res.setHeader("content-type", "application/json");
    res.end(JSON.stringify({ error: error instanceof Error ? error.message : String(error) }));
  }
});

async function shutdown() {
  server.close(() => {
    process.exit(0);
  });
}

process.on("SIGINT", () => void shutdown());
process.on("SIGTERM", () => void shutdown());

await fs.mkdir(requestsDir, { recursive: true });
await fs.mkdir(responsesDir, { recursive: true });
await fs.mkdir(logsDir, { recursive: true });

server.listen(port, host, async () => {
  const address = server.address();
  if (!address || typeof address === "string") {
    throw new Error("Bridge server did not expose a TCP address.");
  }
  const ready = {
    pid: process.pid,
    host,
    port: address.port,
    baseUrl: \`http://\${host}:\${address.port}\`,
    startedAt: new Date().toISOString(),
  };
  const tempReadyFile = \`\${readyFile}.tmp\`;
  await fs.writeFile(tempReadyFile, JSON.stringify(ready), "utf8");
  await fs.rename(tempReadyFile, readyFile);
});`;
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { lstat, mkdir, mkdtemp, readFile, rm, symlink, writeFile } from "node:fs/promises";
|
||||
import { lstat, mkdir, mkdtemp, readFile, readdir, rm, symlink, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execFile as execFileCallback } from "node:child_process";
|
||||
@@ -73,6 +73,13 @@ describe("sandbox managed runtime", () => {
|
||||
await writeFile(remotePath, Buffer.from(bytes));
|
||||
},
|
||||
readFile: async (remotePath) => await readFile(remotePath),
|
||||
listFiles: async (remotePath) => {
|
||||
const entries = await readdir(remotePath, { withFileTypes: true }).catch(() => []);
|
||||
return entries
|
||||
.filter((entry) => entry.isFile())
|
||||
.map((entry) => entry.name)
|
||||
.sort((left, right) => left.localeCompare(right));
|
||||
},
|
||||
remove: async (remotePath) => {
|
||||
await rm(remotePath, { recursive: true, force: true });
|
||||
},
|
||||
|
||||
@@ -13,7 +13,6 @@ export interface SandboxRemoteExecutionSpec {
|
||||
remoteCwd: string;
|
||||
timeoutMs: number;
|
||||
apiKey: string | null;
|
||||
paperclipApiUrl?: string | null;
|
||||
}
|
||||
|
||||
export interface SandboxManagedRuntimeAsset {
|
||||
@@ -27,6 +26,7 @@ export interface SandboxManagedRuntimeClient {
|
||||
makeDir(remotePath: string): Promise<void>;
|
||||
writeFile(remotePath: string, bytes: ArrayBuffer): Promise<void>;
|
||||
readFile(remotePath: string): Promise<Buffer | Uint8Array | ArrayBuffer>;
|
||||
listFiles(remotePath: string): Promise<string[]>;
|
||||
remove(remotePath: string): Promise<void>;
|
||||
run(command: string, options: { timeoutMs: number }): Promise<void>;
|
||||
}
|
||||
@@ -84,7 +84,6 @@ export function parseSandboxRemoteExecutionSpec(value: unknown): SandboxRemoteEx
|
||||
remoteCwd,
|
||||
timeoutMs,
|
||||
apiKey: asString(parsed.apiKey).trim() || null,
|
||||
paperclipApiUrl: asString(parsed.paperclipApiUrl).trim() || null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -95,7 +94,6 @@ export function buildSandboxExecutionSessionIdentity(spec: SandboxRemoteExecutio
|
||||
provider: spec.provider,
|
||||
sandboxId: spec.sandboxId,
|
||||
remoteCwd: spec.remoteCwd,
|
||||
...(spec.paperclipApiUrl ? { paperclipApiUrl: spec.paperclipApiUrl } : {}),
|
||||
} as const;
|
||||
}
|
||||
|
||||
@@ -107,8 +105,7 @@ export function sandboxExecutionSessionMatches(saved: unknown, current: SandboxR
|
||||
asString(parsedSaved.transport) === currentIdentity.transport &&
|
||||
asString(parsedSaved.provider) === currentIdentity.provider &&
|
||||
asString(parsedSaved.sandboxId) === currentIdentity.sandboxId &&
|
||||
asString(parsedSaved.remoteCwd) === currentIdentity.remoteCwd &&
|
||||
asString(parsedSaved.paperclipApiUrl) === asString(currentIdentity.paperclipApiUrl)
|
||||
asString(parsedSaved.remoteCwd) === currentIdentity.remoteCwd
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
3
packages/adapter-utils/src/sandbox-shell.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function preferredShellForSandbox(shellCommand: string | null | undefined): "bash" | "sh" {
|
||||
return shellCommand === "bash" ? "bash" : "sh";
|
||||
}
|
||||
@@ -1,12 +1,19 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
applyPaperclipWorkspaceEnv,
|
||||
appendWithByteCap,
|
||||
buildInvocationEnvForLogs,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
materializePaperclipSkillCopy,
|
||||
renderPaperclipWakePrompt,
|
||||
runningProcesses,
|
||||
runChildProcess,
|
||||
sanitizeSshRemoteEnv,
|
||||
shapePaperclipWorkspaceEnvForExecution,
|
||||
stringifyPaperclipWakePayload,
|
||||
} from "./server-utils.js";
|
||||
|
||||
@@ -39,6 +46,162 @@ async function waitForTextMatch(read: () => string, pattern: RegExp, timeoutMs =
|
||||
return read().match(pattern);
|
||||
}
|
||||
|
||||
describe("buildInvocationEnvForLogs", () => {
|
||||
it("redacts inline secrets from resolved command metadata", () => {
|
||||
const loggedEnv = buildInvocationEnvForLogs(
|
||||
{ SAFE_VALUE: "visible" },
|
||||
{
|
||||
resolvedCommand: "env OPENAI_API_KEY=sk-live-example custom-acp --token ghp_example_secret",
|
||||
},
|
||||
);
|
||||
|
||||
expect(loggedEnv.SAFE_VALUE).toBe("visible");
|
||||
expect(loggedEnv.PAPERCLIP_RESOLVED_COMMAND).toBe(
|
||||
"env OPENAI_API_KEY=***REDACTED*** custom-acp --token ***REDACTED***",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("sanitizeSshRemoteEnv", () => {
|
||||
it("drops inherited host shell identity variables for SSH remote execution", () => {
|
||||
expect(
|
||||
sanitizeSshRemoteEnv(
|
||||
{
|
||||
PATH: "/host/bin:/usr/bin",
|
||||
HOME: "/Users/local",
|
||||
NVM_DIR: "/Users/local/.nvm",
|
||||
TMPDIR: "/var/folders/local/T",
|
||||
XDG_CONFIG_HOME: "/Users/local/.config",
|
||||
SAFE_VALUE: "visible",
|
||||
},
|
||||
{
|
||||
PATH: "/host/bin:/usr/bin",
|
||||
HOME: "/Users/local",
|
||||
NVM_DIR: "/Users/local/.nvm",
|
||||
TMPDIR: "/var/folders/local/T",
|
||||
XDG_CONFIG_HOME: "/Users/local/.config",
|
||||
},
|
||||
),
|
||||
).toEqual({
|
||||
SAFE_VALUE: "visible",
|
||||
});
|
||||
});
|
||||
|
||||
it("preserves explicit remote overrides even for filtered key names", () => {
|
||||
expect(
|
||||
sanitizeSshRemoteEnv(
|
||||
{
|
||||
PATH: "/custom/remote/bin:/usr/bin",
|
||||
HOME: "/home/agent",
|
||||
TMPDIR: "/tmp",
|
||||
SAFE_VALUE: "visible",
|
||||
},
|
||||
{
|
||||
PATH: "/host/bin:/usr/bin",
|
||||
HOME: "/Users/local",
|
||||
TMPDIR: "/var/folders/local/T",
|
||||
},
|
||||
),
|
||||
).toEqual({
|
||||
PATH: "/custom/remote/bin:/usr/bin",
|
||||
HOME: "/home/agent",
|
||||
TMPDIR: "/tmp",
|
||||
SAFE_VALUE: "visible",
|
||||
});
|
||||
});
|
||||
|
||||
it("filters identity keys via case-insensitive match against the inherited env", () => {
|
||||
expect(
|
||||
sanitizeSshRemoteEnv(
|
||||
{
|
||||
// Caller passed PATH in upper case while the inherited (Windows-style)
|
||||
// host env exposes it as Path. The lookup must still treat them as
|
||||
// equal so the leaked host PATH gets stripped.
|
||||
PATH: "/host/bin:/usr/bin",
|
||||
HOME: "/host/home",
|
||||
},
|
||||
{
|
||||
Path: "/host/bin:/usr/bin",
|
||||
home: "/host/home",
|
||||
},
|
||||
),
|
||||
).toEqual({});
|
||||
});
|
||||
|
||||
it("preserves explicitly-set identity keys when the inherited env disagrees in case but not in value", () => {
|
||||
expect(
|
||||
sanitizeSshRemoteEnv(
|
||||
{
|
||||
PATH: "/explicit/remote/bin",
|
||||
},
|
||||
{
|
||||
Path: "/host/bin:/usr/bin",
|
||||
},
|
||||
),
|
||||
).toEqual({ PATH: "/explicit/remote/bin" });
|
||||
});
|
||||
});
|
||||
|
||||
describe("materializePaperclipSkillCopy", () => {
|
||||
it("refuses to materialize into an ancestor of the source", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-skill-copy-"));
|
||||
try {
|
||||
const source = path.join(root, "parent", "skill");
|
||||
await fs.mkdir(source, { recursive: true });
|
||||
await fs.writeFile(path.join(source, "SKILL.md"), "# skill\n", "utf8");
|
||||
|
||||
await expect(materializePaperclipSkillCopy(source, path.join(root, "parent"))).rejects.toThrow(
|
||||
/ancestor/,
|
||||
);
|
||||
await expect(fs.readFile(path.join(source, "SKILL.md"), "utf8")).resolves.toBe("# skill\n");
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("does not delete and recopy an unchanged materialized skill target", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-skill-copy-"));
|
||||
try {
|
||||
const source = path.join(root, "source");
|
||||
const target = path.join(root, "target");
|
||||
await fs.mkdir(source, { recursive: true });
|
||||
await fs.writeFile(path.join(source, "SKILL.md"), "# skill\n", "utf8");
|
||||
|
||||
const first = await materializePaperclipSkillCopy(source, target);
|
||||
expect(first.copiedFiles).toBe(1);
|
||||
await fs.writeFile(path.join(target, "local-marker.txt"), "keep\n", "utf8");
|
||||
|
||||
const second = await materializePaperclipSkillCopy(source, target);
|
||||
expect(second.copiedFiles).toBe(0);
|
||||
await expect(fs.readFile(path.join(target, "local-marker.txt"), "utf8")).resolves.toBe("keep\n");
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("breaks stale materialization locks left by dead processes", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-skill-copy-"));
|
||||
try {
|
||||
const source = path.join(root, "source");
|
||||
const target = path.join(root, "target");
|
||||
const lock = `${target}.lock`;
|
||||
await fs.mkdir(source, { recursive: true });
|
||||
await fs.writeFile(path.join(source, "SKILL.md"), "# skill\n", "utf8");
|
||||
await fs.mkdir(lock, { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(lock, "owner.json"),
|
||||
JSON.stringify({ pid: 999_999_999, createdAt: "2000-01-01T00:00:00.000Z" }),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
await expect(materializePaperclipSkillCopy(source, target)).resolves.toMatchObject({ copiedFiles: 1 });
|
||||
await expect(fs.readFile(path.join(target, "SKILL.md"), "utf8")).resolves.toBe("# skill\n");
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("runChildProcess", () => {
|
||||
it("does not arm a timeout when timeoutSec is 0", async () => {
|
||||
const result = await runChildProcess(
|
||||
@@ -470,6 +633,70 @@ describe("applyPaperclipWorkspaceEnv", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("shapePaperclipWorkspaceEnvForExecution", () => {
|
||||
it("rewrites workspace env paths for remote execution", () => {
|
||||
const shaped = shapePaperclipWorkspaceEnvForExecution({
|
||||
workspaceCwd: "/tmp/workspace",
|
||||
workspaceWorktreePath: "/tmp/worktree",
|
||||
workspaceHints: [
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: "/tmp/workspace",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
cwd: "/tmp/other-workspace",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-3",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
},
|
||||
],
|
||||
executionTargetIsRemote: true,
|
||||
executionCwd: "/remote/workspace",
|
||||
});
|
||||
|
||||
expect(shaped).toEqual({
|
||||
workspaceCwd: "/remote/workspace",
|
||||
workspaceWorktreePath: null,
|
||||
workspaceHints: [
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: "/remote/workspace",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-3",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it("leaves local execution workspace paths unchanged", () => {
|
||||
const workspaceHints = [{ workspaceId: "workspace-1", cwd: "/tmp/workspace" }];
|
||||
const shaped = shapePaperclipWorkspaceEnvForExecution({
|
||||
workspaceCwd: "/tmp/workspace",
|
||||
workspaceWorktreePath: "/tmp/worktree",
|
||||
workspaceHints,
|
||||
executionTargetIsRemote: false,
|
||||
executionCwd: "/remote/workspace",
|
||||
});
|
||||
|
||||
expect(shaped).toEqual({
|
||||
workspaceCwd: "/tmp/workspace",
|
||||
workspaceWorktreePath: "/tmp/worktree",
|
||||
workspaceHints,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("appendWithByteCap", () => {
|
||||
it("keeps valid UTF-8 when trimming through multibyte text", () => {
|
||||
const output = appendWithByteCap("prefix ", "hello — world", 7);
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { spawn, type ChildProcess } from "node:child_process";
|
||||
import { createHash, randomUUID } from "node:crypto";
|
||||
import { constants as fsConstants, promises as fs, type Dirent } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { buildSshSpawnTarget, type SshRemoteExecutionSpec } from "./ssh.js";
|
||||
import { redactCommandText } from "./command-redaction.js";
|
||||
import type {
|
||||
AdapterSkillEntry,
|
||||
AdapterSkillSnapshot,
|
||||
@@ -76,10 +78,14 @@ export const MAX_CAPTURE_BYTES = 4 * 1024 * 1024;
|
||||
export const MAX_EXCERPT_BYTES = 32 * 1024;
|
||||
const TERMINAL_RESULT_SCAN_OVERLAP_CHARS = 64 * 1024;
|
||||
const SENSITIVE_ENV_KEY = /(key|token|secret|password|passwd|authorization|cookie)/i;
|
||||
const REDACTED_LOG_VALUE = "***REDACTED***";
|
||||
const PAPERCLIP_SKILL_ROOT_RELATIVE_CANDIDATES = [
|
||||
"../../skills",
|
||||
"../../../../../skills",
|
||||
];
|
||||
const MATERIALIZED_SKILL_SENTINEL = ".paperclip-materialized-skill.json";
|
||||
const MATERIALIZED_SKILL_LOCK_OWNER = "owner.json";
|
||||
const MATERIALIZED_SKILL_LOCK_STALE_MS = 30_000;
|
||||
|
||||
export const DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE = [
|
||||
"You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.",
|
||||
@@ -111,6 +117,11 @@ export interface InstalledSkillTarget {
|
||||
kind: "symlink" | "directory" | "file";
|
||||
}
|
||||
|
||||
export interface MaterializedPaperclipSkillCopyResult {
|
||||
copiedFiles: number;
|
||||
skippedSymlinks: string[];
|
||||
}
|
||||
|
||||
interface PersistentSkillSnapshotOptions {
|
||||
adapterType: string;
|
||||
availableEntries: PaperclipSkillEntry[];
|
||||
@@ -780,11 +791,15 @@ export function renderPaperclipWakePrompt(
|
||||
export function redactEnvForLogs(env: Record<string, string>): Record<string, string> {
|
||||
const redacted: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(env)) {
|
||||
redacted[key] = SENSITIVE_ENV_KEY.test(key) ? "***REDACTED***" : value;
|
||||
redacted[key] = SENSITIVE_ENV_KEY.test(key) ? REDACTED_LOG_VALUE : value;
|
||||
}
|
||||
return redacted;
|
||||
}
|
||||
|
||||
export function redactCommandTextForLogs(command: string): string {
|
||||
return redactCommandText(command, REDACTED_LOG_VALUE);
|
||||
}
|
||||
|
||||
export function buildInvocationEnvForLogs(
|
||||
env: Record<string, string>,
|
||||
options: {
|
||||
@@ -806,7 +821,7 @@ export function buildInvocationEnvForLogs(
|
||||
|
||||
const resolvedCommand = options.resolvedCommand?.trim();
|
||||
if (resolvedCommand) {
|
||||
merged[options.resolvedCommandEnvKey ?? "PAPERCLIP_RESOLVED_COMMAND"] = resolvedCommand;
|
||||
merged[options.resolvedCommandEnvKey ?? "PAPERCLIP_RESOLVED_COMMAND"] = redactCommandTextForLogs(resolvedCommand);
|
||||
}
|
||||
|
||||
return redactEnvForLogs(merged);
|
||||
@@ -870,6 +885,79 @@ export function applyPaperclipWorkspaceEnv(
|
||||
return env;
|
||||
}
|
||||
|
||||
export function shapePaperclipWorkspaceEnvForExecution(input: {
|
||||
workspaceCwd?: string | null;
|
||||
workspaceWorktreePath?: string | null;
|
||||
workspaceHints?: Array<Record<string, unknown>>;
|
||||
executionTargetIsRemote?: boolean;
|
||||
executionCwd?: string | null;
|
||||
}): {
|
||||
workspaceCwd: string | null;
|
||||
workspaceWorktreePath: string | null;
|
||||
workspaceHints: Array<Record<string, unknown>>;
|
||||
} {
|
||||
const workspaceCwd =
|
||||
typeof input.workspaceCwd === "string" && input.workspaceCwd.trim().length > 0
|
||||
? input.workspaceCwd.trim()
|
||||
: null;
|
||||
const workspaceWorktreePath =
|
||||
typeof input.workspaceWorktreePath === "string" && input.workspaceWorktreePath.trim().length > 0
|
||||
? input.workspaceWorktreePath.trim()
|
||||
: null;
|
||||
const workspaceHints = Array.isArray(input.workspaceHints) ? input.workspaceHints : [];
|
||||
|
||||
if (!input.executionTargetIsRemote) {
|
||||
return {
|
||||
workspaceCwd,
|
||||
workspaceWorktreePath,
|
||||
workspaceHints,
|
||||
};
|
||||
}
|
||||
|
||||
const executionCwd =
|
||||
typeof input.executionCwd === "string" && input.executionCwd.trim().length > 0
|
||||
? input.executionCwd.trim()
|
||||
: null;
|
||||
// On a remote target we must never fall back to the local workspaceCwd —
|
||||
// doing so leaks host paths into the remote env (the exact failure mode
|
||||
// this helper exists to prevent). Callers are expected to resolve
|
||||
// executionCwd via adapterExecutionTargetRemoteCwd before calling this
|
||||
// helper, which always returns a non-empty string. Surface a warning so
|
||||
// future callers don't silently regress to the leak.
|
||||
if (executionCwd === null) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(
|
||||
"[paperclip] shapePaperclipWorkspaceEnvForExecution called with executionCwd=null on a remote target; " +
|
||||
"stripping workspaceCwd to avoid leaking local paths into the remote environment.",
|
||||
);
|
||||
}
|
||||
const realizedWorkspaceCwd = executionCwd;
|
||||
const localWorkspaceCwd = workspaceCwd ? path.resolve(workspaceCwd) : null;
|
||||
const shapedWorkspaceHints = workspaceHints.map((hint) => {
|
||||
const nextHint = { ...hint };
|
||||
const hintCwd = typeof nextHint.cwd === "string" ? nextHint.cwd.trim() : "";
|
||||
if (!hintCwd) return nextHint;
|
||||
|
||||
if (localWorkspaceCwd && path.resolve(hintCwd) === localWorkspaceCwd) {
|
||||
if (realizedWorkspaceCwd) {
|
||||
nextHint.cwd = realizedWorkspaceCwd;
|
||||
} else {
|
||||
delete nextHint.cwd;
|
||||
}
|
||||
return nextHint;
|
||||
}
|
||||
|
||||
delete nextHint.cwd;
|
||||
return nextHint;
|
||||
});
|
||||
|
||||
return {
|
||||
workspaceCwd: realizedWorkspaceCwd,
|
||||
workspaceWorktreePath: null,
|
||||
workspaceHints: shapedWorkspaceHints,
|
||||
};
|
||||
}
|
||||
|
||||
export function sanitizeInheritedPaperclipEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
||||
const env: NodeJS.ProcessEnv = { ...baseEnv };
|
||||
for (const key of Object.keys(env)) {
|
||||
@@ -951,6 +1039,56 @@ function quoteForCmd(arg: string) {
|
||||
return /[\s"&<>|^()]/.test(escaped) ? `"${escaped}"` : escaped;
|
||||
}
|
||||
|
||||
const SSH_REMOTE_ENV_IDENTITY_KEYS = new Set([
|
||||
"PATH",
|
||||
"HOME",
|
||||
"PWD",
|
||||
"SHELL",
|
||||
"USER",
|
||||
"LOGNAME",
|
||||
"NVM_DIR",
|
||||
"TMPDIR",
|
||||
"TMP",
|
||||
"TEMP",
|
||||
"XDG_CONFIG_HOME",
|
||||
"XDG_CACHE_HOME",
|
||||
"XDG_DATA_HOME",
|
||||
"XDG_STATE_HOME",
|
||||
"XDG_RUNTIME_DIR",
|
||||
]);
|
||||
|
||||
function readEnvValueCaseInsensitive(env: NodeJS.ProcessEnv, key: string): string | undefined {
|
||||
const direct = env[key];
|
||||
if (typeof direct === "string") return direct;
|
||||
const upper = key.toUpperCase();
|
||||
for (const [candidateKey, candidateValue] of Object.entries(env)) {
|
||||
if (candidateKey.toUpperCase() === upper && typeof candidateValue === "string") {
|
||||
return candidateValue;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function sanitizeSshRemoteEnv(
|
||||
env: Record<string, string>,
|
||||
inheritedEnv: NodeJS.ProcessEnv = process.env,
|
||||
): Record<string, string> {
|
||||
const sanitized: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(env)) {
|
||||
const normalizedKey = key.toUpperCase();
|
||||
if (!SSH_REMOTE_ENV_IDENTITY_KEYS.has(normalizedKey)) {
|
||||
sanitized[key] = value;
|
||||
continue;
|
||||
}
|
||||
const inheritedValue = readEnvValueCaseInsensitive(inheritedEnv, key);
|
||||
if (typeof inheritedValue === "string" && inheritedValue === value) {
|
||||
continue;
|
||||
}
|
||||
sanitized[key] = value;
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function resolveWindowsCmdShell(env: NodeJS.ProcessEnv): string {
|
||||
const fallbackRoot = env.SystemRoot || process.env.SystemRoot || "C:\\Windows";
|
||||
return path.join(fallbackRoot, "System32", "cmd.exe");
|
||||
@@ -976,9 +1114,9 @@ async function resolveSpawnTarget(
|
||||
spec: remote,
|
||||
command,
|
||||
args,
|
||||
env: Object.fromEntries(
|
||||
env: sanitizeSshRemoteEnv(Object.fromEntries(
|
||||
Object.entries(options.remoteEnv ?? {}).filter((entry): entry is [string, string] => typeof entry[1] === "string"),
|
||||
),
|
||||
)),
|
||||
});
|
||||
return {
|
||||
command: sshResolved,
|
||||
@@ -1395,6 +1533,190 @@ export async function ensurePaperclipSkillSymlink(
|
||||
return "repaired";
|
||||
}
|
||||
|
||||
async function hashSkillDirectory(root: string): Promise<string> {
|
||||
const hash = createHash("sha256");
|
||||
|
||||
async function visit(candidate: string, relativePath: string): Promise<void> {
|
||||
const stat = await fs.lstat(candidate);
|
||||
if (stat.isSymbolicLink()) {
|
||||
hash.update(`symlink:${relativePath}\n`);
|
||||
return;
|
||||
}
|
||||
if (stat.isDirectory()) {
|
||||
hash.update(`dir:${relativePath}\n`);
|
||||
const entries = await fs.readdir(candidate, { withFileTypes: true });
|
||||
entries.sort((left, right) => left.name.localeCompare(right.name));
|
||||
for (const entry of entries) {
|
||||
const childRelativePath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
|
||||
await visit(path.join(candidate, entry.name), childRelativePath);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (stat.isFile()) {
|
||||
hash.update(`file:${relativePath}:${stat.mode}\n`);
|
||||
hash.update(await fs.readFile(candidate));
|
||||
hash.update("\n");
|
||||
return;
|
||||
}
|
||||
hash.update(`other:${relativePath}:${stat.mode}\n`);
|
||||
}
|
||||
|
||||
await visit(root, "");
|
||||
return hash.digest("hex");
|
||||
}
|
||||
|
||||
async function materializedSkillFingerprintMatches(targetRoot: string, sourceFingerprint: string): Promise<boolean> {
|
||||
try {
|
||||
const raw = JSON.parse(await fs.readFile(path.join(targetRoot, MATERIALIZED_SKILL_SENTINEL), "utf8")) as unknown;
|
||||
const parsed = parseObject(raw);
|
||||
return parsed.version === 1 && parsed.sourceFingerprint === sourceFingerprint;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function acquireMaterializeLock(lockDir: string): Promise<() => Promise<void>> {
|
||||
await fs.mkdir(path.dirname(lockDir), { recursive: true });
|
||||
const deadline = Date.now() + MATERIALIZED_SKILL_LOCK_STALE_MS;
|
||||
while (true) {
|
||||
try {
|
||||
await fs.mkdir(lockDir);
|
||||
await fs.writeFile(
|
||||
path.join(lockDir, MATERIALIZED_SKILL_LOCK_OWNER),
|
||||
`${JSON.stringify({ pid: process.pid, createdAt: new Date().toISOString() })}\n`,
|
||||
"utf8",
|
||||
);
|
||||
return async () => {
|
||||
await fs.rm(lockDir, { recursive: true, force: true });
|
||||
};
|
||||
} catch (err) {
|
||||
const code = err && typeof err === "object" ? (err as { code?: unknown }).code : null;
|
||||
if (code !== "EEXIST") throw err;
|
||||
if (await removeStaleMaterializeLock(lockDir, MATERIALIZED_SKILL_LOCK_STALE_MS)) continue;
|
||||
if (Date.now() >= deadline) {
|
||||
throw new Error(`Timed out waiting for Paperclip skill materialization lock at ${lockDir}`);
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isPidAlive(pid: number): boolean {
|
||||
if (!Number.isInteger(pid) || pid <= 0) return false;
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch (err) {
|
||||
const code = err && typeof err === "object" ? (err as { code?: unknown }).code : null;
|
||||
return code === "EPERM";
|
||||
}
|
||||
}
|
||||
|
||||
async function removeStaleMaterializeLock(lockDir: string, staleMs: number): Promise<boolean> {
|
||||
const ownerPath = path.join(lockDir, MATERIALIZED_SKILL_LOCK_OWNER);
|
||||
let shouldRemove = false;
|
||||
try {
|
||||
const raw = JSON.parse(await fs.readFile(ownerPath, "utf8")) as unknown;
|
||||
const owner = parseObject(raw);
|
||||
const pid = typeof owner.pid === "number" ? owner.pid : 0;
|
||||
const createdAt = typeof owner.createdAt === "string" ? Date.parse(owner.createdAt) : Number.NaN;
|
||||
const ageMs = Number.isFinite(createdAt) ? Date.now() - createdAt : staleMs + 1;
|
||||
shouldRemove = !isPidAlive(pid) || ageMs > staleMs;
|
||||
} catch {
|
||||
const stat = await fs.stat(lockDir).catch(() => null);
|
||||
shouldRemove = !stat || Date.now() - stat.mtimeMs > staleMs;
|
||||
}
|
||||
if (!shouldRemove) return false;
|
||||
await fs.rm(lockDir, { recursive: true, force: true }).catch(() => {});
|
||||
return true;
|
||||
}
|
||||
|
||||
export async function materializePaperclipSkillCopy(
|
||||
source: string,
|
||||
target: string,
|
||||
): Promise<MaterializedPaperclipSkillCopyResult> {
|
||||
const sourceRoot = path.resolve(source);
|
||||
const targetRoot = path.resolve(target);
|
||||
const relativeTarget = path.relative(sourceRoot, targetRoot);
|
||||
const relativeSource = path.relative(targetRoot, sourceRoot);
|
||||
if (
|
||||
!relativeTarget ||
|
||||
(!relativeTarget.startsWith("..") && !path.isAbsolute(relativeTarget)) ||
|
||||
!relativeSource ||
|
||||
(!relativeSource.startsWith("..") && !path.isAbsolute(relativeSource))
|
||||
) {
|
||||
throw new Error("Refusing to materialize a skill into itself, an ancestor, or one of its descendants.");
|
||||
}
|
||||
|
||||
const rootStat = await fs.lstat(sourceRoot);
|
||||
if (rootStat.isSymbolicLink()) {
|
||||
throw new Error("Refusing to materialize a skill root that is itself a symlink.");
|
||||
}
|
||||
if (!rootStat.isDirectory()) {
|
||||
throw new Error("Paperclip skills must be directories.");
|
||||
}
|
||||
|
||||
const result: MaterializedPaperclipSkillCopyResult = {
|
||||
copiedFiles: 0,
|
||||
skippedSymlinks: [],
|
||||
};
|
||||
|
||||
const lockDir = `${targetRoot}.lock`;
|
||||
const releaseLock = await acquireMaterializeLock(lockDir);
|
||||
const tempRoot = `${targetRoot}.tmp-${process.pid}-${randomUUID()}`;
|
||||
|
||||
async function copyEntry(sourcePath: string, targetPath: string, relativePath: string): Promise<void> {
|
||||
const stat = await fs.lstat(sourcePath);
|
||||
if (stat.isSymbolicLink()) {
|
||||
result.skippedSymlinks.push(relativePath || ".");
|
||||
return;
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
await fs.mkdir(targetPath, { recursive: true });
|
||||
const entries = await fs.readdir(sourcePath, { withFileTypes: true });
|
||||
entries.sort((left, right) => left.name.localeCompare(right.name));
|
||||
for (const entry of entries) {
|
||||
const childRelativePath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
|
||||
await copyEntry(path.join(sourcePath, entry.name), path.join(targetPath, entry.name), childRelativePath);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (stat.isFile()) {
|
||||
await fs.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fs.copyFile(sourcePath, targetPath, fsConstants.COPYFILE_FICLONE).catch(async () => {
|
||||
await fs.copyFile(sourcePath, targetPath);
|
||||
});
|
||||
await fs.chmod(targetPath, stat.mode).catch(() => {});
|
||||
result.copiedFiles += 1;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const sourceFingerprint = await hashSkillDirectory(sourceRoot);
|
||||
if (await materializedSkillFingerprintMatches(targetRoot, sourceFingerprint)) return result;
|
||||
await copyEntry(sourceRoot, tempRoot, "");
|
||||
await fs.writeFile(
|
||||
path.join(tempRoot, MATERIALIZED_SKILL_SENTINEL),
|
||||
`${JSON.stringify({
|
||||
version: 1,
|
||||
sourceFingerprint,
|
||||
copiedFiles: result.copiedFiles,
|
||||
skippedSymlinks: result.skippedSymlinks,
|
||||
}, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
if (await materializedSkillFingerprintMatches(targetRoot, sourceFingerprint)) return result;
|
||||
await fs.rm(targetRoot, { recursive: true, force: true });
|
||||
await fs.rename(tempRoot, targetRoot);
|
||||
return result;
|
||||
} finally {
|
||||
await fs.rm(tempRoot, { recursive: true, force: true }).catch(() => {});
|
||||
await releaseLock();
|
||||
}
|
||||
}
|
||||
|
||||
export async function removeMaintainerOnlySkillSymlinks(
|
||||
skillsHome: string,
|
||||
allowedSkillNames: Iterable<string>,
|
||||
|
||||
@@ -37,6 +37,7 @@ const ADAPTER_MANAGED_SESSION_POLICY: SessionCompactionPolicy = {
|
||||
};
|
||||
|
||||
export const LEGACY_SESSIONED_ADAPTER_TYPES = new Set([
|
||||
"acpx_local",
|
||||
"claude_local",
|
||||
"codex_local",
|
||||
"cursor",
|
||||
@@ -47,6 +48,11 @@ export const LEGACY_SESSIONED_ADAPTER_TYPES = new Set([
|
||||
]);
|
||||
|
||||
export const ADAPTER_SESSION_MANAGEMENT: Record<string, AdapterSessionManagement> = {
|
||||
acpx_local: {
|
||||
supportsSessionResume: true,
|
||||
nativeContextManagement: "confirmed",
|
||||
defaultSessionCompaction: ADAPTER_MANAGED_SESSION_POLICY,
|
||||
},
|
||||
claude_local: {
|
||||
supportsSessionResume: true,
|
||||
nativeContextManagement: "confirmed",
|
||||
|
||||
@@ -3,6 +3,8 @@ import { constants as fsConstants, createReadStream, createWriteStream, promises
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { CommandManagedRuntimeRunner } from "./command-managed-runtime.js";
|
||||
import type { RunProcessResult } from "./server-utils.js";
|
||||
|
||||
export interface SshConnectionConfig {
|
||||
host: string;
|
||||
@@ -21,7 +23,86 @@ export interface SshCommandResult {
|
||||
|
||||
export interface SshRemoteExecutionSpec extends SshConnectionConfig {
|
||||
remoteCwd: string;
|
||||
paperclipApiUrl?: string | null;
|
||||
}
|
||||
|
||||
export function createSshCommandManagedRuntimeRunner(input: {
|
||||
spec: SshRemoteExecutionSpec;
|
||||
defaultCwd?: string | null;
|
||||
maxBufferBytes?: number | null;
|
||||
}): CommandManagedRuntimeRunner {
|
||||
const defaultCwd = input.defaultCwd?.trim() || input.spec.remoteCwd;
|
||||
const maxBufferBytes =
|
||||
typeof input.maxBufferBytes === "number" && Number.isFinite(input.maxBufferBytes) && input.maxBufferBytes > 0
|
||||
? Math.trunc(input.maxBufferBytes)
|
||||
: 1024 * 1024;
|
||||
|
||||
return {
|
||||
execute: async (commandInput): Promise<RunProcessResult> => {
|
||||
const startedAt = new Date().toISOString();
|
||||
const command = commandInput.command.trim();
|
||||
const args = commandInput.args ?? [];
|
||||
const cwd = commandInput.cwd?.trim() || defaultCwd;
|
||||
const envEntries = Object.entries(commandInput.env ?? {})
|
||||
.filter((entry): entry is [string, string] => typeof entry[1] === "string");
|
||||
const envPrefix = envEntries.length > 0
|
||||
? `env ${envEntries.map(([key, value]) => `${key}=${shellQuote(value)}`).join(" ")} `
|
||||
: "";
|
||||
const exportPrefix = envEntries.length > 0
|
||||
? envEntries.map(([key, value]) => `export ${key}=${shellQuote(value)};`).join(" ") + " "
|
||||
: "";
|
||||
const commandScript = command === "sh" || command === "bash"
|
||||
? args[0] === "-lc" && typeof args[1] === "string"
|
||||
? `${exportPrefix}${args[1]}`
|
||||
: `${envPrefix}exec ${[shellQuote(command), ...args.map((arg) => shellQuote(arg))].join(" ")}`
|
||||
: `${envPrefix}exec ${[shellQuote(command), ...args.map((arg) => shellQuote(arg))].join(" ")}`;
|
||||
const remoteCommand = `${command === "bash" ? "bash" : "sh"} -lc ${
|
||||
shellQuote(`cd ${shellQuote(cwd)} && ${commandScript}`)
|
||||
}`;
|
||||
|
||||
try {
|
||||
const result = await runSshCommand(input.spec, remoteCommand, {
|
||||
timeoutMs: commandInput.timeoutMs,
|
||||
maxBuffer: maxBufferBytes,
|
||||
});
|
||||
if (result.stdout) await commandInput.onLog?.("stdout", result.stdout);
|
||||
if (result.stderr) await commandInput.onLog?.("stderr", result.stderr);
|
||||
return {
|
||||
exitCode: 0,
|
||||
signal: null,
|
||||
timedOut: false,
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
pid: null,
|
||||
startedAt,
|
||||
};
|
||||
} catch (error) {
|
||||
const failure = error as {
|
||||
stdout?: unknown;
|
||||
stderr?: unknown;
|
||||
code?: unknown;
|
||||
signal?: unknown;
|
||||
killed?: unknown;
|
||||
};
|
||||
const stdout = typeof failure.stdout === "string" ? failure.stdout : "";
|
||||
const stderr = typeof failure.stderr === "string"
|
||||
? failure.stderr
|
||||
: error instanceof Error
|
||||
? error.message
|
||||
: String(error);
|
||||
if (stdout) await commandInput.onLog?.("stdout", stdout);
|
||||
if (stderr) await commandInput.onLog?.("stderr", stderr);
|
||||
return {
|
||||
exitCode: typeof failure.code === "number" ? failure.code : null,
|
||||
signal: typeof failure.signal === "string" ? failure.signal : null,
|
||||
timedOut: failure.killed === true,
|
||||
stdout,
|
||||
stderr,
|
||||
pid: null,
|
||||
startedAt,
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export interface SshEnvLabSupport {
|
||||
@@ -83,10 +164,6 @@ export function parseSshRemoteExecutionSpec(value: unknown): SshRemoteExecutionS
|
||||
port: portValue,
|
||||
username,
|
||||
remoteCwd,
|
||||
paperclipApiUrl:
|
||||
typeof parsed.paperclipApiUrl === "string" && parsed.paperclipApiUrl.trim().length > 0
|
||||
? parsed.paperclipApiUrl.trim()
|
||||
: null,
|
||||
remoteWorkspacePath:
|
||||
typeof parsed.remoteWorkspacePath === "string" && parsed.remoteWorkspacePath.trim().length > 0
|
||||
? parsed.remoteWorkspacePath.trim()
|
||||
@@ -98,50 +175,6 @@ export function parseSshRemoteExecutionSpec(value: unknown): SshRemoteExecutionS
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeHttpUrlCandidate(value: string): string | null {
|
||||
const trimmed = value.trim();
|
||||
if (!trimmed) return null;
|
||||
try {
|
||||
const parsed = new URL(trimmed);
|
||||
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||
return null;
|
||||
}
|
||||
return parsed.origin;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export async function findReachablePaperclipApiUrlOverSsh(input: {
|
||||
config: SshConnectionConfig;
|
||||
candidates: string[];
|
||||
timeoutMs?: number;
|
||||
}): Promise<string | null> {
|
||||
const uniqueCandidates = Array.from(
|
||||
new Set(
|
||||
input.candidates
|
||||
.map((candidate) => normalizeHttpUrlCandidate(candidate))
|
||||
.filter((candidate): candidate is string => candidate !== null),
|
||||
),
|
||||
);
|
||||
|
||||
for (const candidate of uniqueCandidates) {
|
||||
const healthUrl = new URL("/api/health", candidate).toString();
|
||||
try {
|
||||
await runSshCommand(
|
||||
input.config,
|
||||
`sh -lc ${shellQuote(`curl -fsS -m ${Math.max(1, Math.ceil((input.timeoutMs ?? 5_000) / 1000))} ${shellQuote(healthUrl)} >/dev/null`)}`,
|
||||
{ timeoutMs: input.timeoutMs ?? 5_000 },
|
||||
);
|
||||
return candidate;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function execFileText(
|
||||
file: string,
|
||||
args: string[],
|
||||
@@ -476,8 +509,8 @@ async function importGitWorkspaceToSsh(input: {
|
||||
`if [ ! -d ${shellQuote(path.posix.join(input.remoteDir, ".git"))} ]; then git init ${shellQuote(input.remoteDir)} >/dev/null; fi`,
|
||||
`git -C ${shellQuote(input.remoteDir)} fetch --force "$tmp_bundle" '${tempRef}:${tempRef}' >/dev/null`,
|
||||
input.snapshot.branchName
|
||||
? `git -C ${shellQuote(input.remoteDir)} checkout -B ${shellQuote(input.snapshot.branchName)} ${shellQuote(input.snapshot.headCommit)} >/dev/null`
|
||||
: `git -C ${shellQuote(input.remoteDir)} -c advice.detachedHead=false checkout --detach ${shellQuote(input.snapshot.headCommit)} >/dev/null`,
|
||||
? `git -C ${shellQuote(input.remoteDir)} checkout --force -B ${shellQuote(input.snapshot.branchName)} ${shellQuote(input.snapshot.headCommit)} >/dev/null`
|
||||
: `git -C ${shellQuote(input.remoteDir)} -c advice.detachedHead=false checkout --force --detach ${shellQuote(input.snapshot.headCommit)} >/dev/null`,
|
||||
`git -C ${shellQuote(input.remoteDir)} reset --hard ${shellQuote(input.snapshot.headCommit)} >/dev/null`,
|
||||
`git -C ${shellQuote(input.remoteDir)} clean -fdx -e .paperclip-runtime >/dev/null`,
|
||||
].join("\n");
|
||||
|
||||
@@ -125,6 +125,7 @@ export interface AdapterExecutionContext {
|
||||
runtime: AdapterRuntime;
|
||||
config: Record<string, unknown>;
|
||||
context: Record<string, unknown>;
|
||||
runtimeCommandSpec?: AdapterRuntimeCommandSpec | null;
|
||||
executionTarget?: AdapterExecutionTarget | null;
|
||||
/**
|
||||
* Legacy remote transport view. Prefer `executionTarget`, which is the
|
||||
@@ -144,6 +145,16 @@ export interface AdapterModel {
|
||||
label: string;
|
||||
}
|
||||
|
||||
export type AdapterModelProfileKey = "cheap";
|
||||
|
||||
export interface AdapterModelProfileDefinition {
|
||||
key: AdapterModelProfileKey;
|
||||
label: string;
|
||||
description?: string;
|
||||
adapterConfig: Record<string, unknown>;
|
||||
source?: "adapter_default" | "discovered";
|
||||
}
|
||||
|
||||
export type AdapterEnvironmentCheckLevel = "info" | "warn" | "error";
|
||||
|
||||
export interface AdapterEnvironmentCheck {
|
||||
@@ -216,6 +227,20 @@ export interface AdapterEnvironmentTestContext {
|
||||
companyId: string;
|
||||
adapterType: string;
|
||||
config: Record<string, unknown>;
|
||||
/**
|
||||
* Optional execution target the adapter should run probes against.
|
||||
*
|
||||
* If omitted (or `kind === "local"`), the adapter tests on the Paperclip
|
||||
* host. For SSH/sandbox targets the adapter should run command/auth probes
|
||||
* inside the remote environment so the result reflects what an agent run
|
||||
* would actually see at execution time.
|
||||
*/
|
||||
executionTarget?: AdapterExecutionTarget | null;
|
||||
/**
|
||||
* Friendly name of the environment being tested (when `executionTarget` is set).
|
||||
* Surfaced in check messages so users see which environment the probe ran in.
|
||||
*/
|
||||
environmentName?: string | null;
|
||||
deployment?: {
|
||||
mode?: "local_trusted" | "authenticated";
|
||||
exposure?: "private" | "public";
|
||||
@@ -304,6 +329,23 @@ export interface AdapterConfigSchema {
|
||||
fields: ConfigFieldSchema[];
|
||||
}
|
||||
|
||||
export interface AdapterRuntimeCommandSpec {
|
||||
/**
|
||||
* The command Paperclip should execute for this adapter in the current config.
|
||||
*/
|
||||
command: string;
|
||||
/**
|
||||
* Optional command name/path to probe for availability before launch.
|
||||
* Defaults to `command` when omitted by the consumer.
|
||||
*/
|
||||
detectCommand?: string | null;
|
||||
/**
|
||||
* Optional shell snippet that can install or expose the adapter command in a
|
||||
* fresh remote runtime. It should be idempotent.
|
||||
*/
|
||||
installCommand?: string | null;
|
||||
}
|
||||
|
||||
export interface ServerAdapterModule {
|
||||
type: string;
|
||||
execute(ctx: AdapterExecutionContext): Promise<AdapterExecutionResult>;
|
||||
@@ -315,6 +357,8 @@ export interface ServerAdapterModule {
|
||||
supportsLocalAgentJwt?: boolean;
|
||||
models?: AdapterModel[];
|
||||
listModels?: () => Promise<AdapterModel[]>;
|
||||
modelProfiles?: AdapterModelProfileDefinition[];
|
||||
listModelProfiles?: () => Promise<AdapterModelProfileDefinition[]>;
|
||||
/**
|
||||
* Optional explicit refresh hook for model discovery.
|
||||
* Use this when the adapter caches discovered models and needs a bypass path
|
||||
@@ -380,6 +424,11 @@ export interface ServerAdapterModule {
|
||||
* rather than reading config.paperclipRuntimeSkills.
|
||||
*/
|
||||
requiresMaterializedRuntimeSkills?: boolean;
|
||||
/**
|
||||
* Optional: describe how this adapter's runtime command should be launched
|
||||
* and provisioned in fresh remote environments such as sandboxes.
|
||||
*/
|
||||
getRuntimeCommandSpec?: (config: Record<string, unknown>) => AdapterRuntimeCommandSpec | null;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -421,6 +470,14 @@ export interface CreateConfigValues {
|
||||
promptTemplate: string;
|
||||
model: string;
|
||||
thinkingEffort: string;
|
||||
/**
|
||||
* Optional cheap model profile config for new agents on adapters that
|
||||
* support model profiles. Persisted under
|
||||
* `runtimeConfig.modelProfiles.cheap.adapterConfig`, never on the primary
|
||||
* `adapterConfig`.
|
||||
*/
|
||||
cheapModel?: string;
|
||||
cheapModelEnabled?: boolean;
|
||||
chrome: boolean;
|
||||
dangerouslySkipPermissions: boolean;
|
||||
search: boolean;
|
||||
|
||||
64
packages/adapters/acpx-local/package.json
Normal file
@@ -0,0 +1,64 @@
|
||||
{
|
||||
"name": "@paperclipai/adapter-acpx-local",
|
||||
"version": "0.3.1",
|
||||
"license": "MIT",
|
||||
"homepage": "https://github.com/paperclipai/paperclip",
|
||||
"bugs": {
|
||||
"url": "https://github.com/paperclipai/paperclip/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/paperclipai/paperclip",
|
||||
"directory": "packages/adapters/acpx-local"
|
||||
},
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": "./src/index.ts",
|
||||
"./server": "./src/server/index.ts",
|
||||
"./ui": "./src/ui/index.ts",
|
||||
"./cli": "./src/cli/index.ts"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
},
|
||||
"./server": {
|
||||
"types": "./dist/server/index.d.ts",
|
||||
"import": "./dist/server/index.js"
|
||||
},
|
||||
"./ui": {
|
||||
"types": "./dist/ui/index.d.ts",
|
||||
"import": "./dist/ui/index.js"
|
||||
},
|
||||
"./cli": {
|
||||
"types": "./dist/cli/index.d.ts",
|
||||
"import": "./dist/cli/index.js"
|
||||
}
|
||||
},
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts"
|
||||
},
|
||||
"files": [
|
||||
"dist",
|
||||
"skills"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"clean": "rm -rf dist",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@agentclientprotocol/claude-agent-acp": "^0.31.4",
|
||||
"@paperclipai/adapter-utils": "workspace:*",
|
||||
"@zed-industries/codex-acp": "^0.12.0",
|
||||
"acpx": "^0.6.1",
|
||||
"picocolors": "^1.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.6.0",
|
||||
"typescript": "^5.7.3"
|
||||
}
|
||||
}
|
||||
121
packages/adapters/acpx-local/src/cli/format-event.test.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { printAcpxStreamEvent } from "./format-event.js";
|
||||
|
||||
function emit(payload: Record<string, unknown>): string {
|
||||
return JSON.stringify(payload);
|
||||
}
|
||||
|
||||
interface CapturedOutput {
|
||||
log: string[];
|
||||
stdout: string[];
|
||||
}
|
||||
|
||||
function captureOutput(): { capture: CapturedOutput; restore: () => void } {
|
||||
const log: string[] = [];
|
||||
const stdout: string[] = [];
|
||||
const logSpy = vi.spyOn(console, "log").mockImplementation((value?: unknown) => {
|
||||
log.push(String(value ?? ""));
|
||||
});
|
||||
const stdoutSpy = vi.spyOn(process.stdout, "write").mockImplementation(((chunk: unknown) => {
|
||||
stdout.push(String(chunk ?? ""));
|
||||
return true;
|
||||
}) as typeof process.stdout.write);
|
||||
return {
|
||||
capture: { log, stdout },
|
||||
restore: () => {
|
||||
logSpy.mockRestore();
|
||||
stdoutSpy.mockRestore();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function strip(value: string): string {
|
||||
return value.replace(/\x1b\[[0-9;]*m/g, "");
|
||||
}
|
||||
|
||||
describe("printAcpxStreamEvent", () => {
|
||||
let captured: CapturedOutput;
|
||||
let restore: () => void;
|
||||
|
||||
beforeEach(() => {
|
||||
const result = captureOutput();
|
||||
captured = result.capture;
|
||||
restore = result.restore;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restore();
|
||||
});
|
||||
|
||||
it("renders acpx.session as a labeled session header", () => {
|
||||
printAcpxStreamEvent(
|
||||
emit({
|
||||
type: "acpx.session",
|
||||
agent: "claude",
|
||||
acpSessionId: "acp-1",
|
||||
mode: "persistent",
|
||||
permissionMode: "approve-all",
|
||||
}),
|
||||
false,
|
||||
);
|
||||
expect(captured.log.map(strip)).toEqual(["claude session: acp-1 [persistent / approve-all]"]);
|
||||
});
|
||||
|
||||
it("streams output text_delta to stdout for live progress", () => {
|
||||
printAcpxStreamEvent(
|
||||
emit({ type: "acpx.text_delta", text: "hello", channel: "output" }),
|
||||
false,
|
||||
);
|
||||
expect(captured.log).toEqual([]);
|
||||
expect(captured.stdout.map(strip)).toEqual(["hello"]);
|
||||
});
|
||||
|
||||
it("renders thought text_delta on its own line", () => {
|
||||
printAcpxStreamEvent(
|
||||
emit({ type: "acpx.text_delta", text: "thinking…", channel: "thought" }),
|
||||
false,
|
||||
);
|
||||
expect(captured.log.map(strip)).toEqual(["thinking…"]);
|
||||
});
|
||||
|
||||
it("renders tool_call with status and id", () => {
|
||||
printAcpxStreamEvent(
|
||||
emit({
|
||||
type: "acpx.tool_call",
|
||||
name: "read",
|
||||
toolCallId: "tool-1",
|
||||
status: "running",
|
||||
text: "read README.md",
|
||||
}),
|
||||
false,
|
||||
);
|
||||
expect(captured.log.map(strip)).toEqual([
|
||||
"tool_call: read [running] (tool-1)",
|
||||
"read README.md",
|
||||
]);
|
||||
});
|
||||
|
||||
it("renders status events with optional context window", () => {
|
||||
printAcpxStreamEvent(
|
||||
emit({ type: "acpx.status", tag: "context_window", used: 100, size: 200000 }),
|
||||
false,
|
||||
);
|
||||
expect(captured.log.map(strip)).toEqual(["status: context_window (100/200000 ctx)"]);
|
||||
});
|
||||
|
||||
it("renders acpx.result and acpx.error", () => {
|
||||
printAcpxStreamEvent(emit({ type: "acpx.result", summary: "completed", stopReason: "end_turn" }), false);
|
||||
printAcpxStreamEvent(emit({ type: "acpx.error", message: "auth required" }), false);
|
||||
expect(captured.log.map(strip)).toEqual(["result: completed", "error: auth required"]);
|
||||
});
|
||||
|
||||
it("falls back to plain output for non-JSON lines", () => {
|
||||
printAcpxStreamEvent("not json", false);
|
||||
expect(captured.log).toEqual(["not json"]);
|
||||
});
|
||||
|
||||
it("still emits unknown / non-JSON lines when debug is enabled", () => {
|
||||
printAcpxStreamEvent("not json", true);
|
||||
expect(strip(captured.log[0])).toBe("not json");
|
||||
});
|
||||
});
|
||||
121
packages/adapters/acpx-local/src/cli/format-event.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import pc from "picocolors";
|
||||
|
||||
function parseJson(line: string): Record<string, unknown> | null {
|
||||
try {
|
||||
const parsed = JSON.parse(line);
|
||||
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return null;
|
||||
return parsed as Record<string, unknown>;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function asString(value: unknown, fallback = ""): string {
|
||||
return typeof value === "string" ? value : fallback;
|
||||
}
|
||||
|
||||
function asNumber(value: unknown, fallback = 0): number {
|
||||
return typeof value === "number" && Number.isFinite(value) ? value : fallback;
|
||||
}
|
||||
|
||||
function stringify(value: unknown): string {
|
||||
if (typeof value === "string") return value;
|
||||
if (value === null || value === undefined) return "";
|
||||
try {
|
||||
return JSON.stringify(value, null, 2);
|
||||
} catch {
|
||||
return String(value);
|
||||
}
|
||||
}
|
||||
|
||||
function pickToolUseId(parsed: Record<string, unknown>): string {
|
||||
return (
|
||||
asString(parsed.toolCallId) ||
|
||||
asString(parsed.toolUseId) ||
|
||||
asString(parsed.id)
|
||||
);
|
||||
}
|
||||
|
||||
function statusLine(parsed: Record<string, unknown>): string {
|
||||
const text = asString(parsed.text).trim();
|
||||
const tag = asString(parsed.tag).trim();
|
||||
const used = asNumber(parsed.used, -1);
|
||||
const size = asNumber(parsed.size, -1);
|
||||
const parts: string[] = [];
|
||||
if (text) parts.push(text);
|
||||
if (tag && !text) parts.push(tag);
|
||||
if (used >= 0 && size > 0) parts.push(`(${used}/${size} ctx)`);
|
||||
return parts.join(" ") || tag || "status";
|
||||
}
|
||||
|
||||
export function printAcpxStreamEvent(raw: string, debug: boolean): void {
|
||||
const line = raw.trim();
|
||||
if (!line) return;
|
||||
const parsed = parseJson(line);
|
||||
if (!parsed) {
|
||||
if (debug) console.log(pc.gray(line));
|
||||
else console.log(line);
|
||||
return;
|
||||
}
|
||||
|
||||
const type = asString(parsed.type);
|
||||
if (type === "acpx.session") {
|
||||
const agent = asString(parsed.agent, "acpx");
|
||||
const session =
|
||||
asString(parsed.acpSessionId) ||
|
||||
asString(parsed.sessionId) ||
|
||||
asString(parsed.runtimeSessionName);
|
||||
const mode = asString(parsed.mode);
|
||||
const permissionMode = asString(parsed.permissionMode);
|
||||
const tail = [mode, permissionMode].filter(Boolean).join(" / ");
|
||||
const suffix = tail ? ` [${tail}]` : "";
|
||||
console.log(pc.blue(`${agent} session${session ? `: ${session}` : ""}${suffix}`));
|
||||
return;
|
||||
}
|
||||
if (type === "acpx.text_delta") {
|
||||
const text = asString(parsed.text);
|
||||
if (!text) return;
|
||||
const channel = asString(parsed.channel) || asString(parsed.stream);
|
||||
const isThought = channel === "thought" || channel === "thinking";
|
||||
if (isThought) console.log(pc.gray(text));
|
||||
else process.stdout.write(pc.green(text));
|
||||
return;
|
||||
}
|
||||
if (type === "acpx.tool_call") {
|
||||
const name = asString(parsed.name, "acp_tool");
|
||||
const status = asString(parsed.status);
|
||||
const id = pickToolUseId(parsed);
|
||||
const header = status ? `tool_call: ${name} [${status}]` : `tool_call: ${name}`;
|
||||
const idSuffix = id ? ` (${id})` : "";
|
||||
const isError = status === "failed" || status === "cancelled";
|
||||
console.log((isError ? pc.red : pc.yellow)(`${header}${idSuffix}`));
|
||||
if (parsed.input !== undefined) {
|
||||
console.log(pc.gray(stringify(parsed.input)));
|
||||
} else {
|
||||
const text = asString(parsed.text).trim();
|
||||
if (text) console.log(pc.gray(text));
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (type === "acpx.tool_result") {
|
||||
const isError = parsed.isError === true || parsed.error !== undefined;
|
||||
console.log((isError ? pc.red : pc.cyan)(`tool_result: ${asString(parsed.name, "acp_tool")}`));
|
||||
const content = stringify(parsed.content ?? parsed.output ?? parsed.error);
|
||||
if (content) console.log((isError ? pc.red : pc.gray)(content));
|
||||
return;
|
||||
}
|
||||
if (type === "acpx.status") {
|
||||
console.log(pc.gray(`status: ${statusLine(parsed)}`));
|
||||
return;
|
||||
}
|
||||
if (type === "acpx.result") {
|
||||
const summary = asString(parsed.summary, asString(parsed.stopReason, asString(parsed.subtype, "complete")));
|
||||
console.log(pc.blue(`result: ${summary}`));
|
||||
return;
|
||||
}
|
||||
if (type === "acpx.error") {
|
||||
console.log(pc.red(`error: ${asString(parsed.message, line)}`));
|
||||
return;
|
||||
}
|
||||
console.log(debug ? pc.gray(line) : line);
|
||||
}
|
||||
1
packages/adapters/acpx-local/src/cli/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { printAcpxStreamEvent } from "./format-event.js";
|
||||
47
packages/adapters/acpx-local/src/index.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
export const type = "acpx_local";
|
||||
export const label = "ACPX (local)";
|
||||
|
||||
export const DEFAULT_ACPX_LOCAL_AGENT = "claude";
|
||||
export const DEFAULT_ACPX_LOCAL_MODE = "persistent";
|
||||
export const DEFAULT_ACPX_LOCAL_PERMISSION_MODE = "approve-all";
|
||||
export const DEFAULT_ACPX_LOCAL_NON_INTERACTIVE_PERMISSIONS = "deny";
|
||||
export const DEFAULT_ACPX_LOCAL_TIMEOUT_SEC = 0;
|
||||
|
||||
export const acpxAgentOptions = [
|
||||
{ id: "claude", label: "Claude via ACPX" },
|
||||
{ id: "codex", label: "Codex via ACPX" },
|
||||
{ id: "custom", label: "Custom ACP command" },
|
||||
] as const;
|
||||
|
||||
export const agentConfigurationDoc = `# acpx_local agent configuration
|
||||
|
||||
Adapter: acpx_local
|
||||
|
||||
Use when:
|
||||
- The agent should run through Agent Client Protocol via ACPX on the Paperclip host or a managed execution environment.
|
||||
- You want one built-in adapter that can target Claude, Codex, or a custom ACP server command.
|
||||
- You need Paperclip-managed session identity and live streamed ACP events in later ACPX runtime phases.
|
||||
|
||||
Don't use when:
|
||||
- You need today's stable Claude Code or Codex CLI wrapper behavior. Use claude_local or codex_local until acpx_local runtime execution is enabled.
|
||||
- The host cannot satisfy ACPX's Node >=22.12.0 prerequisite.
|
||||
- The agent runtime is not an ACP server and cannot be launched through ACPX.
|
||||
|
||||
Core fields:
|
||||
- agent (string, optional): claude, codex, or custom. Defaults to claude.
|
||||
- agentCommand (string, optional): custom ACP command when agent=custom, or an override for a built-in ACP agent command.
|
||||
- mode (string, optional): persistent or oneshot. Defaults to persistent.
|
||||
- cwd (string, optional): default absolute working directory fallback for the agent process.
|
||||
- permissionMode (string, optional): defaults to approve-all, meaning ACPX permission requests are auto-approved.
|
||||
- nonInteractivePermissions (string, optional): fallback behavior when ACPX cannot ask interactively. Supported values are deny and fail.
|
||||
- stateDir (string, optional): ACPX state directory. Defaults to a Paperclip-managed company/agent scoped location.
|
||||
- instructionsFilePath (string, optional): absolute path to a markdown instructions file used by Paperclip prompt construction.
|
||||
- promptTemplate (string, optional): run prompt template.
|
||||
- bootstrapPromptTemplate (string, optional): first-run bootstrap prompt template.
|
||||
- timeoutSec (number, optional): run timeout in seconds. Defaults to 0, meaning no adapter timeout.
|
||||
- env (object, optional): KEY=VALUE environment variables or secret bindings.
|
||||
|
||||
Dependency decision:
|
||||
- acpx_local declares direct dependencies on acpx, @agentclientprotocol/claude-agent-acp, and @zed-industries/codex-acp so the built-in adapter has deterministic package resolution instead of relying on globally installed ACP commands.
|
||||
- ACPX currently requires Node >=22.12.0. Paperclip keeps the repo-wide Node >=20 engine and surfaces the stricter runtime prerequisite through acpx_local diagnostics.
|
||||
`;
|
||||
102
packages/adapters/acpx-local/src/server/config-schema.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import type { AdapterConfigSchema } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
DEFAULT_ACPX_LOCAL_AGENT,
|
||||
DEFAULT_ACPX_LOCAL_MODE,
|
||||
DEFAULT_ACPX_LOCAL_NON_INTERACTIVE_PERMISSIONS,
|
||||
DEFAULT_ACPX_LOCAL_PERMISSION_MODE,
|
||||
DEFAULT_ACPX_LOCAL_TIMEOUT_SEC,
|
||||
acpxAgentOptions,
|
||||
} from "../index.js";
|
||||
|
||||
export function getConfigSchema(): AdapterConfigSchema {
|
||||
return {
|
||||
fields: [
|
||||
{
|
||||
key: "agent",
|
||||
label: "ACP agent",
|
||||
type: "select",
|
||||
default: DEFAULT_ACPX_LOCAL_AGENT,
|
||||
required: true,
|
||||
options: acpxAgentOptions.map((agent) => ({ value: agent.id, label: agent.label })),
|
||||
hint: "Choose the ACP agent launched through ACPX.",
|
||||
},
|
||||
{
|
||||
key: "agentCommand",
|
||||
label: "Agent command",
|
||||
type: "text",
|
||||
hint: "Required for custom agents; optional override for built-in Claude or Codex ACP commands.",
|
||||
},
|
||||
{
|
||||
key: "mode",
|
||||
label: "Session mode",
|
||||
type: "select",
|
||||
default: DEFAULT_ACPX_LOCAL_MODE,
|
||||
options: [
|
||||
{ value: "persistent", label: "Persistent" },
|
||||
{ value: "oneshot", label: "One shot" },
|
||||
],
|
||||
},
|
||||
{
|
||||
key: "permissionMode",
|
||||
label: "Permission mode",
|
||||
type: "select",
|
||||
default: DEFAULT_ACPX_LOCAL_PERMISSION_MODE,
|
||||
options: [
|
||||
{ value: "approve-all", label: "Approve all" },
|
||||
{ value: "default", label: "Approve reads" },
|
||||
],
|
||||
hint: "Defaults to maximum permissions. Approve reads grants read-only requests and asks for approval on writes.",
|
||||
},
|
||||
{
|
||||
key: "nonInteractivePermissions",
|
||||
label: "Non-interactive permissions",
|
||||
type: "select",
|
||||
default: DEFAULT_ACPX_LOCAL_NON_INTERACTIVE_PERMISSIONS,
|
||||
options: [
|
||||
{ value: "deny", label: "Deny" },
|
||||
{ value: "fail", label: "Fail" },
|
||||
],
|
||||
},
|
||||
{
|
||||
key: "cwd",
|
||||
label: "Working directory",
|
||||
type: "text",
|
||||
hint: "Absolute fallback directory. Paperclip execution workspaces can override this at runtime.",
|
||||
},
|
||||
{
|
||||
key: "stateDir",
|
||||
label: "State directory",
|
||||
type: "text",
|
||||
hint: "Optional ACPX session state directory. Defaults to Paperclip-managed company/agent scoped storage.",
|
||||
},
|
||||
{
|
||||
key: "instructionsFilePath",
|
||||
label: "Instructions file",
|
||||
type: "text",
|
||||
hint: "Optional absolute path to markdown instructions injected into the run prompt.",
|
||||
},
|
||||
{
|
||||
key: "promptTemplate",
|
||||
label: "Prompt template",
|
||||
type: "textarea",
|
||||
},
|
||||
{
|
||||
key: "bootstrapPromptTemplate",
|
||||
label: "Bootstrap prompt template",
|
||||
type: "textarea",
|
||||
},
|
||||
{
|
||||
key: "timeoutSec",
|
||||
label: "Timeout seconds",
|
||||
type: "number",
|
||||
default: DEFAULT_ACPX_LOCAL_TIMEOUT_SEC,
|
||||
},
|
||||
{
|
||||
key: "env",
|
||||
label: "Environment JSON",
|
||||
type: "textarea",
|
||||
hint: "Optional JSON object of environment values or secret bindings.",
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
425
packages/adapters/acpx-local/src/server/execute.test.ts
Normal file
@@ -0,0 +1,425 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { createAcpxLocalExecutor } from "./execute.js";
|
||||
|
||||
const tempRoots: string[] = [];
|
||||
|
||||
async function makeTempRoot() {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-acpx-skills-"));
|
||||
tempRoots.push(root);
|
||||
return root;
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(tempRoots.splice(0).map((root) => fs.rm(root, { recursive: true, force: true })));
|
||||
});
|
||||
|
||||
async function pathExists(candidate: string): Promise<boolean> {
|
||||
return fs.access(candidate).then(() => true).catch(() => false);
|
||||
}
|
||||
|
||||
async function onlyChildDir(parent: string): Promise<string> {
|
||||
const entries = await fs.readdir(parent);
|
||||
expect(entries).toHaveLength(1);
|
||||
return path.join(parent, entries[0]!);
|
||||
}
|
||||
|
||||
async function createSkill(root: string, name: string, body = `---\nrequired: false\n---\n# ${name}\n`) {
|
||||
const skillDir = path.join(root, name);
|
||||
await fs.mkdir(skillDir, { recursive: true });
|
||||
await fs.writeFile(path.join(skillDir, "SKILL.md"), body, "utf8");
|
||||
return {
|
||||
key: `paperclipai/test/${name}`,
|
||||
runtimeName: name,
|
||||
source: skillDir,
|
||||
required: false,
|
||||
};
|
||||
}
|
||||
|
||||
function buildRuntime() {
|
||||
return {
|
||||
ensureSession: async () => ({
|
||||
backendSessionId: "backend-session",
|
||||
agentSessionId: "agent-session",
|
||||
runtimeSessionName: "runtime-session",
|
||||
}),
|
||||
startTurn: () => ({
|
||||
events: (async function* () {
|
||||
yield { type: "done", stopReason: "end_turn" };
|
||||
})(),
|
||||
result: Promise.resolve({ status: "completed", stopReason: "end_turn" }),
|
||||
cancel: async () => {},
|
||||
}),
|
||||
close: async () => {},
|
||||
};
|
||||
}
|
||||
|
||||
async function runExecutor(
|
||||
config: Record<string, unknown>,
|
||||
options: {
|
||||
context?: Record<string, unknown>;
|
||||
executionTransport?: Record<string, unknown>;
|
||||
} = {},
|
||||
) {
|
||||
const runtimeOptions: Record<string, unknown>[] = [];
|
||||
const meta: Record<string, unknown>[] = [];
|
||||
const logs: Array<{ stream: string; text: string }> = [];
|
||||
const execute = createAcpxLocalExecutor({
|
||||
createRuntime: (options) => {
|
||||
runtimeOptions.push(options as unknown as Record<string, unknown>);
|
||||
return buildRuntime() as never;
|
||||
},
|
||||
});
|
||||
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
},
|
||||
runtime: {},
|
||||
config,
|
||||
context: options.context ?? {},
|
||||
executionTransport: options.executionTransport,
|
||||
onLog: async (stream: "stdout" | "stderr", text: string) => {
|
||||
logs.push({ stream, text });
|
||||
},
|
||||
onMeta: async (payload: unknown) => {
|
||||
meta.push(payload as Record<string, unknown>);
|
||||
},
|
||||
} as never);
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
return { logs, meta, runtimeOptions, result };
|
||||
}
|
||||
|
||||
describe("acpx_local runtime skill isolation", () => {
|
||||
it.skipIf(process.platform === "win32")("materializes ACPX Claude skills without symlinked descendants", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const skillRoot = path.join(root, "skills");
|
||||
const outsideRoot = path.join(root, "outside");
|
||||
await fs.mkdir(outsideRoot, { recursive: true });
|
||||
await fs.writeFile(path.join(outsideRoot, "secret.txt"), "do not expose", "utf8");
|
||||
const skill = await createSkill(skillRoot, "danger");
|
||||
await fs.symlink(path.join(outsideRoot, "secret.txt"), path.join(skill.source, "leak.txt"));
|
||||
await fs.symlink(outsideRoot, path.join(skill.source, "leak-dir"));
|
||||
|
||||
const stateDir = path.join(root, "state");
|
||||
const { meta } = await runExecutor({
|
||||
agent: "claude",
|
||||
stateDir,
|
||||
paperclipRuntimeSkills: [skill],
|
||||
paperclipSkillSync: { desiredSkills: [skill.key] },
|
||||
});
|
||||
|
||||
const mountedRoot = await onlyChildDir(path.join(stateDir, "runtime-skills", "claude"));
|
||||
const skillsHome = path.join(mountedRoot, ".claude", "skills");
|
||||
const materializedSkill = path.join(skillsHome, skill.runtimeName);
|
||||
expect(await fs.readFile(path.join(materializedSkill, "SKILL.md"), "utf8")).toContain("# danger");
|
||||
expect(await pathExists(path.join(materializedSkill, "leak.txt"))).toBe(false);
|
||||
expect(await pathExists(path.join(materializedSkill, "leak-dir"))).toBe(false);
|
||||
expect(String(meta[0]?.prompt ?? "")).toContain(`Skill root: ${skillsHome}`);
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("revokes removed ACPX Codex skills and skips symlinked descendants", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const skillRoot = path.join(root, "skills");
|
||||
const outsideRoot = path.join(root, "outside");
|
||||
const codexHome = path.join(root, "codex-home");
|
||||
await fs.mkdir(outsideRoot, { recursive: true });
|
||||
await fs.writeFile(path.join(outsideRoot, "secret.txt"), "do not expose", "utf8");
|
||||
const keep = await createSkill(skillRoot, "keep");
|
||||
const remove = await createSkill(skillRoot, "remove");
|
||||
await fs.symlink(path.join(outsideRoot, "secret.txt"), path.join(keep.source, "leak.txt"));
|
||||
await fs.symlink(outsideRoot, path.join(keep.source, "leak-dir"));
|
||||
|
||||
const baseConfig = {
|
||||
agent: "codex",
|
||||
stateDir: path.join(root, "state"),
|
||||
env: { CODEX_HOME: codexHome },
|
||||
paperclipRuntimeSkills: [keep, remove],
|
||||
};
|
||||
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
paperclipSkillSync: { desiredSkills: [keep.key, remove.key] },
|
||||
});
|
||||
expect(await pathExists(path.join(codexHome, "skills", remove.runtimeName, "SKILL.md"))).toBe(true);
|
||||
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
paperclipSkillSync: { desiredSkills: [keep.key] },
|
||||
});
|
||||
|
||||
expect(await pathExists(path.join(codexHome, "skills", keep.runtimeName, "SKILL.md"))).toBe(true);
|
||||
expect(await pathExists(path.join(codexHome, "skills", keep.runtimeName, "leak.txt"))).toBe(false);
|
||||
expect(await pathExists(path.join(codexHome, "skills", keep.runtimeName, "leak-dir"))).toBe(false);
|
||||
expect(await pathExists(path.join(codexHome, "skills", remove.runtimeName))).toBe(false);
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("removes legacy ACPX Codex skill symlinks when a skill is no longer desired", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const skillRoot = path.join(root, "skills");
|
||||
const codexHome = path.join(root, "codex-home");
|
||||
const legacy = await createSkill(skillRoot, "legacy");
|
||||
const skillsHome = path.join(codexHome, "skills");
|
||||
await fs.mkdir(skillsHome, { recursive: true });
|
||||
await fs.symlink(legacy.source, path.join(skillsHome, legacy.runtimeName));
|
||||
|
||||
await runExecutor({
|
||||
agent: "codex",
|
||||
stateDir: path.join(root, "state"),
|
||||
env: { CODEX_HOME: codexHome },
|
||||
paperclipRuntimeSkills: [legacy],
|
||||
paperclipSkillSync: { desiredSkills: [] },
|
||||
});
|
||||
|
||||
expect(await pathExists(path.join(skillsHome, legacy.runtimeName))).toBe(false);
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("replaces stale managed Codex auth files with source symlinks", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const sourceCodexHome = path.join(root, "source-codex-home");
|
||||
const paperclipHome = path.join(root, "paperclip-home");
|
||||
const paperclipInstanceId = "test-instance";
|
||||
const managedCodexHome = path.join(
|
||||
paperclipHome,
|
||||
"instances",
|
||||
paperclipInstanceId,
|
||||
"companies",
|
||||
"company-1",
|
||||
"codex-home",
|
||||
);
|
||||
await fs.mkdir(sourceCodexHome, { recursive: true });
|
||||
await fs.mkdir(managedCodexHome, { recursive: true });
|
||||
const sourceAuth = path.join(sourceCodexHome, "auth.json");
|
||||
const managedAuth = path.join(managedCodexHome, "auth.json");
|
||||
await fs.writeFile(sourceAuth, "{\"source\":true}", "utf8");
|
||||
await fs.writeFile(managedAuth, "{\"stale\":true}", "utf8");
|
||||
|
||||
const previousCodexHome = process.env.CODEX_HOME;
|
||||
const previousPaperclipHome = process.env.PAPERCLIP_HOME;
|
||||
const previousPaperclipInstanceId = process.env.PAPERCLIP_INSTANCE_ID;
|
||||
try {
|
||||
process.env.CODEX_HOME = sourceCodexHome;
|
||||
process.env.PAPERCLIP_HOME = paperclipHome;
|
||||
process.env.PAPERCLIP_INSTANCE_ID = paperclipInstanceId;
|
||||
await runExecutor({
|
||||
agent: "codex",
|
||||
stateDir: path.join(root, "state"),
|
||||
paperclipRuntimeSkills: [],
|
||||
paperclipSkillSync: { desiredSkills: [] },
|
||||
});
|
||||
} finally {
|
||||
if (previousCodexHome === undefined) delete process.env.CODEX_HOME;
|
||||
else process.env.CODEX_HOME = previousCodexHome;
|
||||
if (previousPaperclipHome === undefined) delete process.env.PAPERCLIP_HOME;
|
||||
else process.env.PAPERCLIP_HOME = previousPaperclipHome;
|
||||
if (previousPaperclipInstanceId === undefined) delete process.env.PAPERCLIP_INSTANCE_ID;
|
||||
else process.env.PAPERCLIP_INSTANCE_ID = previousPaperclipInstanceId;
|
||||
}
|
||||
|
||||
const authStat = await fs.lstat(managedAuth);
|
||||
expect(authStat.isSymbolicLink()).toBe(true);
|
||||
expect(path.resolve(path.dirname(managedAuth), await fs.readlink(managedAuth))).toBe(sourceAuth);
|
||||
});
|
||||
|
||||
it("keeps fresh credential wrapper scripts across ACPX agent changes", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const stateDir = path.join(root, "state");
|
||||
const baseConfig = {
|
||||
agentCommand: "node ./fake-acp.js",
|
||||
stateDir,
|
||||
};
|
||||
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
agent: "custom-a",
|
||||
env: { PAPERCLIP_API_KEY: "old-key" },
|
||||
});
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
agent: "custom-b",
|
||||
env: { PAPERCLIP_API_KEY: "new-key" },
|
||||
});
|
||||
|
||||
const wrappers = await fs.readdir(path.join(stateDir, "wrappers"));
|
||||
expect(wrappers.filter((name) => name.endsWith(".sh"))).toHaveLength(2);
|
||||
expect(wrappers.filter((name) => name.endsWith(".env"))).toHaveLength(2);
|
||||
expect(wrappers.some((name) => name.startsWith("custom-a-"))).toBe(true);
|
||||
expect(wrappers.some((name) => name.startsWith("custom-b-"))).toBe(true);
|
||||
const wrapperPath = path.join(stateDir, "wrappers", wrappers.find((name) => name.startsWith("custom-b-") && name.endsWith(".sh"))!);
|
||||
const envPath = path.join(stateDir, "wrappers", wrappers.find((name) => name.startsWith("custom-b-") && name.endsWith(".env"))!);
|
||||
const wrapper = await fs.readFile(wrapperPath, "utf8");
|
||||
const env = await fs.readFile(envPath, "utf8");
|
||||
expect((await fs.stat(envPath)).mode & 0o777).toBe(0o600);
|
||||
expect((await fs.stat(wrapperPath)).mode & 0o777).toBe(0o700);
|
||||
expect(wrapper).toContain("node ./fake-acp.js");
|
||||
expect(wrapper).not.toContain("PAPERCLIP_API_KEY");
|
||||
expect(wrapper).not.toContain("new-key");
|
||||
expect(wrapper).not.toContain("old-key");
|
||||
expect(env).toContain("PAPERCLIP_API_KEY='new-key'");
|
||||
expect(env).not.toContain("old-key");
|
||||
});
|
||||
|
||||
it("shapes ACPX wrapper workspace env for remote execution identities", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const stateDir = path.join(root, "state");
|
||||
const workspaceDir = path.join(root, "workspace");
|
||||
await fs.mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
await runExecutor(
|
||||
{
|
||||
agentCommand: "node ./fake-acp.js",
|
||||
stateDir,
|
||||
},
|
||||
{
|
||||
context: {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
strategy: "git_worktree",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
branchName: "feature/remote-acpx",
|
||||
worktreePath: workspaceDir,
|
||||
},
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
host: "127.0.0.1",
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteWorkspacePath: "/remote/workspace",
|
||||
remoteCwd: "/remote/workspace",
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const wrappers = await fs.readdir(path.join(stateDir, "wrappers"));
|
||||
const envPath = path.join(
|
||||
stateDir,
|
||||
"wrappers",
|
||||
wrappers.find((name) => name.endsWith(".env"))!,
|
||||
);
|
||||
const env = await fs.readFile(envPath, "utf8");
|
||||
|
||||
expect(env).toContain("PAPERCLIP_WORKSPACE_CWD='/remote/workspace'");
|
||||
expect(env).not.toContain("PAPERCLIP_WORKSPACE_WORKTREE_PATH=");
|
||||
});
|
||||
|
||||
it("cleans aged credential wrapper scripts across ACPX agent changes", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const stateDir = path.join(root, "state");
|
||||
const wrappersDir = path.join(stateDir, "wrappers");
|
||||
const baseConfig = {
|
||||
agentCommand: "node ./fake-acp.js",
|
||||
stateDir,
|
||||
};
|
||||
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
agent: "custom-a",
|
||||
env: { PAPERCLIP_API_KEY: "old-key" },
|
||||
});
|
||||
const oldDate = new Date(Date.now() - 16 * 60 * 1000);
|
||||
await Promise.all(
|
||||
(await fs.readdir(wrappersDir))
|
||||
.filter((name) => name.startsWith("custom-a-"))
|
||||
.map((name) => fs.utimes(path.join(wrappersDir, name), oldDate, oldDate)),
|
||||
);
|
||||
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
agent: "custom-b",
|
||||
env: { PAPERCLIP_API_KEY: "new-key" },
|
||||
});
|
||||
|
||||
const wrappers = await fs.readdir(wrappersDir);
|
||||
expect(wrappers.filter((name) => name.endsWith(".sh"))).toHaveLength(1);
|
||||
expect(wrappers.filter((name) => name.endsWith(".env"))).toHaveLength(1);
|
||||
expect(wrappers.some((name) => name.startsWith("custom-a-"))).toBe(false);
|
||||
expect(wrappers.some((name) => name.startsWith("custom-b-"))).toBe(true);
|
||||
});
|
||||
|
||||
it("keeps distinct wrapper env files for concurrent runs with different credentials", async () => {
|
||||
const root = await makeTempRoot();
|
||||
const stateDir = path.join(root, "state");
|
||||
const baseConfig = {
|
||||
agent: "custom-a",
|
||||
agentCommand: "node ./fake-acp.js",
|
||||
stateDir,
|
||||
};
|
||||
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
env: { PAPERCLIP_API_KEY: "first-key" },
|
||||
});
|
||||
await runExecutor({
|
||||
...baseConfig,
|
||||
env: { PAPERCLIP_API_KEY: "second-key" },
|
||||
});
|
||||
|
||||
const envFileNames = (await fs.readdir(path.join(stateDir, "wrappers"))).filter((name) => name.endsWith(".env"));
|
||||
expect(envFileNames).toHaveLength(2);
|
||||
const envFiles = await Promise.all(
|
||||
envFileNames.map(async (name) => fs.readFile(path.join(stateDir, "wrappers", name), "utf8")),
|
||||
);
|
||||
expect(envFiles.filter((contents) => contents.includes("PAPERCLIP_API_KEY='first-key'"))).toHaveLength(1);
|
||||
expect(envFiles.filter((contents) => contents.includes("PAPERCLIP_API_KEY='second-key'"))).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("passes Paperclip env through the ACP agent wrapper instead of process.env", async () => {
|
||||
let observedApiKeyDuringStream: string | undefined;
|
||||
const execute = createAcpxLocalExecutor({
|
||||
createRuntime: () => ({
|
||||
ensureSession: async () => ({
|
||||
backendSessionId: "backend-session",
|
||||
agentSessionId: "agent-session",
|
||||
runtimeSessionName: "runtime-session",
|
||||
}),
|
||||
startTurn: () => ({
|
||||
events: (async function* () {
|
||||
await Promise.resolve();
|
||||
observedApiKeyDuringStream = process.env.PAPERCLIP_API_KEY;
|
||||
yield { type: "done", stopReason: "end_turn" };
|
||||
})(),
|
||||
result: Promise.resolve({ status: "completed", stopReason: "end_turn" }),
|
||||
cancel: async () => {},
|
||||
}),
|
||||
close: async () => {},
|
||||
}) as never,
|
||||
});
|
||||
|
||||
const previousApiKey = process.env.PAPERCLIP_API_KEY;
|
||||
try {
|
||||
delete process.env.PAPERCLIP_API_KEY;
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
},
|
||||
runtime: {},
|
||||
config: { agent: "custom", agentCommand: "node ./fake-acp.js" },
|
||||
context: {},
|
||||
authToken: "runtime-key",
|
||||
onLog: async () => {},
|
||||
onMeta: async () => {},
|
||||
} as never);
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(observedApiKeyDuringStream).toBeUndefined();
|
||||
} finally {
|
||||
if (previousApiKey === undefined) delete process.env.PAPERCLIP_API_KEY;
|
||||
else process.env.PAPERCLIP_API_KEY = previousApiKey;
|
||||
}
|
||||
});
|
||||
});
|
||||
1223
packages/adapters/acpx-local/src/server/execute.ts
Normal file
5
packages/adapters/acpx-local/src/server/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export { execute, createAcpxLocalExecutor } from "./execute.js";
|
||||
export { testEnvironment } from "./test.js";
|
||||
export { getConfigSchema } from "./config-schema.js";
|
||||
export { sessionCodec } from "./session-codec.js";
|
||||
export { listAcpxSkills, syncAcpxSkills } from "./skills.js";
|
||||
50
packages/adapters/acpx-local/src/server/session-codec.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import type { AdapterSessionCodec } from "@paperclipai/adapter-utils";
|
||||
|
||||
function readString(value: unknown): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
function readRecord(value: unknown): Record<string, unknown> | null {
|
||||
return typeof value === "object" && value !== null && !Array.isArray(value) ? { ...(value as Record<string, unknown>) } : null;
|
||||
}
|
||||
|
||||
export const sessionCodec: AdapterSessionCodec = {
|
||||
deserialize(raw: unknown) {
|
||||
if (typeof raw !== "object" || raw === null || Array.isArray(raw)) return null;
|
||||
const record = raw as Record<string, unknown>;
|
||||
const runtimeSessionName = readString(record.runtimeSessionName);
|
||||
const acpSessionId = readString(record.acpSessionId);
|
||||
const agentSessionId = readString(record.agentSessionId);
|
||||
const remoteExecution = readRecord(record.remoteExecution);
|
||||
if (!runtimeSessionName && !acpSessionId && !agentSessionId) return null;
|
||||
|
||||
return {
|
||||
...(runtimeSessionName ? { runtimeSessionName } : {}),
|
||||
...(readString(record.sessionKey) ? { sessionKey: readString(record.sessionKey) } : {}),
|
||||
...(readString(record.acpxRecordId) ? { acpxRecordId: readString(record.acpxRecordId) } : {}),
|
||||
...(acpSessionId ? { acpSessionId } : {}),
|
||||
...(agentSessionId ? { agentSessionId } : {}),
|
||||
...(readString(record.agent) ? { agent: readString(record.agent) } : {}),
|
||||
...(readString(record.cwd) ? { cwd: readString(record.cwd) } : {}),
|
||||
...(readString(record.mode) ? { mode: readString(record.mode) } : {}),
|
||||
...(readString(record.stateDir) ? { stateDir: readString(record.stateDir) } : {}),
|
||||
...(readString(record.configFingerprint) ? { configFingerprint: readString(record.configFingerprint) } : {}),
|
||||
...(readString(record.workspaceId) ? { workspaceId: readString(record.workspaceId) } : {}),
|
||||
...(readString(record.repoUrl) ? { repoUrl: readString(record.repoUrl) } : {}),
|
||||
...(readString(record.repoRef) ? { repoRef: readString(record.repoRef) } : {}),
|
||||
...(remoteExecution ? { remoteExecution } : {}),
|
||||
};
|
||||
},
|
||||
serialize(params: Record<string, unknown> | null) {
|
||||
if (!params) return null;
|
||||
return this.deserialize(params);
|
||||
},
|
||||
getDisplayId(params: Record<string, unknown> | null) {
|
||||
if (!params) return null;
|
||||
return (
|
||||
readString(params.runtimeSessionName) ??
|
||||
readString(params.acpSessionId) ??
|
||||
readString(params.agentSessionId)
|
||||
);
|
||||
},
|
||||
};
|
||||
106
packages/adapters/acpx-local/src/server/skills.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import type {
|
||||
AdapterSkillContext,
|
||||
AdapterSkillEntry,
|
||||
AdapterSkillSnapshot,
|
||||
} from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
const __moduleDir = path.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
type AcpxSkillAgent = "claude" | "codex" | "custom";
|
||||
|
||||
function normalizeAcpxSkillAgent(config: Record<string, unknown>): AcpxSkillAgent {
|
||||
const configured = typeof config.agent === "string" ? config.agent.trim() : "";
|
||||
if (configured === "codex" || configured === "custom") return configured;
|
||||
if (configured === "claude" || configured === "") return "claude";
|
||||
return "claude";
|
||||
}
|
||||
|
||||
function configuredDetail(agent: AcpxSkillAgent): string {
|
||||
if (agent === "codex") {
|
||||
return "Will be linked into the effective CODEX_HOME/skills/ directory for the next ACPX Codex session.";
|
||||
}
|
||||
return "Will be mounted into the next ACPX Claude session.";
|
||||
}
|
||||
|
||||
function unsupportedDetail(): string {
|
||||
return "Desired state is stored in Paperclip only; custom ACP commands need an explicit skill integration contract before runtime sync is available.";
|
||||
}
|
||||
|
||||
async function buildAcpxSkillSnapshot(config: Record<string, unknown>): Promise<AdapterSkillSnapshot> {
|
||||
const acpxAgent = normalizeAcpxSkillAgent(config);
|
||||
const availableEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const availableByKey = new Map(availableEntries.map((entry) => [entry.key, entry]));
|
||||
const desiredSkills = resolvePaperclipDesiredSkillNames(config, availableEntries);
|
||||
const desiredSet = new Set(desiredSkills);
|
||||
const supported = acpxAgent !== "custom";
|
||||
const warnings: string[] = supported
|
||||
? []
|
||||
: [
|
||||
"Custom ACP commands do not expose a Paperclip skill integration contract yet; selected skills are tracked only.",
|
||||
];
|
||||
|
||||
const entries: AdapterSkillEntry[] = availableEntries.map((entry) => {
|
||||
const desired = desiredSet.has(entry.key);
|
||||
return {
|
||||
key: entry.key,
|
||||
runtimeName: entry.runtimeName,
|
||||
desired,
|
||||
managed: true,
|
||||
state: desired ? "configured" : "available",
|
||||
origin: entry.required ? "paperclip_required" : "company_managed",
|
||||
originLabel: entry.required ? "Required by Paperclip" : "Managed by Paperclip",
|
||||
readOnly: false,
|
||||
sourcePath: entry.source,
|
||||
targetPath: null,
|
||||
detail: desired ? (supported ? configuredDetail(acpxAgent) : unsupportedDetail()) : null,
|
||||
required: Boolean(entry.required),
|
||||
requiredReason: entry.requiredReason ?? null,
|
||||
};
|
||||
});
|
||||
|
||||
for (const desiredSkill of desiredSkills) {
|
||||
if (availableByKey.has(desiredSkill)) continue;
|
||||
warnings.push(`Desired skill "${desiredSkill}" is not available from the Paperclip skills directory.`);
|
||||
entries.push({
|
||||
key: desiredSkill,
|
||||
runtimeName: null,
|
||||
desired: true,
|
||||
managed: true,
|
||||
state: "missing",
|
||||
origin: "external_unknown",
|
||||
originLabel: "External or unavailable",
|
||||
readOnly: false,
|
||||
sourcePath: null,
|
||||
targetPath: null,
|
||||
detail: "Paperclip cannot find this skill in the local runtime skills directory.",
|
||||
});
|
||||
}
|
||||
|
||||
entries.sort((left, right) => left.key.localeCompare(right.key));
|
||||
|
||||
return {
|
||||
adapterType: "acpx_local",
|
||||
supported,
|
||||
mode: supported ? "ephemeral" : "unsupported",
|
||||
desiredSkills,
|
||||
entries,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
export async function listAcpxSkills(ctx: AdapterSkillContext): Promise<AdapterSkillSnapshot> {
|
||||
return buildAcpxSkillSnapshot(ctx.config);
|
||||
}
|
||||
|
||||
export async function syncAcpxSkills(
|
||||
ctx: AdapterSkillContext,
|
||||
_desiredSkills: string[],
|
||||
): Promise<AdapterSkillSnapshot> {
|
||||
return buildAcpxSkillSnapshot(ctx.config);
|
||||
}
|
||||
49
packages/adapters/acpx-local/src/server/test.test.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { testEnvironment } from "./test.js";
|
||||
|
||||
const originalNodeVersion = process.version;
|
||||
|
||||
function setNodeVersion(version: string): void {
|
||||
Object.defineProperty(process, "version", {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: version,
|
||||
});
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
setNodeVersion(originalNodeVersion);
|
||||
});
|
||||
|
||||
describe("acpx_local environment diagnostics", () => {
|
||||
it("does not force healthy default Claude diagnostics to warn", async () => {
|
||||
setNodeVersion("v22.12.0");
|
||||
|
||||
const result = await testEnvironment({
|
||||
adapterType: "acpx_local",
|
||||
companyId: "test-company",
|
||||
config: { agent: "claude" },
|
||||
});
|
||||
|
||||
expect(result.status).toBe("pass");
|
||||
expect(result.checks).toContainEqual(
|
||||
expect.objectContaining({
|
||||
code: "acpx_agent_selected",
|
||||
level: "info",
|
||||
message: "ACP agent selected: claude",
|
||||
}),
|
||||
);
|
||||
expect(result.checks).toContainEqual(
|
||||
expect.objectContaining({
|
||||
code: "acpx_runtime_scaffold",
|
||||
level: "info",
|
||||
}),
|
||||
);
|
||||
expect(result.checks).not.toContainEqual(
|
||||
expect.objectContaining({
|
||||
code: "acpx_runtime_scaffold",
|
||||
level: "warn",
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
295
packages/adapters/acpx-local/src/server/test.ts
Normal file
@@ -0,0 +1,295 @@
|
||||
import { createRequire } from "node:module";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type {
|
||||
AdapterEnvironmentCheck,
|
||||
AdapterEnvironmentTestContext,
|
||||
AdapterEnvironmentTestResult,
|
||||
} from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
asString,
|
||||
parseObject,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const MIN_NODE_MAJOR = 22;
|
||||
const MIN_NODE_MINOR = 12;
|
||||
const MIN_NODE_PATCH = 0;
|
||||
|
||||
function summarizeStatus(checks: AdapterEnvironmentCheck[]): AdapterEnvironmentTestResult["status"] {
|
||||
if (checks.some((check) => check.level === "error")) return "fail";
|
||||
if (checks.some((check) => check.level === "warn")) return "warn";
|
||||
return "pass";
|
||||
}
|
||||
|
||||
function nodeVersionMeetsMinimum(version: string): boolean {
|
||||
const [major = 0, minor = 0, patch = 0] = version
|
||||
.replace(/^v/, "")
|
||||
.split(".")
|
||||
.map((part) => Number.parseInt(part, 10));
|
||||
if (major > MIN_NODE_MAJOR) return true;
|
||||
if (major < MIN_NODE_MAJOR) return false;
|
||||
if (minor > MIN_NODE_MINOR) return true;
|
||||
if (minor < MIN_NODE_MINOR) return false;
|
||||
return patch >= MIN_NODE_PATCH;
|
||||
}
|
||||
|
||||
function isNonEmpty(value: unknown): value is string {
|
||||
return typeof value === "string" && value.trim().length > 0;
|
||||
}
|
||||
|
||||
function getStringEnv(configEnv: Record<string, string>, key: string): string | undefined {
|
||||
const configured = configEnv[key];
|
||||
if (typeof configured === "string") return configured;
|
||||
return process.env[key];
|
||||
}
|
||||
|
||||
function credentialSource(configEnv: Record<string, string>, key: string): string {
|
||||
return typeof configEnv[key] === "string" ? "adapter config env" : "server environment";
|
||||
}
|
||||
|
||||
async function readJsonObject(filePath: string): Promise<Record<string, unknown> | null> {
|
||||
try {
|
||||
const parsed = JSON.parse(await fs.readFile(filePath, "utf8")) as unknown;
|
||||
return typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)
|
||||
? parsed as Record<string, unknown>
|
||||
: null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function readNestedString(record: Record<string, unknown>, pathSegments: string[]): string | null {
|
||||
let current: unknown = record;
|
||||
for (const segment of pathSegments) {
|
||||
if (typeof current !== "object" || current === null || Array.isArray(current)) return null;
|
||||
current = (current as Record<string, unknown>)[segment];
|
||||
}
|
||||
return isNonEmpty(current) ? current.trim() : null;
|
||||
}
|
||||
|
||||
async function hasClaudeSubscriptionCredentials(configDir: string): Promise<boolean> {
|
||||
for (const filename of [".credentials.json", "credentials.json"]) {
|
||||
const credentials = await readJsonObject(path.join(configDir, filename));
|
||||
if (!credentials) continue;
|
||||
if (readNestedString(credentials, ["claudeAiOauth", "accessToken"])) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function hasCodexNativeCredentials(codexHome: string): Promise<boolean> {
|
||||
const auth = await readJsonObject(path.join(codexHome, "auth.json"));
|
||||
if (!auth) return false;
|
||||
return Boolean(
|
||||
readNestedString(auth, ["accessToken"]) ||
|
||||
readNestedString(auth, ["tokens", "access_token"]) ||
|
||||
readNestedString(auth, ["OPENAI_API_KEY"]),
|
||||
);
|
||||
}
|
||||
|
||||
async function buildCredentialHintChecks(
|
||||
agent: string,
|
||||
configEnv: Record<string, string>,
|
||||
): Promise<AdapterEnvironmentCheck[]> {
|
||||
if (agent === "claude") {
|
||||
const bedrockFlag = getStringEnv(configEnv, "CLAUDE_CODE_USE_BEDROCK");
|
||||
const bedrockBaseUrl = getStringEnv(configEnv, "ANTHROPIC_BEDROCK_BASE_URL");
|
||||
const hasBedrock =
|
||||
bedrockFlag === "1" ||
|
||||
/^true$/i.test(bedrockFlag ?? "") ||
|
||||
isNonEmpty(bedrockBaseUrl);
|
||||
const bedrockSourceKey = isNonEmpty(bedrockFlag)
|
||||
? "CLAUDE_CODE_USE_BEDROCK"
|
||||
: "ANTHROPIC_BEDROCK_BASE_URL";
|
||||
const anthropicApiKey = getStringEnv(configEnv, "ANTHROPIC_API_KEY");
|
||||
const claudeConfigDir = isNonEmpty(getStringEnv(configEnv, "CLAUDE_CONFIG_DIR"))
|
||||
? path.resolve(getStringEnv(configEnv, "CLAUDE_CONFIG_DIR") as string)
|
||||
: path.join(os.homedir(), ".claude");
|
||||
|
||||
if (hasBedrock) {
|
||||
return [{
|
||||
code: "acpx_claude_bedrock_auth_detected",
|
||||
level: "info",
|
||||
message: "Claude credential hint: Bedrock auth indicators are configured.",
|
||||
detail: `Detected in ${credentialSource(configEnv, bedrockSourceKey)}.`,
|
||||
hint: "Ensure AWS credentials and AWS_REGION are available to the ACPX-launched Claude agent.",
|
||||
}];
|
||||
}
|
||||
|
||||
if (isNonEmpty(anthropicApiKey)) {
|
||||
return [{
|
||||
code: "acpx_claude_anthropic_api_key_detected",
|
||||
level: "info",
|
||||
message: "Claude credential hint: ANTHROPIC_API_KEY is set.",
|
||||
detail: `Detected in ${credentialSource(configEnv, "ANTHROPIC_API_KEY")}.`,
|
||||
}];
|
||||
}
|
||||
|
||||
if (await hasClaudeSubscriptionCredentials(claudeConfigDir)) {
|
||||
return [{
|
||||
code: "acpx_claude_subscription_auth_detected",
|
||||
level: "info",
|
||||
message: "Claude credential hint: local Claude subscription credentials were found.",
|
||||
detail: `Credentials found in ${claudeConfigDir}.`,
|
||||
}];
|
||||
}
|
||||
|
||||
return [{
|
||||
code: "acpx_claude_credentials_missing",
|
||||
level: "info",
|
||||
message: "Claude credential hint: no Claude API, Bedrock, or local subscription credentials were detected.",
|
||||
hint: "Set ANTHROPIC_API_KEY, configure Bedrock, or run `claude login` before starting an ACPX Claude agent.",
|
||||
}];
|
||||
}
|
||||
|
||||
if (agent === "codex") {
|
||||
const openAiApiKey = getStringEnv(configEnv, "OPENAI_API_KEY");
|
||||
const codexHome = isNonEmpty(getStringEnv(configEnv, "CODEX_HOME"))
|
||||
? path.resolve(getStringEnv(configEnv, "CODEX_HOME") as string)
|
||||
: path.join(os.homedir(), ".codex");
|
||||
|
||||
if (isNonEmpty(openAiApiKey)) {
|
||||
return [{
|
||||
code: "acpx_codex_openai_api_key_detected",
|
||||
level: "info",
|
||||
message: "Codex credential hint: OPENAI_API_KEY is set.",
|
||||
detail: `Detected in ${credentialSource(configEnv, "OPENAI_API_KEY")}.`,
|
||||
}];
|
||||
}
|
||||
|
||||
if (await hasCodexNativeCredentials(codexHome)) {
|
||||
return [{
|
||||
code: "acpx_codex_native_auth_detected",
|
||||
level: "info",
|
||||
message: "Codex credential hint: local Codex auth configuration was found.",
|
||||
detail: `Credentials found in ${path.join(codexHome, "auth.json")}.`,
|
||||
}];
|
||||
}
|
||||
|
||||
return [{
|
||||
code: "acpx_codex_credentials_missing",
|
||||
level: "info",
|
||||
message: "Codex credential hint: no OpenAI API key or local Codex auth configuration was detected.",
|
||||
hint: "Set OPENAI_API_KEY or run `codex login` before starting an ACPX Codex agent.",
|
||||
}];
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
function resolvePackage(name: string): AdapterEnvironmentCheck {
|
||||
try {
|
||||
const resolved = require.resolve(`${name}/package.json`);
|
||||
return {
|
||||
code: `acpx_package_${name.replace(/[^a-z0-9]+/gi, "_").toLowerCase()}_present`,
|
||||
level: "info",
|
||||
message: `${name} is resolvable.`,
|
||||
detail: resolved,
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
code: `acpx_package_${name.replace(/[^a-z0-9]+/gi, "_").toLowerCase()}_missing`,
|
||||
level: "error",
|
||||
message: `${name} is not resolvable from the acpx_local adapter package.`,
|
||||
hint: "Run pnpm install so the ACPX adapter dependencies are installed.",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function checkDirectory(pathValue: string, code: string, label: string): Promise<AdapterEnvironmentCheck | null> {
|
||||
const dir = pathValue.trim();
|
||||
if (!dir) return null;
|
||||
try {
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
await fs.access(dir);
|
||||
return {
|
||||
code,
|
||||
level: "info",
|
||||
message: `${label} is writable: ${dir}`,
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
code: `${code}_invalid`,
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : `${label} is not writable.`,
|
||||
detail: dir,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function testEnvironment(
|
||||
ctx: AdapterEnvironmentTestContext,
|
||||
): Promise<AdapterEnvironmentTestResult> {
|
||||
const config = parseObject(ctx.config);
|
||||
const envConfig = parseObject(config.env);
|
||||
const configEnv: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") configEnv[key] = value;
|
||||
}
|
||||
const checks: AdapterEnvironmentCheck[] = [];
|
||||
const nodeVersion = process.version;
|
||||
|
||||
checks.push({
|
||||
code: nodeVersionMeetsMinimum(nodeVersion) ? "acpx_node_supported" : "acpx_node_unsupported",
|
||||
level: nodeVersionMeetsMinimum(nodeVersion) ? "info" : "error",
|
||||
message: nodeVersionMeetsMinimum(nodeVersion)
|
||||
? `Node ${nodeVersion} satisfies ACPX's >=22.12.0 requirement.`
|
||||
: `Node ${nodeVersion} does not satisfy ACPX's >=22.12.0 requirement.`,
|
||||
hint: nodeVersionMeetsMinimum(nodeVersion)
|
||||
? undefined
|
||||
: "Run acpx_local agents with Node >=22.12.0 or use claude_local/codex_local on Node 20.",
|
||||
});
|
||||
|
||||
checks.push(resolvePackage("acpx"));
|
||||
checks.push(resolvePackage("@agentclientprotocol/claude-agent-acp"));
|
||||
checks.push(resolvePackage("@zed-industries/codex-acp"));
|
||||
|
||||
const agent = asString(config.agent, "claude");
|
||||
if (!["claude", "codex", "custom"].includes(agent)) {
|
||||
checks.push({
|
||||
code: "acpx_agent_invalid",
|
||||
level: "error",
|
||||
message: `Unsupported ACP agent: ${agent}`,
|
||||
hint: "Use agent=claude, agent=codex, or agent=custom.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "acpx_agent_selected",
|
||||
level: "info",
|
||||
message: `ACP agent selected: ${agent}`,
|
||||
});
|
||||
checks.push(...await buildCredentialHintChecks(agent, configEnv));
|
||||
}
|
||||
|
||||
if (agent === "custom" && !asString(config.agentCommand, "")) {
|
||||
checks.push({
|
||||
code: "acpx_custom_command_missing",
|
||||
level: "error",
|
||||
message: "agentCommand is required when agent=custom.",
|
||||
});
|
||||
}
|
||||
|
||||
const stateDirCheck = await checkDirectory(asString(config.stateDir, ""), "acpx_state_dir_writable", "ACPX state directory");
|
||||
if (stateDirCheck) checks.push(stateDirCheck);
|
||||
|
||||
const permissionMode = asString(config.permissionMode, "approve-all");
|
||||
checks.push({
|
||||
code: "acpx_permission_mode",
|
||||
level: "info",
|
||||
message: `Effective permission mode: ${permissionMode || "approve-all"}`,
|
||||
});
|
||||
|
||||
checks.push({
|
||||
code: "acpx_runtime_scaffold",
|
||||
level: "info",
|
||||
message: "acpx_local runtime execution is available through the bundled ACPX runtime.",
|
||||
});
|
||||
|
||||
return {
|
||||
adapterType: ctx.adapterType,
|
||||
status: summarizeStatus(checks),
|
||||
checks,
|
||||
testedAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
139
packages/adapters/acpx-local/src/ui/build-config.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import type { CreateConfigValues } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
DEFAULT_ACPX_LOCAL_AGENT,
|
||||
DEFAULT_ACPX_LOCAL_MODE,
|
||||
DEFAULT_ACPX_LOCAL_NON_INTERACTIVE_PERMISSIONS,
|
||||
DEFAULT_ACPX_LOCAL_PERMISSION_MODE,
|
||||
DEFAULT_ACPX_LOCAL_TIMEOUT_SEC,
|
||||
} from "../index.js";
|
||||
|
||||
function parseCommaArgs(value: string): string[] {
|
||||
return value
|
||||
.split(",")
|
||||
.map((item) => item.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
function parseEnvVars(text: string): Record<string, string> {
|
||||
const env: Record<string, string> = {};
|
||||
for (const line of text.split(/\r?\n/)) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith("#")) continue;
|
||||
const eq = trimmed.indexOf("=");
|
||||
if (eq <= 0) continue;
|
||||
const key = trimmed.slice(0, eq).trim();
|
||||
const value = trimmed.slice(eq + 1);
|
||||
if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue;
|
||||
env[key] = value;
|
||||
}
|
||||
return env;
|
||||
}
|
||||
|
||||
function parseEnvBindings(bindings: unknown): Record<string, unknown> {
|
||||
if (typeof bindings !== "object" || bindings === null || Array.isArray(bindings)) return {};
|
||||
const env: Record<string, unknown> = {};
|
||||
for (const [key, raw] of Object.entries(bindings)) {
|
||||
if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue;
|
||||
if (typeof raw === "string") {
|
||||
env[key] = { type: "plain", value: raw };
|
||||
continue;
|
||||
}
|
||||
if (typeof raw !== "object" || raw === null || Array.isArray(raw)) continue;
|
||||
const rec = raw as Record<string, unknown>;
|
||||
if (rec.type === "plain" && typeof rec.value === "string") {
|
||||
env[key] = { type: "plain", value: rec.value };
|
||||
continue;
|
||||
}
|
||||
if (rec.type === "secret_ref" && typeof rec.secretId === "string") {
|
||||
env[key] = {
|
||||
type: "secret_ref",
|
||||
secretId: rec.secretId,
|
||||
...(typeof rec.version === "number" || rec.version === "latest"
|
||||
? { version: rec.version }
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
return env;
|
||||
}
|
||||
|
||||
function parseJsonObject(text: string): Record<string, unknown> | null {
|
||||
const trimmed = text.trim();
|
||||
if (!trimmed) return null;
|
||||
try {
|
||||
const parsed = JSON.parse(trimmed);
|
||||
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return null;
|
||||
return parsed as Record<string, unknown>;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function readNumber(value: unknown, fallback: number): number {
|
||||
if (typeof value === "number" && Number.isFinite(value)) return value;
|
||||
if (typeof value === "string" && value.trim()) {
|
||||
const parsed = Number(value);
|
||||
if (Number.isFinite(parsed)) return parsed;
|
||||
}
|
||||
return fallback;
|
||||
}
|
||||
|
||||
export function buildAcpxLocalConfig(v: CreateConfigValues): Record<string, unknown> {
|
||||
const schemaValues = v.adapterSchemaValues ?? {};
|
||||
const ac: Record<string, unknown> = {
|
||||
agent: schemaValues.agent || DEFAULT_ACPX_LOCAL_AGENT,
|
||||
mode: schemaValues.mode || DEFAULT_ACPX_LOCAL_MODE,
|
||||
permissionMode: schemaValues.permissionMode || DEFAULT_ACPX_LOCAL_PERMISSION_MODE,
|
||||
nonInteractivePermissions:
|
||||
schemaValues.nonInteractivePermissions || DEFAULT_ACPX_LOCAL_NON_INTERACTIVE_PERMISSIONS,
|
||||
timeoutSec: readNumber(schemaValues.timeoutSec, DEFAULT_ACPX_LOCAL_TIMEOUT_SEC),
|
||||
};
|
||||
|
||||
for (const key of [
|
||||
"agentCommand",
|
||||
"cwd",
|
||||
"stateDir",
|
||||
"instructionsFilePath",
|
||||
"promptTemplate",
|
||||
"bootstrapPromptTemplate",
|
||||
]) {
|
||||
const value = schemaValues[key];
|
||||
if (typeof value === "string" && value.trim()) ac[key] = value.trim();
|
||||
}
|
||||
|
||||
if (!ac.cwd && v.cwd) ac.cwd = v.cwd;
|
||||
if (!ac.instructionsFilePath && v.instructionsFilePath) ac.instructionsFilePath = v.instructionsFilePath;
|
||||
if (!ac.promptTemplate && v.promptTemplate) ac.promptTemplate = v.promptTemplate;
|
||||
if (!ac.bootstrapPromptTemplate && v.bootstrapPrompt) ac.bootstrapPromptTemplate = v.bootstrapPrompt;
|
||||
|
||||
const env = parseEnvBindings(v.envBindings);
|
||||
const legacy = parseEnvVars(v.envVars);
|
||||
for (const [key, value] of Object.entries(legacy)) {
|
||||
if (!Object.prototype.hasOwnProperty.call(env, key)) {
|
||||
env[key] = { type: "plain", value };
|
||||
}
|
||||
}
|
||||
if (typeof schemaValues.env === "string") {
|
||||
const schemaEnv = parseJsonObject(schemaValues.env);
|
||||
if (schemaEnv) Object.assign(env, schemaEnv);
|
||||
} else if (typeof schemaValues.env === "object" && schemaValues.env !== null && !Array.isArray(schemaValues.env)) {
|
||||
Object.assign(env, schemaValues.env as Record<string, unknown>);
|
||||
}
|
||||
if (Object.keys(env).length > 0) ac.env = env;
|
||||
|
||||
if (v.workspaceStrategyType === "git_worktree") {
|
||||
ac.workspaceStrategy = {
|
||||
type: "git_worktree",
|
||||
...(v.workspaceBaseRef ? { baseRef: v.workspaceBaseRef } : {}),
|
||||
...(v.workspaceBranchTemplate ? { branchTemplate: v.workspaceBranchTemplate } : {}),
|
||||
...(v.worktreeParentDir ? { worktreeParentDir: v.worktreeParentDir } : {}),
|
||||
};
|
||||
}
|
||||
const runtimeServices = parseJsonObject(v.runtimeServicesJson ?? "");
|
||||
if (runtimeServices && Array.isArray(runtimeServices.services)) {
|
||||
ac.workspaceRuntime = runtimeServices;
|
||||
}
|
||||
if (v.command) ac.command = v.command;
|
||||
if (v.extraArgs) ac.extraArgs = parseCommaArgs(v.extraArgs);
|
||||
return ac;
|
||||
}
|
||||
2
packages/adapters/acpx-local/src/ui/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { parseAcpxStdoutLine } from "./parse-stdout.js";
|
||||
export { buildAcpxLocalConfig } from "./build-config.js";
|
||||
160
packages/adapters/acpx-local/src/ui/parse-stdout.test.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { parseAcpxStdoutLine } from "./parse-stdout.js";
|
||||
|
||||
const TS = "2026-04-30T00:00:00.000Z";
|
||||
|
||||
function emit(payload: Record<string, unknown>): string {
|
||||
return JSON.stringify(payload);
|
||||
}
|
||||
|
||||
describe("parseAcpxStdoutLine", () => {
|
||||
it("renders an init entry from acpx.session", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({
|
||||
type: "acpx.session",
|
||||
agent: "claude",
|
||||
acpSessionId: "acp-1",
|
||||
runtimeSessionName: "runtime-1",
|
||||
mode: "persistent",
|
||||
permissionMode: "approve-all",
|
||||
}),
|
||||
TS,
|
||||
);
|
||||
expect(entries).toEqual([
|
||||
{
|
||||
kind: "init",
|
||||
ts: TS,
|
||||
model: "claude (persistent / approve-all)",
|
||||
sessionId: "acp-1",
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("routes output text_delta to the assistant transcript", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.text_delta", text: "hello", channel: "output", tag: "agent_message_chunk" }),
|
||||
TS,
|
||||
);
|
||||
expect(entries).toEqual([
|
||||
{ kind: "assistant", ts: TS, text: "hello", delta: true },
|
||||
]);
|
||||
});
|
||||
|
||||
it("routes thought text_delta to the thinking transcript", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.text_delta", text: "thinking…", channel: "thought" }),
|
||||
TS,
|
||||
);
|
||||
expect(entries).toEqual([
|
||||
{ kind: "thinking", ts: TS, text: "thinking…", delta: true },
|
||||
]);
|
||||
});
|
||||
|
||||
it("falls back to stream when channel is missing", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.text_delta", text: "thinking…", stream: "thought" }),
|
||||
TS,
|
||||
);
|
||||
expect(entries[0]).toMatchObject({ kind: "thinking" });
|
||||
});
|
||||
|
||||
it("renders status events as system text with optional ctx usage", () => {
|
||||
expect(
|
||||
parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.status", text: "thinking", tag: "agent_thought_chunk" }),
|
||||
TS,
|
||||
),
|
||||
).toEqual([{ kind: "system", ts: TS, text: "thinking" }]);
|
||||
|
||||
expect(
|
||||
parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.status", tag: "context_window", used: 12000, size: 200000 }),
|
||||
TS,
|
||||
),
|
||||
).toEqual([{ kind: "system", ts: TS, text: "context_window (12000/200000 ctx)" }]);
|
||||
});
|
||||
|
||||
it("emits a tool_call entry that preserves toolCallId, status, and input", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({
|
||||
type: "acpx.tool_call",
|
||||
name: "read",
|
||||
toolCallId: "tool-1",
|
||||
status: "running",
|
||||
text: "read README.md",
|
||||
}),
|
||||
TS,
|
||||
);
|
||||
expect(entries).toEqual([
|
||||
{
|
||||
kind: "tool_call",
|
||||
ts: TS,
|
||||
name: "read",
|
||||
toolUseId: "tool-1",
|
||||
input: { text: "read README.md", status: "running" },
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("emits a paired tool_result entry when a tool_call reports terminal status", () => {
|
||||
const completed = parseAcpxStdoutLine(
|
||||
emit({
|
||||
type: "acpx.tool_call",
|
||||
name: "read",
|
||||
toolCallId: "tool-1",
|
||||
status: "completed",
|
||||
text: "ok",
|
||||
}),
|
||||
TS,
|
||||
);
|
||||
expect(completed[1]).toEqual({
|
||||
kind: "tool_result",
|
||||
ts: TS,
|
||||
toolUseId: "tool-1",
|
||||
toolName: "read",
|
||||
content: "ok",
|
||||
isError: false,
|
||||
});
|
||||
|
||||
const failed = parseAcpxStdoutLine(
|
||||
emit({
|
||||
type: "acpx.tool_call",
|
||||
name: "edit",
|
||||
toolCallId: "tool-2",
|
||||
status: "failed",
|
||||
text: "permission denied",
|
||||
}),
|
||||
TS,
|
||||
);
|
||||
expect(failed[1]).toMatchObject({ kind: "tool_result", isError: true, content: "permission denied" });
|
||||
});
|
||||
|
||||
it("renders acpx.result with summary fallback to stopReason", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.result", summary: "completed", stopReason: "end_turn" }),
|
||||
TS,
|
||||
);
|
||||
expect(entries[0]).toMatchObject({ kind: "result", text: "completed", subtype: "end_turn", isError: false });
|
||||
});
|
||||
|
||||
it("treats acpx.error as a stderr entry", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.error", message: "auth required", code: "ACP_AUTH" }),
|
||||
TS,
|
||||
);
|
||||
expect(entries).toEqual([{ kind: "stderr", ts: TS, text: "auth required" }]);
|
||||
});
|
||||
|
||||
it("renders unknown acpx.* events as system entries", () => {
|
||||
const entries = parseAcpxStdoutLine(
|
||||
emit({ type: "acpx.misc", message: "unhandled" }),
|
||||
TS,
|
||||
);
|
||||
expect(entries).toEqual([{ kind: "system", ts: TS, text: "unhandled" }]);
|
||||
});
|
||||
|
||||
it("falls back to a stdout entry for non-JSON lines", () => {
|
||||
const entries = parseAcpxStdoutLine("not json", TS);
|
||||
expect(entries).toEqual([{ kind: "stdout", ts: TS, text: "not json" }]);
|
||||
});
|
||||
});
|
||||
158
packages/adapters/acpx-local/src/ui/parse-stdout.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import type { TranscriptEntry } from "@paperclipai/adapter-utils";
|
||||
|
||||
function parseJson(line: string): Record<string, unknown> | null {
|
||||
try {
|
||||
const parsed = JSON.parse(line);
|
||||
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return null;
|
||||
return parsed as Record<string, unknown>;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function asString(value: unknown, fallback = ""): string {
|
||||
return typeof value === "string" ? value : fallback;
|
||||
}
|
||||
|
||||
function asNumber(value: unknown, fallback = 0): number {
|
||||
return typeof value === "number" && Number.isFinite(value) ? value : fallback;
|
||||
}
|
||||
|
||||
function stringify(value: unknown): string {
|
||||
if (typeof value === "string") return value;
|
||||
if (value === null || value === undefined) return "";
|
||||
try {
|
||||
return JSON.stringify(value, null, 2);
|
||||
} catch {
|
||||
return String(value);
|
||||
}
|
||||
}
|
||||
|
||||
function pickToolUseId(parsed: Record<string, unknown>): string {
|
||||
return (
|
||||
asString(parsed.toolCallId) ||
|
||||
asString(parsed.toolUseId) ||
|
||||
asString(parsed.id)
|
||||
);
|
||||
}
|
||||
|
||||
function statusText(parsed: Record<string, unknown>): string {
|
||||
const text = asString(parsed.text).trim();
|
||||
const tag = asString(parsed.tag).trim();
|
||||
const used = asNumber(parsed.used, -1);
|
||||
const size = asNumber(parsed.size, -1);
|
||||
const parts: string[] = [];
|
||||
if (text) parts.push(text);
|
||||
if (tag && !text) parts.push(tag);
|
||||
if (used >= 0 && size > 0) parts.push(`(${used}/${size} ctx)`);
|
||||
return parts.join(" ") || tag || "status";
|
||||
}
|
||||
|
||||
export function parseAcpxStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
const parsed = parseJson(line);
|
||||
if (!parsed) return [{ kind: "stdout", ts, text: line }];
|
||||
|
||||
const type = asString(parsed.type);
|
||||
if (type === "acpx.session") {
|
||||
const agent = asString(parsed.agent, "acpx");
|
||||
const mode = asString(parsed.mode);
|
||||
const permissionMode = asString(parsed.permissionMode);
|
||||
const tail = [mode, permissionMode].filter(Boolean).join(" / ");
|
||||
return [{
|
||||
kind: "init",
|
||||
ts,
|
||||
model: tail ? `${agent} (${tail})` : agent,
|
||||
sessionId:
|
||||
asString(parsed.acpSessionId) ||
|
||||
asString(parsed.sessionId) ||
|
||||
asString(parsed.runtimeSessionName),
|
||||
}];
|
||||
}
|
||||
|
||||
if (type === "acpx.text_delta") {
|
||||
const text = asString(parsed.text);
|
||||
if (!text) return [];
|
||||
const channel = asString(parsed.channel) || asString(parsed.stream);
|
||||
return [{
|
||||
kind: channel === "thought" || channel === "thinking" ? "thinking" : "assistant",
|
||||
ts,
|
||||
text,
|
||||
delta: true,
|
||||
}];
|
||||
}
|
||||
|
||||
if (type === "acpx.tool_call") {
|
||||
const status = asString(parsed.status);
|
||||
const text = asString(parsed.text);
|
||||
const name = asString(parsed.name, "acp_tool");
|
||||
const toolUseId = pickToolUseId(parsed);
|
||||
const input =
|
||||
parsed.input !== undefined
|
||||
? parsed.input
|
||||
: text || status
|
||||
? { ...(text ? { text } : {}), ...(status ? { status } : {}) }
|
||||
: {};
|
||||
const entries: TranscriptEntry[] = [
|
||||
{
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name,
|
||||
toolUseId: toolUseId || undefined,
|
||||
input,
|
||||
},
|
||||
];
|
||||
if (status === "completed" || status === "failed" || status === "cancelled") {
|
||||
entries.push({
|
||||
kind: "tool_result",
|
||||
ts,
|
||||
toolUseId: toolUseId || name,
|
||||
toolName: name,
|
||||
content: text || status,
|
||||
isError: status !== "completed",
|
||||
});
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
|
||||
if (type === "acpx.tool_result") {
|
||||
return [{
|
||||
kind: "tool_result",
|
||||
ts,
|
||||
toolUseId: pickToolUseId(parsed) || asString(parsed.name, "acp_tool"),
|
||||
toolName: asString(parsed.name) || undefined,
|
||||
content: stringify(parsed.content ?? parsed.output ?? parsed.error),
|
||||
isError: parsed.isError === true || parsed.error !== undefined,
|
||||
}];
|
||||
}
|
||||
|
||||
if (type === "acpx.status") {
|
||||
return [{ kind: "system", ts, text: statusText(parsed) }];
|
||||
}
|
||||
|
||||
if (type === "acpx.result") {
|
||||
return [{
|
||||
kind: "result",
|
||||
ts,
|
||||
text: asString(parsed.summary, asString(parsed.stopReason, asString(parsed.text))),
|
||||
inputTokens: asNumber(parsed.inputTokens),
|
||||
outputTokens: asNumber(parsed.outputTokens),
|
||||
cachedTokens: asNumber(parsed.cachedTokens),
|
||||
costUsd: asNumber(parsed.costUsd),
|
||||
subtype: asString(parsed.subtype, asString(parsed.stopReason, "acpx.result")),
|
||||
isError: parsed.isError === true,
|
||||
errors: Array.isArray(parsed.errors)
|
||||
? parsed.errors.map((error) => stringify(error)).filter(Boolean)
|
||||
: [],
|
||||
}];
|
||||
}
|
||||
|
||||
if (type === "acpx.error") {
|
||||
return [{ kind: "stderr", ts, text: asString(parsed.message, line) }];
|
||||
}
|
||||
|
||||
if (type.startsWith("acpx.")) {
|
||||
return [{ kind: "system", ts, text: asString(parsed.message, type) }];
|
||||
}
|
||||
|
||||
return [{ kind: "stdout", ts, text: line }];
|
||||
}
|
||||
8
packages/adapters/acpx-local/tsconfig.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": "../../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src"
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
7
packages/adapters/acpx-local/vitest.config.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { defineConfig } from "vitest/config";
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
environment: "node",
|
||||
},
|
||||
});
|
||||
@@ -1,3 +1,5 @@
|
||||
import type { AdapterModelProfileDefinition } from "@paperclipai/adapter-utils";
|
||||
|
||||
export const type = "claude_local";
|
||||
export const label = "Claude Code (local)";
|
||||
|
||||
@@ -10,6 +12,19 @@ export const models = [
|
||||
{ id: "claude-haiku-4-5-20251001", label: "Claude Haiku 4.5" },
|
||||
];
|
||||
|
||||
export const modelProfiles: AdapterModelProfileDefinition[] = [
|
||||
{
|
||||
key: "cheap",
|
||||
label: "Cheap",
|
||||
description: "Use Claude Sonnet as the lower-cost Claude Code lane while preserving the agent's primary model.",
|
||||
adapterConfig: {
|
||||
model: "claude-sonnet-4-6",
|
||||
effort: "low",
|
||||
},
|
||||
source: "adapter_default",
|
||||
},
|
||||
];
|
||||
|
||||
export const agentConfigurationDoc = `# claude_local agent configuration
|
||||
|
||||
Adapter: claude_local
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
import * as fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { prepareClaudeConfigSeed } from "./claude-config.js";
|
||||
|
||||
describe("prepareClaudeConfigSeed", () => {
|
||||
const cleanupDirs: string[] = [];
|
||||
|
||||
afterEach(async () => {
|
||||
vi.restoreAllMocks();
|
||||
while (cleanupDirs.length > 0) {
|
||||
const dir = cleanupDirs.pop();
|
||||
if (!dir) continue;
|
||||
await fs.rm(dir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
});
|
||||
|
||||
function createEnv(root: string, sourceDir: string): NodeJS.ProcessEnv {
|
||||
return {
|
||||
HOME: root,
|
||||
PAPERCLIP_HOME: path.join(root, "paperclip-home"),
|
||||
PAPERCLIP_INSTANCE_ID: "test-instance",
|
||||
CLAUDE_CONFIG_DIR: sourceDir,
|
||||
};
|
||||
}
|
||||
|
||||
it("reuses the same snapshot path when the seeded files are unchanged", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-claude-config-seed-"));
|
||||
cleanupDirs.push(root);
|
||||
const sourceDir = path.join(root, "claude-source");
|
||||
await fs.mkdir(sourceDir, { recursive: true });
|
||||
await fs.writeFile(path.join(sourceDir, "settings.json"), JSON.stringify({ theme: "light" }), "utf8");
|
||||
|
||||
const onLog = vi.fn(async () => {});
|
||||
const env = createEnv(root, sourceDir);
|
||||
|
||||
const first = await prepareClaudeConfigSeed(env, onLog, "company-1");
|
||||
const second = await prepareClaudeConfigSeed(env, onLog, "company-1");
|
||||
|
||||
expect(first).toBe(second);
|
||||
await expect(fs.readFile(path.join(first, "settings.json"), "utf8"))
|
||||
.resolves.toBe(JSON.stringify({ theme: "light" }));
|
||||
});
|
||||
|
||||
it("keeps an existing snapshot intact when the seeded files change", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-claude-config-race-"));
|
||||
cleanupDirs.push(root);
|
||||
const sourceDir = path.join(root, "claude-source");
|
||||
await fs.mkdir(sourceDir, { recursive: true });
|
||||
await fs.writeFile(path.join(sourceDir, "settings.json"), JSON.stringify({ theme: "light" }), "utf8");
|
||||
|
||||
const onLog = vi.fn(async () => {});
|
||||
const env = createEnv(root, sourceDir);
|
||||
const first = await prepareClaudeConfigSeed(env, onLog, "company-1");
|
||||
|
||||
await fs.writeFile(path.join(sourceDir, "settings.json"), JSON.stringify({ theme: "dark" }), "utf8");
|
||||
const second = await prepareClaudeConfigSeed(env, onLog, "company-1");
|
||||
|
||||
expect(second).not.toBe(first);
|
||||
await expect(fs.readFile(path.join(first, "settings.json"), "utf8"))
|
||||
.resolves.toBe(JSON.stringify({ theme: "light" }));
|
||||
await expect(fs.readFile(path.join(second, "settings.json"), "utf8"))
|
||||
.resolves.toBe(JSON.stringify({ theme: "dark" }));
|
||||
});
|
||||
});
|
||||
135
packages/adapters/claude-local/src/server/claude-config.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { AdapterExecutionContext } from "@paperclipai/adapter-utils";
|
||||
|
||||
const DEFAULT_PAPERCLIP_INSTANCE_ID = "default";
|
||||
const SEEDED_SHARED_FILES = [
|
||||
".credentials.json",
|
||||
"credentials.json",
|
||||
"settings.json",
|
||||
"settings.local.json",
|
||||
"CLAUDE.md",
|
||||
] as const;
|
||||
|
||||
function nonEmpty(value: string | undefined): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
async function pathExists(candidate: string): Promise<boolean> {
|
||||
return fs.access(candidate).then(() => true).catch(() => false);
|
||||
}
|
||||
|
||||
function isAlreadyExistsError(error: unknown): boolean {
|
||||
if (!error || typeof error !== "object") return false;
|
||||
const code = "code" in error ? error.code : null;
|
||||
return code === "EEXIST" || code === "ENOTEMPTY";
|
||||
}
|
||||
|
||||
async function collectSeedFiles(sourceDir: string): Promise<Array<{ name: string; sourcePath: string }>> {
|
||||
const files: Array<{ name: string; sourcePath: string }> = [];
|
||||
for (const name of SEEDED_SHARED_FILES) {
|
||||
const sourcePath = path.join(sourceDir, name);
|
||||
if (!(await pathExists(sourcePath))) continue;
|
||||
files.push({ name, sourcePath });
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
async function buildSeedSnapshotKey(files: Array<{ name: string; sourcePath: string }>): Promise<string> {
|
||||
if (files.length === 0) return "empty";
|
||||
const hash = createHash("sha256");
|
||||
for (const file of files) {
|
||||
hash.update(file.name);
|
||||
hash.update("\0");
|
||||
hash.update(await fs.readFile(file.sourcePath));
|
||||
hash.update("\0");
|
||||
}
|
||||
return hash.digest("hex").slice(0, 16);
|
||||
}
|
||||
|
||||
async function materializeSeedSnapshot(input: {
|
||||
rootDir: string;
|
||||
snapshotKey: string;
|
||||
files: Array<{ name: string; sourcePath: string }>;
|
||||
}): Promise<string> {
|
||||
const targetDir = path.join(input.rootDir, input.snapshotKey);
|
||||
if (await pathExists(targetDir)) {
|
||||
return targetDir;
|
||||
}
|
||||
|
||||
await fs.mkdir(input.rootDir, { recursive: true });
|
||||
const stagingDir = await fs.mkdtemp(path.join(input.rootDir, ".tmp-"));
|
||||
try {
|
||||
for (const file of input.files) {
|
||||
await fs.copyFile(file.sourcePath, path.join(stagingDir, file.name));
|
||||
}
|
||||
try {
|
||||
await fs.rename(stagingDir, targetDir);
|
||||
} catch (error) {
|
||||
if (!isAlreadyExistsError(error)) {
|
||||
throw error;
|
||||
}
|
||||
await fs.rm(stagingDir, { recursive: true, force: true });
|
||||
}
|
||||
} catch (error) {
|
||||
await fs.rm(stagingDir, { recursive: true, force: true }).catch(() => undefined);
|
||||
throw error;
|
||||
}
|
||||
|
||||
return targetDir;
|
||||
}
|
||||
|
||||
export function resolveSharedClaudeConfigDir(
|
||||
env: NodeJS.ProcessEnv = process.env,
|
||||
): string {
|
||||
const fromEnv = nonEmpty(env.CLAUDE_CONFIG_DIR);
|
||||
return fromEnv ? path.resolve(fromEnv) : path.join(os.homedir(), ".claude");
|
||||
}
|
||||
|
||||
export function resolveManagedClaudeConfigSeedDir(
|
||||
env: NodeJS.ProcessEnv,
|
||||
companyId?: string,
|
||||
): string {
|
||||
const paperclipHome = nonEmpty(env.PAPERCLIP_HOME) ?? path.resolve(os.homedir(), ".paperclip");
|
||||
const instanceId = nonEmpty(env.PAPERCLIP_INSTANCE_ID) ?? DEFAULT_PAPERCLIP_INSTANCE_ID;
|
||||
return companyId
|
||||
? path.resolve(paperclipHome, "instances", instanceId, "companies", companyId, "claude-config-seed")
|
||||
: path.resolve(paperclipHome, "instances", instanceId, "claude-config-seed");
|
||||
}
|
||||
|
||||
export async function prepareClaudeConfigSeed(
|
||||
env: NodeJS.ProcessEnv,
|
||||
onLog: AdapterExecutionContext["onLog"],
|
||||
companyId?: string,
|
||||
): Promise<string> {
|
||||
const sourceDir = resolveSharedClaudeConfigDir(env);
|
||||
const targetRootDir = resolveManagedClaudeConfigSeedDir(env, companyId);
|
||||
|
||||
if (path.resolve(sourceDir) === path.resolve(targetRootDir)) {
|
||||
return targetRootDir;
|
||||
}
|
||||
|
||||
const copiedFiles = await collectSeedFiles(sourceDir);
|
||||
const snapshotKey = await buildSeedSnapshotKey(copiedFiles);
|
||||
const targetDir = await materializeSeedSnapshot({
|
||||
rootDir: targetRootDir,
|
||||
snapshotKey,
|
||||
files: copiedFiles,
|
||||
});
|
||||
|
||||
if (copiedFiles.length > 0) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Prepared Claude config seed "${targetDir}" from "${sourceDir}" (${copiedFiles.map((file) => file.name).join(", ")}).\n`,
|
||||
);
|
||||
} else {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] No local Claude config seed files were found in "${sourceDir}". Remote Claude auth may still require login.\n`,
|
||||
);
|
||||
}
|
||||
|
||||
return targetDir;
|
||||
}
|
||||
@@ -10,6 +10,7 @@ const {
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
@@ -29,6 +30,14 @@ const {
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
startAdapterExecutionTargetPaperclipBridge: vi.fn(async () => ({
|
||||
env: {
|
||||
PAPERCLIP_API_URL: "http://127.0.0.1:4310",
|
||||
PAPERCLIP_API_KEY: "bridge-token",
|
||||
PAPERCLIP_API_BRIDGE_MODE: "queue_v1",
|
||||
},
|
||||
stop: async () => {},
|
||||
})),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
@@ -55,6 +64,16 @@ vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/execution-target", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/execution-target")>(
|
||||
"@paperclipai/adapter-utils/execution-target",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("claude remote execution", () => {
|
||||
@@ -73,8 +92,10 @@ describe("claude remote execution", () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-claude-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const alternateWorkspaceDir = path.join(rootDir, "workspace-other");
|
||||
const instructionsPath = path.join(rootDir, "instructions.md");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await mkdir(alternateWorkspaceDir, { recursive: true });
|
||||
await writeFile(instructionsPath, "Use the remote workspace.\n", "utf8");
|
||||
|
||||
await execute({
|
||||
@@ -100,7 +121,27 @@ describe("claude remote execution", () => {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
strategy: "git_worktree",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
branchName: "feature/remote-claude",
|
||||
worktreePath: workspaceDir,
|
||||
},
|
||||
paperclipWorkspaces: [
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: workspaceDir,
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
cwd: alternateWorkspaceDir,
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "feature/other",
|
||||
},
|
||||
],
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
@@ -112,7 +153,6 @@ describe("claude remote execution", () => {
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
@@ -136,8 +176,25 @@ describe("claude remote execution", () => {
|
||||
expect(call?.[2]).toContain("/remote/workspace/.paperclip-runtime/claude/skills/agent-instructions.md");
|
||||
expect(call?.[2]).toContain("--add-dir");
|
||||
expect(call?.[2]).toContain("/remote/workspace/.paperclip-runtime/claude/skills");
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].env.PAPERCLIP_WORKSPACE_CWD).toBe("/remote/workspace");
|
||||
expect(call?.[3].env.PAPERCLIP_WORKSPACE_WORKTREE_PATH).toBeUndefined();
|
||||
expect(JSON.parse(call?.[3].env.PAPERCLIP_WORKSPACES_JSON ?? "[]")).toEqual([
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: "/remote/workspace",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "feature/other",
|
||||
},
|
||||
]);
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://127.0.0.1:4310");
|
||||
expect(call?.[3].env.PAPERCLIP_API_BRIDGE_MODE).toBe("queue_v1");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(startAdapterExecutionTargetPaperclipBridge).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: workspaceDir,
|
||||
|
||||
@@ -5,17 +5,20 @@ import type { AdapterExecutionContext, AdapterExecutionResult } from "@paperclip
|
||||
import type { RunProcessResult } from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
adapterExecutionTargetUsesPaperclipBridge,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetRuntimeCommandInstalled,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
@@ -33,9 +36,11 @@ import {
|
||||
ensurePathInEnv,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
shapePaperclipWorkspaceEnvForExecution,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { shellQuote } from "@paperclipai/adapter-utils/ssh";
|
||||
import {
|
||||
parseClaudeStreamJson,
|
||||
describeClaudeFailure,
|
||||
@@ -45,6 +50,7 @@ import {
|
||||
isClaudeTransientUpstreamError,
|
||||
isClaudeUnknownSessionError,
|
||||
} from "./parse.js";
|
||||
import { prepareClaudeConfigSeed } from "./claude-config.js";
|
||||
import { resolveClaudeDesiredSkillNames } from "./skills.js";
|
||||
import { isBedrockModelId } from "./models.js";
|
||||
import { prepareClaudePromptBundle } from "./prompt-cache.js";
|
||||
@@ -56,8 +62,10 @@ interface ClaudeExecutionInput {
|
||||
agent: AdapterExecutionContext["agent"];
|
||||
config: Record<string, unknown>;
|
||||
context: Record<string, unknown>;
|
||||
runtimeCommandSpec?: AdapterExecutionContext["runtimeCommandSpec"];
|
||||
executionTarget?: ReturnType<typeof readAdapterExecutionTarget>;
|
||||
authToken?: string;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
}
|
||||
|
||||
interface ClaudeRuntimeConfig {
|
||||
@@ -107,7 +115,8 @@ function resolveClaudeBillingType(env: Record<string, string>): "api" | "subscri
|
||||
}
|
||||
|
||||
async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<ClaudeRuntimeConfig> {
|
||||
const { runId, agent, config, context, executionTarget, authToken } = input;
|
||||
const { runId, agent, config, context, runtimeCommandSpec, executionTarget, authToken } = input;
|
||||
const onLog = input.onLog ?? (async () => {});
|
||||
|
||||
const command = asString(config.command, "claude");
|
||||
const workspaceContext = parseObject(context.paperclipWorkspace);
|
||||
@@ -140,6 +149,15 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
const useConfiguredInsteadOfAgentHome = workspaceSource === "agent_home" && configuredCwd.length > 0;
|
||||
const effectiveWorkspaceCwd = useConfiguredInsteadOfAgentHome ? "" : workspaceCwd;
|
||||
const cwd = effectiveWorkspaceCwd || configuredCwd || process.cwd();
|
||||
const executionTargetIsRemote = adapterExecutionTargetIsRemote(executionTarget);
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
const shapedWorkspaceEnv = shapePaperclipWorkspaceEnvForExecution({
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceWorktreePath,
|
||||
workspaceHints,
|
||||
executionTargetIsRemote,
|
||||
executionCwd: effectiveExecutionCwd,
|
||||
});
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
|
||||
const envConfig = parseObject(config.env);
|
||||
@@ -195,18 +213,18 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
}
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceCwd: shapedWorkspaceEnv.workspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceStrategy,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
workspaceBranch,
|
||||
workspaceWorktreePath,
|
||||
workspaceWorktreePath: shapedWorkspaceEnv.workspaceWorktreePath,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
if (shapedWorkspaceEnv.workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(shapedWorkspaceEnv.workspaceHints);
|
||||
}
|
||||
if (runtimeServiceIntents.length > 0) {
|
||||
env.PAPERCLIP_RUNTIME_SERVICE_INTENTS_JSON = JSON.stringify(runtimeServiceIntents);
|
||||
@@ -217,11 +235,6 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
if (runtimePrimaryUrl) {
|
||||
env.PAPERCLIP_RUNTIME_PRIMARY_URL = runtimePrimaryUrl;
|
||||
}
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) {
|
||||
env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
}
|
||||
@@ -230,7 +243,24 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...env })).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
await ensureAdapterExecutionTargetRuntimeCommandInstalled({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
installCommand: runtimeCommandSpec?.installCommand,
|
||||
detectCommand: runtimeCommandSpec?.detectCommand,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onLog,
|
||||
});
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
@@ -239,8 +269,6 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
@@ -316,6 +344,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const chrome = asBoolean(config.chrome, false);
|
||||
const maxTurns = asNumber(config.maxTurnsPerRun, 0);
|
||||
const dangerouslySkipPermissions = asBoolean(config.dangerouslySkipPermissions, true);
|
||||
const configEnv = parseObject(config.env);
|
||||
const hasExplicitClaudeConfigDir =
|
||||
typeof configEnv.CLAUDE_CONFIG_DIR === "string" && configEnv.CLAUDE_CONFIG_DIR.trim().length > 0;
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
const instructionsFileDir = instructionsFilePath ? `${path.dirname(instructionsFilePath)}/` : "";
|
||||
const runtimeConfig = await buildClaudeRuntimeConfig({
|
||||
@@ -323,8 +354,10 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
agent,
|
||||
config,
|
||||
context,
|
||||
runtimeCommandSpec: ctx.runtimeCommandSpec,
|
||||
executionTarget,
|
||||
authToken,
|
||||
onLog,
|
||||
});
|
||||
const {
|
||||
command,
|
||||
@@ -334,11 +367,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
env,
|
||||
loggedEnv,
|
||||
loggedEnv: initialLoggedEnv,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
extraArgs,
|
||||
} = runtimeConfig;
|
||||
let loggedEnv = initialLoggedEnv;
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
const terminalResultCleanupGraceMs = Math.max(
|
||||
0,
|
||||
@@ -379,6 +413,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
instructionsContents: combinedInstructionsContents,
|
||||
onLog,
|
||||
});
|
||||
const useManagedRemoteClaudeConfig =
|
||||
executionTargetIsRemote &&
|
||||
adapterExecutionTargetUsesManagedHome(executionTarget) &&
|
||||
!hasExplicitClaudeConfigDir;
|
||||
const claudeConfigSeedDir = useManagedRemoteClaudeConfig
|
||||
? await prepareClaudeConfigSeed(process.env, onLog, agent.companyId)
|
||||
: null;
|
||||
const preparedExecutionTargetRuntime = executionTargetIsRemote
|
||||
? await (async () => {
|
||||
await onLog(
|
||||
@@ -395,6 +436,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
localDir: promptBundle.addDir,
|
||||
followSymlinks: true,
|
||||
},
|
||||
...(claudeConfigSeedDir
|
||||
? [{
|
||||
key: "config-seed",
|
||||
localDir: claudeConfigSeedDir,
|
||||
followSymlinks: true,
|
||||
}]
|
||||
: []),
|
||||
],
|
||||
});
|
||||
})()
|
||||
@@ -411,6 +459,63 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
? path.posix.join(effectivePromptBundleAddDir, path.basename(promptBundle.instructionsFilePath))
|
||||
: promptBundle.instructionsFilePath
|
||||
: undefined;
|
||||
const remoteClaudeRuntimeRoot = executionTargetIsRemote
|
||||
? preparedExecutionTargetRuntime?.runtimeRootDir ??
|
||||
path.posix.join(effectiveExecutionCwd, ".paperclip-runtime", "claude")
|
||||
: null;
|
||||
const remoteClaudeConfigSeedDir = claudeConfigSeedDir && remoteClaudeRuntimeRoot
|
||||
? preparedExecutionTargetRuntime?.assetDirs["config-seed"] ??
|
||||
path.posix.join(remoteClaudeRuntimeRoot, "config-seed")
|
||||
: null;
|
||||
const remoteClaudeConfigDir = useManagedRemoteClaudeConfig && remoteClaudeRuntimeRoot
|
||||
? path.posix.join(remoteClaudeRuntimeRoot, "config")
|
||||
: null;
|
||||
if (remoteClaudeConfigDir && remoteClaudeConfigSeedDir) {
|
||||
env.CLAUDE_CONFIG_DIR = remoteClaudeConfigDir;
|
||||
loggedEnv.CLAUDE_CONFIG_DIR = remoteClaudeConfigDir;
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Materializing Claude auth/config into ${remoteClaudeConfigDir}.\n`,
|
||||
);
|
||||
await runAdapterExecutionTargetShellCommand(
|
||||
runId,
|
||||
executionTarget,
|
||||
`mkdir -p ${shellQuote(remoteClaudeConfigDir)} && ` +
|
||||
`if [ -d ${shellQuote(remoteClaudeConfigSeedDir)} ]; then ` +
|
||||
`cp -R ${shellQuote(`${remoteClaudeConfigSeedDir}/.`)} ${shellQuote(remoteClaudeConfigDir)}/; ` +
|
||||
`fi`,
|
||||
{
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec: Math.max(timeoutSec, 15),
|
||||
graceSec,
|
||||
onLog,
|
||||
},
|
||||
);
|
||||
}
|
||||
let paperclipBridge: Awaited<ReturnType<typeof startAdapterExecutionTargetPaperclipBridge>> = null;
|
||||
if (executionTargetIsRemote && adapterExecutionTargetUsesPaperclipBridge(executionTarget)) {
|
||||
paperclipBridge = await startAdapterExecutionTargetPaperclipBridge({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
runtimeRootDir: preparedExecutionTargetRuntime?.runtimeRootDir,
|
||||
adapterKey: "claude",
|
||||
hostApiToken: env.PAPERCLIP_API_KEY,
|
||||
onLog,
|
||||
});
|
||||
if (paperclipBridge) {
|
||||
Object.assign(env, paperclipBridge.env);
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME", "CLAUDE_CONFIG_DIR"],
|
||||
resolvedCommand,
|
||||
});
|
||||
if (remoteClaudeConfigDir) {
|
||||
loggedEnv.CLAUDE_CONFIG_DIR = remoteClaudeConfigDir;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
@@ -697,6 +802,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const transientUpstream =
|
||||
failed &&
|
||||
!loginMeta.requiresLogin &&
|
||||
!clearSessionForMaxTurns &&
|
||||
isClaudeTransientUpstreamError({
|
||||
parsed,
|
||||
stdout: proc.stdout,
|
||||
@@ -713,11 +819,14 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
: null;
|
||||
const resolvedErrorCode = loginMeta.requiresLogin
|
||||
? "claude_auth_required"
|
||||
: failed && clearSessionForMaxTurns
|
||||
? "max_turns_exhausted"
|
||||
: transientUpstream
|
||||
? "claude_transient_upstream"
|
||||
: null;
|
||||
const mergedResultJson: Record<string, unknown> = {
|
||||
...parsed,
|
||||
...(failed && clearSessionForMaxTurns ? { stopReason: "max_turns_exhausted" } : {}),
|
||||
...(transientUpstream ? { errorFamily: "transient_upstream" } : {}),
|
||||
...(transientRetryNotBefore ? { retryNotBefore: transientRetryNotBefore.toISOString() } : {}),
|
||||
...(transientRetryNotBefore ? { transientRetryNotBefore: transientRetryNotBefore.toISOString() } : {}),
|
||||
@@ -766,6 +875,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
|
||||
return toAdapterResult(initial, { fallbackSessionId: runtimeSessionId || runtime.sessionId });
|
||||
} finally {
|
||||
if (paperclipBridge) {
|
||||
await paperclipBridge.stop();
|
||||
}
|
||||
if (restoreRemoteWorkspace) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
|
||||
@@ -170,11 +170,19 @@ export function isClaudeMaxTurnsResult(parsed: Record<string, unknown> | null |
|
||||
const subtype = asString(parsed.subtype, "").trim().toLowerCase();
|
||||
if (subtype === "error_max_turns") return true;
|
||||
|
||||
const stopReason = asString(parsed.stop_reason, "").trim().toLowerCase();
|
||||
if (stopReason === "max_turns") return true;
|
||||
const structuredStopReasons = [
|
||||
parsed.stop_reason,
|
||||
parsed.stopReason,
|
||||
parsed.error_code,
|
||||
parsed.errorCode,
|
||||
].map((value) => asString(value, "").trim().toLowerCase());
|
||||
|
||||
const resultText = asString(parsed.result, "").trim();
|
||||
return /max(?:imum)?\s+turns?/i.test(resultText);
|
||||
return structuredStopReasons.some((reason) =>
|
||||
reason === "max_turns" ||
|
||||
reason === "max_turns_exhausted" ||
|
||||
reason === "turn_limit" ||
|
||||
reason === "turn_limit_exhausted",
|
||||
);
|
||||
}
|
||||
|
||||
export function isClaudeUnknownSessionError(parsed: Record<string, unknown>): boolean {
|
||||
|
||||
@@ -9,11 +9,15 @@ import {
|
||||
asNumber,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePathInEnv,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetDirectory,
|
||||
runAdapterExecutionTargetProcess,
|
||||
describeAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCwd,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import path from "node:path";
|
||||
import { detectClaudeLoginRequired, parseClaudeStreamJson } from "./parse.js";
|
||||
import { isBedrockModelId } from "./models.js";
|
||||
@@ -56,10 +60,28 @@ export async function testEnvironment(
|
||||
const checks: AdapterEnvironmentCheck[] = [];
|
||||
const config = parseObject(ctx.config);
|
||||
const command = asString(config.command, "claude");
|
||||
const cwd = asString(config.cwd, process.cwd());
|
||||
const target = ctx.executionTarget ?? null;
|
||||
const targetIsRemote = target?.kind === "remote";
|
||||
const cwd = resolveAdapterExecutionTargetCwd(target, asString(config.cwd, ""), process.cwd());
|
||||
const targetLabel = targetIsRemote
|
||||
? ctx.environmentName ?? describeAdapterExecutionTarget(target)
|
||||
: null;
|
||||
const runId = `claude-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
||||
|
||||
if (targetLabel) {
|
||||
checks.push({
|
||||
code: "claude_environment_target",
|
||||
level: "info",
|
||||
message: `Probing inside environment: ${targetLabel}`,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
await ensureAdapterExecutionTargetDirectory(runId, target, cwd, {
|
||||
cwd,
|
||||
env: {},
|
||||
createIfMissing: true,
|
||||
});
|
||||
checks.push({
|
||||
code: "claude_cwd_valid",
|
||||
level: "info",
|
||||
@@ -81,7 +103,7 @@ export async function testEnvironment(
|
||||
}
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, target, cwd, runtimeEnv);
|
||||
checks.push({
|
||||
code: "claude_command_resolvable",
|
||||
level: "info",
|
||||
@@ -96,16 +118,21 @@ export async function testEnvironment(
|
||||
});
|
||||
}
|
||||
|
||||
// When probing a remote target, the Paperclip host's process.env does not
|
||||
// reflect what the agent will actually see at runtime. Only consider env
|
||||
// vars from the adapter config in that case; the probe itself will surface
|
||||
// any auth issues on the remote box.
|
||||
const considerHostEnv = !targetIsRemote;
|
||||
const hasBedrock =
|
||||
env.CLAUDE_CODE_USE_BEDROCK === "1" ||
|
||||
env.CLAUDE_CODE_USE_BEDROCK === "true" ||
|
||||
process.env.CLAUDE_CODE_USE_BEDROCK === "1" ||
|
||||
process.env.CLAUDE_CODE_USE_BEDROCK === "true" ||
|
||||
(considerHostEnv && process.env.CLAUDE_CODE_USE_BEDROCK === "1") ||
|
||||
(considerHostEnv && process.env.CLAUDE_CODE_USE_BEDROCK === "true") ||
|
||||
isNonEmpty(env.ANTHROPIC_BEDROCK_BASE_URL) ||
|
||||
isNonEmpty(process.env.ANTHROPIC_BEDROCK_BASE_URL);
|
||||
(considerHostEnv && isNonEmpty(process.env.ANTHROPIC_BEDROCK_BASE_URL));
|
||||
|
||||
const configApiKey = env.ANTHROPIC_API_KEY;
|
||||
const hostApiKey = process.env.ANTHROPIC_API_KEY;
|
||||
const hostApiKey = considerHostEnv ? process.env.ANTHROPIC_API_KEY : undefined;
|
||||
if (hasBedrock) {
|
||||
const source =
|
||||
env.CLAUDE_CODE_USE_BEDROCK === "1" ||
|
||||
@@ -130,7 +157,7 @@ export async function testEnvironment(
|
||||
detail: `Detected in ${source}.`,
|
||||
hint: "Unset ANTHROPIC_API_KEY if you want subscription-based Claude login behavior.",
|
||||
});
|
||||
} else {
|
||||
} else if (!targetIsRemote) {
|
||||
checks.push({
|
||||
code: "claude_subscription_mode_possible",
|
||||
level: "info",
|
||||
@@ -172,8 +199,9 @@ export async function testEnvironment(
|
||||
if (maxTurns > 0) args.push("--max-turns", String(maxTurns));
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
const probe = await runChildProcess(
|
||||
`claude-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
const probe = await runAdapterExecutionTargetProcess(
|
||||
runId,
|
||||
target,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
|
||||
@@ -66,8 +66,6 @@ export function buildClaudeLocalConfig(v: CreateConfigValues): Record<string, un
|
||||
const ac: Record<string, unknown> = {};
|
||||
if (v.cwd) ac.cwd = v.cwd;
|
||||
if (v.instructionsFilePath) ac.instructionsFilePath = v.instructionsFilePath;
|
||||
if (v.promptTemplate) ac.promptTemplate = v.promptTemplate;
|
||||
if (v.bootstrapPrompt) ac.bootstrapPromptTemplate = v.bootstrapPrompt;
|
||||
if (v.model) ac.model = v.model;
|
||||
if (v.thinkingEffort) ac.effort = v.thinkingEffort;
|
||||
if (v.chrome) ac.chrome = true;
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import type { AdapterModelProfileDefinition } from "@paperclipai/adapter-utils";
|
||||
|
||||
export const type = "codex_local";
|
||||
export const label = "Codex (local)";
|
||||
|
||||
export const DEFAULT_CODEX_LOCAL_MODEL = "gpt-5.3-codex";
|
||||
export const DEFAULT_CODEX_LOCAL_BYPASS_APPROVALS_AND_SANDBOX = true;
|
||||
export const CODEX_LOCAL_FAST_MODE_SUPPORTED_MODELS = ["gpt-5.4"] as const;
|
||||
@@ -40,6 +43,19 @@ export const models = [
|
||||
{ id: "codex-mini-latest", label: "Codex Mini" },
|
||||
];
|
||||
|
||||
export const modelProfiles: AdapterModelProfileDefinition[] = [
|
||||
{
|
||||
key: "cheap",
|
||||
label: "Cheap",
|
||||
description: "Use the lowest-cost known Codex local model lane without changing the primary model.",
|
||||
adapterConfig: {
|
||||
model: "gpt-5.3-codex-spark",
|
||||
modelReasoningEffort: "low",
|
||||
},
|
||||
source: "adapter_default",
|
||||
},
|
||||
];
|
||||
|
||||
export const agentConfigurationDoc = `# codex_local agent configuration
|
||||
|
||||
Adapter: codex_local
|
||||
|
||||
@@ -10,6 +10,7 @@ const {
|
||||
prepareWorkspaceForSshExecution,
|
||||
restoreWorkspaceFromSshExecution,
|
||||
syncDirectoryToSsh,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 1,
|
||||
@@ -25,6 +26,14 @@ const {
|
||||
prepareWorkspaceForSshExecution: vi.fn(async () => undefined),
|
||||
restoreWorkspaceFromSshExecution: vi.fn(async () => undefined),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
startAdapterExecutionTargetPaperclipBridge: vi.fn(async () => ({
|
||||
env: {
|
||||
PAPERCLIP_API_URL: "http://127.0.0.1:4310",
|
||||
PAPERCLIP_API_KEY: "bridge-token",
|
||||
PAPERCLIP_API_BRIDGE_MODE: "queue_v1",
|
||||
},
|
||||
stop: async () => {},
|
||||
})),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
@@ -51,6 +60,16 @@ vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/execution-target", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/execution-target")>(
|
||||
"@paperclipai/adapter-utils/execution-target",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("codex remote execution", () => {
|
||||
@@ -74,6 +93,8 @@ describe("codex remote execution", () => {
|
||||
await mkdir(codexHomeDir, { recursive: true });
|
||||
await writeFile(path.join(rootDir, "instructions.md"), "Use the remote workspace.\n", "utf8");
|
||||
await writeFile(path.join(codexHomeDir, "auth.json"), "{}", "utf8");
|
||||
const alternateWorkspaceDir = path.join(rootDir, "alternate-workspace");
|
||||
await mkdir(alternateWorkspaceDir, { recursive: true });
|
||||
|
||||
await execute({
|
||||
runId: "run-1",
|
||||
@@ -100,7 +121,27 @@ describe("codex remote execution", () => {
|
||||
paperclipWorkspace: {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
strategy: "git_worktree",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
branchName: "feature/remote-codex",
|
||||
worktreePath: workspaceDir,
|
||||
},
|
||||
paperclipWorkspaces: [
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: workspaceDir,
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
cwd: alternateWorkspaceDir,
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "feature/other",
|
||||
},
|
||||
],
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
@@ -134,7 +175,25 @@ describe("codex remote execution", () => {
|
||||
| [string, string, string[], { env: Record<string, string>; remoteExecution?: { remoteCwd: string } | null }]
|
||||
| undefined;
|
||||
expect(call?.[3].env.CODEX_HOME).toBe("/remote/workspace/.paperclip-runtime/codex/home");
|
||||
expect(call?.[3].env.PAPERCLIP_WORKSPACE_CWD).toBe("/remote/workspace");
|
||||
expect(call?.[3].env.PAPERCLIP_WORKSPACE_WORKTREE_PATH).toBeUndefined();
|
||||
expect(JSON.parse(call?.[3].env.PAPERCLIP_WORKSPACES_JSON ?? "[]")).toEqual([
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: "/remote/workspace",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "feature/other",
|
||||
},
|
||||
]);
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://127.0.0.1:4310");
|
||||
expect(call?.[3].env.PAPERCLIP_API_BRIDGE_MODE).toBe("queue_v1");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(startAdapterExecutionTargetPaperclipBridge).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledWith(expect.objectContaining({
|
||||
localDir: workspaceDir,
|
||||
|
||||
@@ -4,16 +4,18 @@ import { fileURLToPath } from "node:url";
|
||||
import { inferOpenAiCompatibleBiller, type AdapterExecutionContext, type AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesPaperclipBridge,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetRuntimeCommandInstalled,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
@@ -29,6 +31,7 @@ import {
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
shapePaperclipWorkspaceEnvForExecution,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
joinPromptSections,
|
||||
@@ -346,6 +349,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
},
|
||||
);
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
const shapedWorkspaceEnv = shapePaperclipWorkspaceEnvForExecution({
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceWorktreePath,
|
||||
workspaceHints,
|
||||
executionTargetIsRemote,
|
||||
executionCwd: effectiveExecutionCwd,
|
||||
});
|
||||
const preparedExecutionTargetRuntime = executionTargetIsRemote
|
||||
? await (async () => {
|
||||
await onLog(
|
||||
@@ -369,6 +379,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const restoreRemoteWorkspace = preparedExecutionTargetRuntime
|
||||
? () => preparedExecutionTargetRuntime.restoreWorkspace()
|
||||
: null;
|
||||
let paperclipBridge: Awaited<ReturnType<typeof startAdapterExecutionTargetPaperclipBridge>> = null;
|
||||
const remoteCodexHome = executionTargetIsRemote
|
||||
? preparedExecutionTargetRuntime?.assetDirs.home ??
|
||||
path.posix.join(effectiveExecutionCwd, ".paperclip-runtime", "codex", "home")
|
||||
@@ -423,18 +434,18 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
}
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceCwd: shapedWorkspaceEnv.workspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceStrategy,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
workspaceBranch,
|
||||
workspaceWorktreePath,
|
||||
workspaceWorktreePath: shapedWorkspaceEnv.workspaceWorktreePath,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
if (shapedWorkspaceEnv.workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(shapedWorkspaceEnv.workspaceHints);
|
||||
}
|
||||
if (runtimeServiceIntents.length > 0) {
|
||||
env.PAPERCLIP_RUNTIME_SERVICE_INTENTS_JSON = JSON.stringify(runtimeServiceIntents);
|
||||
@@ -445,10 +456,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (runtimePrimaryUrl) {
|
||||
env.PAPERCLIP_RUNTIME_PRIMARY_URL = runtimePrimaryUrl;
|
||||
}
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) {
|
||||
env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
}
|
||||
for (const [k, v] of Object.entries(envConfig)) {
|
||||
if (typeof v === "string") env[k] = v;
|
||||
}
|
||||
@@ -456,13 +463,41 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (!hasExplicitApiKey && authToken) {
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
if (executionTargetIsRemote && adapterExecutionTargetUsesPaperclipBridge(executionTarget)) {
|
||||
paperclipBridge = await startAdapterExecutionTargetPaperclipBridge({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
runtimeRootDir: preparedExecutionTargetRuntime?.runtimeRootDir,
|
||||
adapterKey: "codex",
|
||||
hostApiToken: env.PAPERCLIP_API_KEY,
|
||||
onLog,
|
||||
});
|
||||
if (paperclipBridge) {
|
||||
Object.assign(env, paperclipBridge.env);
|
||||
}
|
||||
}
|
||||
const effectiveEnv = Object.fromEntries(
|
||||
Object.entries({ ...process.env, ...env }).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
const billingType = resolveCodexBillingType(effectiveEnv);
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv(effectiveEnv)).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
await ensureAdapterExecutionTargetRuntimeCommandInstalled({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
installCommand: ctx.runtimeCommandSpec?.installCommand,
|
||||
detectCommand: ctx.runtimeCommandSpec?.detectCommand,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
timeoutSec: asNumber(config.timeoutSec, 0),
|
||||
graceSec: asNumber(config.graceSec, 20),
|
||||
onLog,
|
||||
});
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
@@ -780,6 +815,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
|
||||
return toResult(initial, false, false);
|
||||
} finally {
|
||||
if (paperclipBridge) {
|
||||
await paperclipBridge.stop();
|
||||
}
|
||||
if (restoreRemoteWorkspace) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
|
||||
@@ -6,11 +6,15 @@ import type {
|
||||
import {
|
||||
asString,
|
||||
parseObject,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePathInEnv,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetDirectory,
|
||||
runAdapterExecutionTargetProcess,
|
||||
describeAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCwd,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import path from "node:path";
|
||||
import { parseCodexJsonl } from "./parse.js";
|
||||
import { codexHomeDir, readCodexAuthInfo } from "./quota.js";
|
||||
@@ -57,10 +61,28 @@ export async function testEnvironment(
|
||||
const checks: AdapterEnvironmentCheck[] = [];
|
||||
const config = parseObject(ctx.config);
|
||||
const command = asString(config.command, "codex");
|
||||
const cwd = asString(config.cwd, process.cwd());
|
||||
const target = ctx.executionTarget ?? null;
|
||||
const targetIsRemote = target?.kind === "remote";
|
||||
const cwd = resolveAdapterExecutionTargetCwd(target, asString(config.cwd, ""), process.cwd());
|
||||
const targetLabel = targetIsRemote
|
||||
? ctx.environmentName ?? describeAdapterExecutionTarget(target)
|
||||
: null;
|
||||
const runId = `codex-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
||||
|
||||
if (targetLabel) {
|
||||
checks.push({
|
||||
code: "codex_environment_target",
|
||||
level: "info",
|
||||
message: `Probing inside environment: ${targetLabel}`,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
await ensureAdapterExecutionTargetDirectory(runId, target, cwd, {
|
||||
cwd,
|
||||
env: {},
|
||||
createIfMissing: true,
|
||||
});
|
||||
checks.push({
|
||||
code: "codex_cwd_valid",
|
||||
level: "info",
|
||||
@@ -82,7 +104,7 @@ export async function testEnvironment(
|
||||
}
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, target, cwd, runtimeEnv);
|
||||
checks.push({
|
||||
code: "codex_command_resolvable",
|
||||
level: "info",
|
||||
@@ -98,7 +120,7 @@ export async function testEnvironment(
|
||||
}
|
||||
|
||||
const configOpenAiKey = env.OPENAI_API_KEY;
|
||||
const hostOpenAiKey = process.env.OPENAI_API_KEY;
|
||||
const hostOpenAiKey = targetIsRemote ? undefined : process.env.OPENAI_API_KEY;
|
||||
if (isNonEmpty(configOpenAiKey) || isNonEmpty(hostOpenAiKey)) {
|
||||
const source = isNonEmpty(configOpenAiKey) ? "adapter config env" : "server environment";
|
||||
checks.push({
|
||||
@@ -107,7 +129,9 @@ export async function testEnvironment(
|
||||
message: "OPENAI_API_KEY is set for Codex authentication.",
|
||||
detail: `Detected in ${source}.`,
|
||||
});
|
||||
} else {
|
||||
} else if (!targetIsRemote) {
|
||||
// Local-only auth file check. On remote targets, the probe will surface
|
||||
// any missing-auth errors directly from the remote `codex` invocation.
|
||||
const codexHome = isNonEmpty(env.CODEX_HOME) ? env.CODEX_HOME : undefined;
|
||||
const codexAuth = await readCodexAuthInfo(codexHome).catch(() => null);
|
||||
if (codexAuth) {
|
||||
@@ -150,8 +174,9 @@ export async function testEnvironment(
|
||||
});
|
||||
}
|
||||
|
||||
const probe = await runChildProcess(
|
||||
`codex-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
const probe = await runAdapterExecutionTargetProcess(
|
||||
runId,
|
||||
target,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
|
||||
@@ -70,8 +70,6 @@ export function buildCodexLocalConfig(v: CreateConfigValues): Record<string, unk
|
||||
const ac: Record<string, unknown> = {};
|
||||
if (v.cwd) ac.cwd = v.cwd;
|
||||
if (v.instructionsFilePath) ac.instructionsFilePath = v.instructionsFilePath;
|
||||
if (v.promptTemplate) ac.promptTemplate = v.promptTemplate;
|
||||
if (v.bootstrapPrompt) ac.bootstrapPromptTemplate = v.bootstrapPrompt;
|
||||
ac.model = v.model || DEFAULT_CODEX_LOCAL_MODEL;
|
||||
if (v.thinkingEffort) ac.modelReasoningEffort = v.thinkingEffort;
|
||||
ac.timeoutSec = 0;
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import type { AdapterModelProfileDefinition } from "@paperclipai/adapter-utils";
|
||||
|
||||
export const type = "cursor";
|
||||
export const label = "Cursor CLI (local)";
|
||||
|
||||
export const DEFAULT_CURSOR_LOCAL_MODEL = "auto";
|
||||
|
||||
const CURSOR_FALLBACK_MODEL_IDS = [
|
||||
@@ -46,6 +49,18 @@ const CURSOR_FALLBACK_MODEL_IDS = [
|
||||
|
||||
export const models = CURSOR_FALLBACK_MODEL_IDS.map((id) => ({ id, label: id }));
|
||||
|
||||
export const modelProfiles: AdapterModelProfileDefinition[] = [
|
||||
{
|
||||
key: "cheap",
|
||||
label: "Cheap",
|
||||
description: "Use Cursor's known Codex mini model as the budget lane instead of assuming auto is cheap.",
|
||||
adapterConfig: {
|
||||
model: "gpt-5.1-codex-mini",
|
||||
},
|
||||
source: "adapter_default",
|
||||
},
|
||||
];
|
||||
|
||||
export const agentConfigurationDoc = `# cursor agent configuration
|
||||
|
||||
Adapter: cursor
|
||||
@@ -80,4 +95,5 @@ Notes:
|
||||
- Sessions are resumed with --resume when stored session cwd matches current cwd.
|
||||
- Paperclip auto-injects local skills into "~/.cursor/skills" when missing, so Cursor can discover "$paperclip" and related skills on local runs.
|
||||
- Paperclip auto-adds --yolo unless one of --trust/--yolo/-f is already present in extraArgs.
|
||||
- Remote sandbox runs prepend "~/.local/bin" to PATH and prefer "~/.local/bin/cursor-agent" when the default Cursor entrypoint is requested, so standard E2B-style installs do not need hardcoded absolute command paths.
|
||||
`;
|
||||
|
||||
@@ -11,6 +11,7 @@ const {
|
||||
restoreWorkspaceFromSshExecution,
|
||||
runSshCommand,
|
||||
syncDirectoryToSsh,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
} = vi.hoisted(() => ({
|
||||
runChildProcess: vi.fn(async () => ({
|
||||
exitCode: 0,
|
||||
@@ -35,6 +36,14 @@ const {
|
||||
exitCode: 0,
|
||||
})),
|
||||
syncDirectoryToSsh: vi.fn(async () => undefined),
|
||||
startAdapterExecutionTargetPaperclipBridge: vi.fn(async () => ({
|
||||
env: {
|
||||
PAPERCLIP_API_URL: "http://127.0.0.1:4310",
|
||||
PAPERCLIP_API_KEY: "bridge-token",
|
||||
PAPERCLIP_API_BRIDGE_MODE: "queue_v1",
|
||||
},
|
||||
stop: async () => {},
|
||||
})),
|
||||
}));
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/server-utils", async () => {
|
||||
@@ -62,6 +71,16 @@ vi.mock("@paperclipai/adapter-utils/ssh", async () => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@paperclipai/adapter-utils/execution-target", async () => {
|
||||
const actual = await vi.importActual<typeof import("@paperclipai/adapter-utils/execution-target")>(
|
||||
"@paperclipai/adapter-utils/execution-target",
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
};
|
||||
});
|
||||
|
||||
import { execute } from "./execute.js";
|
||||
|
||||
describe("cursor remote execution", () => {
|
||||
@@ -80,7 +99,9 @@ describe("cursor remote execution", () => {
|
||||
const rootDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-cursor-remote-"));
|
||||
cleanupDirs.push(rootDir);
|
||||
const workspaceDir = path.join(rootDir, "workspace");
|
||||
const alternateWorkspaceDir = path.join(rootDir, "workspace-other");
|
||||
await mkdir(workspaceDir, { recursive: true });
|
||||
await mkdir(alternateWorkspaceDir, { recursive: true });
|
||||
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
@@ -105,6 +126,20 @@ describe("cursor remote execution", () => {
|
||||
cwd: workspaceDir,
|
||||
source: "project_primary",
|
||||
},
|
||||
paperclipWorkspaces: [
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: workspaceDir,
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
cwd: alternateWorkspaceDir,
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "feature/other",
|
||||
},
|
||||
],
|
||||
},
|
||||
executionTransport: {
|
||||
remoteExecution: {
|
||||
@@ -116,7 +151,6 @@ describe("cursor remote execution", () => {
|
||||
privateKey: "PRIVATE KEY",
|
||||
knownHosts: "[127.0.0.1]:2222 ssh-ed25519 AAAA",
|
||||
strictHostKeyChecking: true,
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
},
|
||||
onLog: async () => {},
|
||||
@@ -131,7 +165,6 @@ describe("cursor remote execution", () => {
|
||||
port: 2222,
|
||||
username: "fixture",
|
||||
remoteCwd: "/remote/workspace",
|
||||
paperclipApiUrl: "http://198.51.100.10:3102",
|
||||
},
|
||||
});
|
||||
expect(prepareWorkspaceForSshExecution).toHaveBeenCalledTimes(1);
|
||||
@@ -150,8 +183,24 @@ describe("cursor remote execution", () => {
|
||||
| undefined;
|
||||
expect(call?.[2]).toContain("--workspace");
|
||||
expect(call?.[2]).toContain("/remote/workspace");
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://198.51.100.10:3102");
|
||||
expect(call?.[3].env.PAPERCLIP_WORKSPACE_CWD).toBe("/remote/workspace");
|
||||
expect(JSON.parse(call?.[3].env.PAPERCLIP_WORKSPACES_JSON ?? "[]")).toEqual([
|
||||
{
|
||||
workspaceId: "workspace-1",
|
||||
cwd: "/remote/workspace",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "main",
|
||||
},
|
||||
{
|
||||
workspaceId: "workspace-2",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip.git",
|
||||
repoRef: "feature/other",
|
||||
},
|
||||
]);
|
||||
expect(call?.[3].env.PAPERCLIP_API_URL).toBe("http://127.0.0.1:4310");
|
||||
expect(call?.[3].env.PAPERCLIP_API_BRIDGE_MODE).toBe("queue_v1");
|
||||
expect(call?.[3].remoteExecution?.remoteCwd).toBe("/remote/workspace");
|
||||
expect(startAdapterExecutionTargetPaperclipBridge).toHaveBeenCalledTimes(1);
|
||||
expect(restoreWorkspaceFromSshExecution).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
|
||||
@@ -5,19 +5,21 @@ import { fileURLToPath } from "node:url";
|
||||
import { inferOpenAiCompatibleBiller, type AdapterExecutionContext, type AdapterExecutionResult } from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
adapterExecutionTargetIsRemote,
|
||||
adapterExecutionTargetPaperclipApiUrl,
|
||||
adapterExecutionTargetRemoteCwd,
|
||||
adapterExecutionTargetSessionIdentity,
|
||||
adapterExecutionTargetSessionMatches,
|
||||
adapterExecutionTargetUsesManagedHome,
|
||||
adapterExecutionTargetUsesPaperclipBridge,
|
||||
describeAdapterExecutionTarget,
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetRuntimeCommandInstalled,
|
||||
prepareAdapterExecutionTargetRuntime,
|
||||
readAdapterExecutionTarget,
|
||||
readAdapterExecutionTargetHomeDir,
|
||||
resolveAdapterExecutionTargetCommandForLogs,
|
||||
runAdapterExecutionTargetProcess,
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
startAdapterExecutionTargetPaperclipBridge,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import {
|
||||
asString,
|
||||
@@ -35,12 +37,14 @@ import {
|
||||
removeMaintainerOnlySkillSymlinks,
|
||||
renderTemplate,
|
||||
renderPaperclipWakePrompt,
|
||||
shapePaperclipWorkspaceEnvForExecution,
|
||||
stringifyPaperclipWakePayload,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
joinPromptSections,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { DEFAULT_CURSOR_LOCAL_MODEL } from "../index.js";
|
||||
import { parseCursorJsonl, isCursorUnknownSessionError } from "./parse.js";
|
||||
import { prepareCursorSandboxCommand } from "./remote-command.js";
|
||||
import { normalizeCursorStreamLine } from "../shared/stream.js";
|
||||
import { hasCursorTrustBypassArg } from "../shared/trust.js";
|
||||
|
||||
@@ -199,7 +203,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
config.promptTemplate,
|
||||
DEFAULT_PAPERCLIP_AGENT_PROMPT_TEMPLATE,
|
||||
);
|
||||
const command = asString(config.command, "agent");
|
||||
let command = asString(config.command, "agent");
|
||||
const model = asString(config.model, DEFAULT_CURSOR_LOCAL_MODEL).trim();
|
||||
const mode = normalizeMode(asString(config.mode, ""));
|
||||
|
||||
@@ -219,6 +223,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const useConfiguredInsteadOfAgentHome = workspaceSource === "agent_home" && configuredCwd.length > 0;
|
||||
const effectiveWorkspaceCwd = useConfiguredInsteadOfAgentHome ? "" : workspaceCwd;
|
||||
const cwd = effectiveWorkspaceCwd || configuredCwd || process.cwd();
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
const shapedWorkspaceEnv = shapePaperclipWorkspaceEnvForExecution({
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceHints,
|
||||
executionTargetIsRemote,
|
||||
executionCwd: effectiveExecutionCwd,
|
||||
});
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
const cursorSkillEntries = await readPaperclipRuntimeSkillEntries(config, __moduleDir);
|
||||
const desiredCursorSkillNames = resolvePaperclipDesiredSkillNames(config, cursorSkillEntries);
|
||||
@@ -231,7 +242,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const envConfig = parseObject(config.env);
|
||||
const hasExplicitApiKey =
|
||||
typeof envConfig.PAPERCLIP_API_KEY === "string" && envConfig.PAPERCLIP_API_KEY.trim().length > 0;
|
||||
const env: Record<string, string> = { ...buildPaperclipEnv(agent) };
|
||||
let env: Record<string, string> = { ...buildPaperclipEnv(agent) };
|
||||
env.PAPERCLIP_RUN_ID = runId;
|
||||
const wakeTaskId =
|
||||
(typeof context.taskId === "string" && context.taskId.trim().length > 0 && context.taskId.trim()) ||
|
||||
@@ -279,19 +290,15 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
env.PAPERCLIP_WAKE_PAYLOAD_JSON = wakePayloadJson;
|
||||
}
|
||||
applyPaperclipWorkspaceEnv(env, {
|
||||
workspaceCwd: effectiveWorkspaceCwd,
|
||||
workspaceCwd: shapedWorkspaceEnv.workspaceCwd,
|
||||
workspaceSource,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
agentHome,
|
||||
});
|
||||
if (workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(workspaceHints);
|
||||
}
|
||||
const targetPaperclipApiUrl = adapterExecutionTargetPaperclipApiUrl(executionTarget);
|
||||
if (targetPaperclipApiUrl) {
|
||||
env.PAPERCLIP_API_URL = targetPaperclipApiUrl;
|
||||
if (shapedWorkspaceEnv.workspaceHints.length > 0) {
|
||||
env.PAPERCLIP_WORKSPACES_JSON = JSON.stringify(shapedWorkspaceEnv.workspaceHints);
|
||||
}
|
||||
for (const [k, v] of Object.entries(envConfig)) {
|
||||
if (typeof v === "string") env[k] = v;
|
||||
@@ -299,6 +306,33 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (!hasExplicitApiKey && authToken) {
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
await ensureAdapterExecutionTargetRuntimeCommandInstalled({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
installCommand: ctx.runtimeCommandSpec?.installCommand,
|
||||
detectCommand: ctx.runtimeCommandSpec?.detectCommand,
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onLog,
|
||||
});
|
||||
// Probe the sandbox before the managed-home override so we discover
|
||||
// cursor-agent from the real system HOME (e.g. ~/.local/bin/cursor-agent).
|
||||
// The managed HOME set later is for runtime isolation, not for finding the CLI.
|
||||
const sandboxCommand = await prepareCursorSandboxCommand({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
command,
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
});
|
||||
command = sandboxCommand.command;
|
||||
env = sandboxCommand.env;
|
||||
const effectiveEnv = Object.fromEntries(
|
||||
Object.entries({ ...process.env, ...env }).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
@@ -308,23 +342,22 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, executionTarget, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveAdapterExecutionTargetCommandForLogs(command, executionTarget, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
let loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const autoTrustEnabled = !hasCursorTrustBypassArg(extraArgs);
|
||||
const effectiveExecutionCwd = adapterExecutionTargetRemoteCwd(executionTarget, cwd);
|
||||
let restoreRemoteWorkspace: (() => Promise<void>) | null = null;
|
||||
let localSkillsDir: string | null = null;
|
||||
let remoteRuntimeRootDir: string | null = null;
|
||||
let paperclipBridge: Awaited<ReturnType<typeof startAdapterExecutionTargetPaperclipBridge>> = null;
|
||||
|
||||
if (executionTargetIsRemote) {
|
||||
try {
|
||||
@@ -344,6 +377,7 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
}],
|
||||
});
|
||||
restoreRemoteWorkspace = () => preparedExecutionTargetRuntime.restoreWorkspace();
|
||||
remoteRuntimeRootDir = preparedExecutionTargetRuntime.runtimeRootDir;
|
||||
const managedHome = adapterExecutionTargetUsesManagedHome(executionTarget);
|
||||
if (managedHome && preparedExecutionTargetRuntime.runtimeRootDir) {
|
||||
env.HOME = preparedExecutionTargetRuntime.runtimeRootDir;
|
||||
@@ -374,6 +408,24 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
if (executionTargetIsRemote && adapterExecutionTargetUsesPaperclipBridge(executionTarget)) {
|
||||
paperclipBridge = await startAdapterExecutionTargetPaperclipBridge({
|
||||
runId,
|
||||
target: executionTarget,
|
||||
runtimeRootDir: remoteRuntimeRootDir,
|
||||
adapterKey: "cursor",
|
||||
hostApiToken: env.PAPERCLIP_API_KEY,
|
||||
onLog,
|
||||
});
|
||||
if (paperclipBridge) {
|
||||
Object.assign(env, paperclipBridge.env);
|
||||
loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv: ensurePathInEnv({ ...process.env, ...env }),
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
@@ -422,6 +474,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
notes.push("Auto-added --yolo to bypass interactive prompts.");
|
||||
}
|
||||
notes.push("Prompt is piped to Cursor via stdin.");
|
||||
if (sandboxCommand.addedPathEntry) {
|
||||
notes.push(`Remote sandbox runs prepend ${sandboxCommand.addedPathEntry} to PATH.`);
|
||||
}
|
||||
if (sandboxCommand.preferredCommandPath) {
|
||||
notes.push(`Remote sandbox runs prefer ${sandboxCommand.preferredCommandPath} when using the default Cursor entrypoint.`);
|
||||
}
|
||||
if (!instructionsFilePath) return notes;
|
||||
if (instructionsPrefix.length > 0) {
|
||||
notes.push(
|
||||
@@ -636,6 +694,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
}
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
if (paperclipBridge) {
|
||||
await paperclipBridge.stop();
|
||||
}
|
||||
if (restoreRemoteWorkspace) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
|
||||
160
packages/adapters/cursor-local/src/server/remote-command.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import path from "node:path";
|
||||
import {
|
||||
runAdapterExecutionTargetShellCommand,
|
||||
type AdapterExecutionTarget,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import { ensurePathInEnv } from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
const DEFAULT_CURSOR_COMMAND_BASENAMES = new Set(["agent", "cursor-agent"]);
|
||||
|
||||
function commandBasename(command: string): string {
|
||||
return command.trim().split(/[\\/]/).pop()?.toLowerCase() ?? "";
|
||||
}
|
||||
|
||||
function hasPathSeparator(command: string): boolean {
|
||||
return command.includes("/") || command.includes("\\");
|
||||
}
|
||||
|
||||
function prependPosixPathEntry(pathValue: string, entry: string): string {
|
||||
const parts = pathValue.split(":").filter(Boolean);
|
||||
if (parts.includes(entry)) return pathValue;
|
||||
const cleaned = parts.join(":");
|
||||
return cleaned.length > 0 ? `${entry}:${cleaned}` : entry;
|
||||
}
|
||||
|
||||
type SandboxCursorRuntimeInfo = {
|
||||
remoteSystemHomeDir: string | null;
|
||||
preferredCommandPath: string | null;
|
||||
};
|
||||
|
||||
function readMarkedValue(lines: string[], marker: string): string | null {
|
||||
const matchedLine = lines.find((line) => line.startsWith(marker));
|
||||
if (!matchedLine) return null;
|
||||
const value = matchedLine.slice(marker.length).trim();
|
||||
return value.length > 0 ? value : null;
|
||||
}
|
||||
|
||||
async function readSandboxCursorRuntimeInfo(input: {
|
||||
runId: string;
|
||||
target: AdapterExecutionTarget;
|
||||
command: string;
|
||||
cwd: string;
|
||||
env: Record<string, string>;
|
||||
timeoutSec: number;
|
||||
graceSec: number;
|
||||
}): Promise<SandboxCursorRuntimeInfo> {
|
||||
const shouldCheckPreferredCommand = isDefaultCursorCommand(input.command) && !hasPathSeparator(input.command);
|
||||
const homeMarker = "__PAPERCLIP_CURSOR_HOME__:";
|
||||
const preferredMarker = "__PAPERCLIP_CURSOR_AGENT__:";
|
||||
try {
|
||||
const result = await runAdapterExecutionTargetShellCommand(
|
||||
input.runId,
|
||||
input.target,
|
||||
[
|
||||
`printf ${JSON.stringify(`${homeMarker}%s\\n`)} "$HOME"`,
|
||||
shouldCheckPreferredCommand
|
||||
? `if [ -x "$HOME/.local/bin/cursor-agent" ]; then printf ${JSON.stringify(`${preferredMarker}%s\\n`)} "$HOME/.local/bin/cursor-agent"; fi`
|
||||
: "",
|
||||
].filter(Boolean).join("; "),
|
||||
{
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
timeoutSec: input.timeoutSec,
|
||||
graceSec: input.graceSec,
|
||||
},
|
||||
);
|
||||
if (result.timedOut || (result.exitCode ?? 1) !== 0) {
|
||||
return {
|
||||
remoteSystemHomeDir: null,
|
||||
preferredCommandPath: null,
|
||||
};
|
||||
}
|
||||
const lines = result.stdout.split(/\r?\n/);
|
||||
return {
|
||||
remoteSystemHomeDir: readMarkedValue(lines, homeMarker),
|
||||
preferredCommandPath: readMarkedValue(lines, preferredMarker),
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
remoteSystemHomeDir: null,
|
||||
preferredCommandPath: null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function isDefaultCursorCommand(command: string): boolean {
|
||||
return DEFAULT_CURSOR_COMMAND_BASENAMES.has(commandBasename(command));
|
||||
}
|
||||
|
||||
export type PreparedCursorSandboxCommand = {
|
||||
command: string;
|
||||
env: Record<string, string>;
|
||||
remoteSystemHomeDir: string | null;
|
||||
addedPathEntry: string | null;
|
||||
preferredCommandPath: string | null;
|
||||
};
|
||||
|
||||
export async function prepareCursorSandboxCommand(input: {
|
||||
runId: string;
|
||||
target: AdapterExecutionTarget | null | undefined;
|
||||
command: string;
|
||||
cwd: string;
|
||||
env: Record<string, string>;
|
||||
timeoutSec: number;
|
||||
graceSec: number;
|
||||
}): Promise<PreparedCursorSandboxCommand> {
|
||||
if (input.target?.kind !== "remote" || input.target.transport !== "sandbox") {
|
||||
return {
|
||||
command: input.command,
|
||||
env: input.env,
|
||||
remoteSystemHomeDir: null,
|
||||
addedPathEntry: null,
|
||||
preferredCommandPath: null,
|
||||
};
|
||||
}
|
||||
|
||||
const runtimeInfo = await readSandboxCursorRuntimeInfo({
|
||||
runId: input.runId,
|
||||
target: input.target,
|
||||
command: input.command,
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
timeoutSec: input.timeoutSec,
|
||||
graceSec: input.graceSec,
|
||||
});
|
||||
const remoteSystemHomeDir = runtimeInfo.remoteSystemHomeDir;
|
||||
|
||||
if (!remoteSystemHomeDir) {
|
||||
return {
|
||||
command: input.command,
|
||||
env: input.env,
|
||||
remoteSystemHomeDir: null,
|
||||
addedPathEntry: null,
|
||||
preferredCommandPath: null,
|
||||
};
|
||||
}
|
||||
|
||||
const remoteLocalBinDir = path.posix.join(remoteSystemHomeDir, ".local", "bin");
|
||||
const runtimeEnv = ensurePathInEnv(input.env);
|
||||
const currentPath = runtimeEnv.PATH ?? runtimeEnv.Path ?? "";
|
||||
const nextPath = prependPosixPathEntry(currentPath, remoteLocalBinDir);
|
||||
const env = nextPath === currentPath ? input.env : { ...input.env, PATH: nextPath };
|
||||
|
||||
if (!runtimeInfo.preferredCommandPath) {
|
||||
return {
|
||||
command: input.command,
|
||||
env,
|
||||
remoteSystemHomeDir,
|
||||
addedPathEntry: nextPath === currentPath ? null : remoteLocalBinDir,
|
||||
preferredCommandPath: null,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
command: runtimeInfo.preferredCommandPath,
|
||||
env,
|
||||
remoteSystemHomeDir,
|
||||
addedPathEntry: nextPath === currentPath ? null : remoteLocalBinDir,
|
||||
preferredCommandPath: runtimeInfo.preferredCommandPath,
|
||||
};
|
||||
}
|
||||
@@ -7,16 +7,21 @@ import {
|
||||
asString,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePathInEnv,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import {
|
||||
ensureAdapterExecutionTargetCommandResolvable,
|
||||
ensureAdapterExecutionTargetDirectory,
|
||||
runAdapterExecutionTargetProcess,
|
||||
describeAdapterExecutionTarget,
|
||||
resolveAdapterExecutionTargetCwd,
|
||||
} from "@paperclipai/adapter-utils/execution-target";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { DEFAULT_CURSOR_LOCAL_MODEL } from "../index.js";
|
||||
import { parseCursorJsonl } from "./parse.js";
|
||||
import { isDefaultCursorCommand, prepareCursorSandboxCommand } from "./remote-command.js";
|
||||
import { hasCursorTrustBypassArg } from "../shared/trust.js";
|
||||
|
||||
function summarizeStatus(checks: AdapterEnvironmentCheck[]): AdapterEnvironmentTestResult["status"] {
|
||||
@@ -38,11 +43,6 @@ function firstNonEmptyLine(text: string): string {
|
||||
);
|
||||
}
|
||||
|
||||
function commandLooksLike(command: string, expected: string): boolean {
|
||||
const base = path.basename(command).toLowerCase();
|
||||
return base === expected || base === `${expected}.cmd` || base === `${expected}.exe`;
|
||||
}
|
||||
|
||||
function summarizeProbeDetail(stdout: string, stderr: string, parsedError: string | null): string | null {
|
||||
const raw = parsedError?.trim() || firstNonEmptyLine(stderr) || firstNonEmptyLine(stdout);
|
||||
if (!raw) return null;
|
||||
@@ -94,11 +94,29 @@ export async function testEnvironment(
|
||||
): Promise<AdapterEnvironmentTestResult> {
|
||||
const checks: AdapterEnvironmentCheck[] = [];
|
||||
const config = parseObject(ctx.config);
|
||||
const command = asString(config.command, "agent");
|
||||
const cwd = asString(config.cwd, process.cwd());
|
||||
let command = asString(config.command, "agent");
|
||||
const target = ctx.executionTarget ?? null;
|
||||
const targetIsRemote = target?.kind === "remote";
|
||||
const cwd = resolveAdapterExecutionTargetCwd(target, asString(config.cwd, ""), process.cwd());
|
||||
const targetLabel = targetIsRemote
|
||||
? ctx.environmentName ?? describeAdapterExecutionTarget(target)
|
||||
: null;
|
||||
const runId = `cursor-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
||||
|
||||
if (targetLabel) {
|
||||
checks.push({
|
||||
code: "cursor_environment_target",
|
||||
level: "info",
|
||||
message: `Probing inside environment: ${targetLabel}`,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await ensureAbsoluteDirectory(cwd, { createIfMissing: true });
|
||||
await ensureAdapterExecutionTargetDirectory(runId, target, cwd, {
|
||||
cwd,
|
||||
env: {},
|
||||
createIfMissing: true,
|
||||
});
|
||||
checks.push({
|
||||
code: "cursor_cwd_valid",
|
||||
level: "info",
|
||||
@@ -114,13 +132,24 @@ export async function testEnvironment(
|
||||
}
|
||||
|
||||
const envConfig = parseObject(config.env);
|
||||
const env: Record<string, string> = {};
|
||||
let env: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
}
|
||||
const sandboxCommand = await prepareCursorSandboxCommand({
|
||||
runId,
|
||||
target,
|
||||
command,
|
||||
cwd,
|
||||
env,
|
||||
timeoutSec: 45,
|
||||
graceSec: 5,
|
||||
});
|
||||
command = sandboxCommand.command;
|
||||
env = sandboxCommand.env;
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
await ensureAdapterExecutionTargetCommandResolvable(command, target, cwd, runtimeEnv);
|
||||
checks.push({
|
||||
code: "cursor_command_resolvable",
|
||||
level: "info",
|
||||
@@ -136,7 +165,7 @@ export async function testEnvironment(
|
||||
}
|
||||
|
||||
const configCursorApiKey = env.CURSOR_API_KEY;
|
||||
const hostCursorApiKey = process.env.CURSOR_API_KEY;
|
||||
const hostCursorApiKey = targetIsRemote ? undefined : process.env.CURSOR_API_KEY;
|
||||
if (isNonEmpty(configCursorApiKey) || isNonEmpty(hostCursorApiKey)) {
|
||||
const source = isNonEmpty(configCursorApiKey) ? "adapter config env" : "server environment";
|
||||
checks.push({
|
||||
@@ -145,7 +174,7 @@ export async function testEnvironment(
|
||||
message: "CURSOR_API_KEY is set for Cursor authentication.",
|
||||
detail: `Detected in ${source}.`,
|
||||
});
|
||||
} else {
|
||||
} else if (!targetIsRemote) {
|
||||
const cursorHome = isNonEmpty(env.CURSOR_HOME) ? env.CURSOR_HOME : undefined;
|
||||
const cursorAuth = await readCursorAuthInfo(cursorHome).catch(() => null);
|
||||
if (cursorAuth) {
|
||||
@@ -170,13 +199,13 @@ export async function testEnvironment(
|
||||
const canRunProbe =
|
||||
checks.every((check) => check.code !== "cursor_cwd_invalid" && check.code !== "cursor_command_unresolvable");
|
||||
if (canRunProbe) {
|
||||
if (!commandLooksLike(command, "agent")) {
|
||||
if (!isDefaultCursorCommand(command)) {
|
||||
checks.push({
|
||||
code: "cursor_hello_probe_skipped_custom_command",
|
||||
level: "info",
|
||||
message: "Skipped hello probe because command is not `agent`.",
|
||||
message: "Skipped hello probe because command is not a default Cursor CLI entrypoint.",
|
||||
detail: command,
|
||||
hint: "Use the `agent` CLI command to run the automatic installation and auth probe.",
|
||||
hint: "Use `agent` or `cursor-agent` to run the automatic installation and auth probe.",
|
||||
});
|
||||
} else {
|
||||
const model = asString(config.model, DEFAULT_CURSOR_LOCAL_MODEL).trim();
|
||||
@@ -192,8 +221,9 @@ export async function testEnvironment(
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
args.push("Respond with hello.");
|
||||
|
||||
const probe = await runChildProcess(
|
||||
`cursor-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
const probe = await runAdapterExecutionTargetProcess(
|
||||
runId,
|
||||
target,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
|
||||
@@ -61,8 +61,6 @@ export function buildCursorLocalConfig(v: CreateConfigValues): Record<string, un
|
||||
const ac: Record<string, unknown> = {};
|
||||
if (v.cwd) ac.cwd = v.cwd;
|
||||
if (v.instructionsFilePath) ac.instructionsFilePath = v.instructionsFilePath;
|
||||
if (v.promptTemplate) ac.promptTemplate = v.promptTemplate;
|
||||
if (v.bootstrapPrompt) ac.bootstrapPromptTemplate = v.bootstrapPrompt;
|
||||
ac.model = v.model || DEFAULT_CURSOR_LOCAL_MODEL;
|
||||
const mode = normalizeMode(v.thinkingEffort);
|
||||
if (mode) ac.mode = mode;
|
||||
|
||||