Mirror of https://github.com/paperclipai/paperclip (synced 2026-04-25 17:25:15 +02:00)
Compare commits: 267 commits, pr/pap-817 ... PAPA-45-up
7 .github/CODEOWNERS vendored

@@ -8,3 +8,10 @@ scripts/rollback-latest.sh @cryppadotta @devinfoley
doc/RELEASING.md @cryppadotta @devinfoley
doc/PUBLISHING.md @cryppadotta @devinfoley
doc/RELEASE-AUTOMATION-SETUP.md @cryppadotta @devinfoley

# Package files — dependency changes require review
# package.json matches recursively at all depths (covers root + all workspaces)
package.json @cryppadotta @devinfoley
pnpm-lock.yaml @cryppadotta @devinfoley
pnpm-workspace.yaml @cryppadotta @devinfoley
.npmrc @cryppadotta @devinfoley

16 .github/PULL_REQUEST_TEMPLATE.md vendored

@@ -38,9 +38,25 @@

-

## Model Used

<!--
Required. Specify which AI model was used to produce or assist with
this change. Be as descriptive as possible — include:
• Provider and model name (e.g., Claude, GPT, Gemini, Codex)
• Exact model ID or version (e.g., claude-opus-4-6, gpt-4-turbo-2024-04-09)
• Context window size if relevant (e.g., 1M context)
• Reasoning/thinking mode if applicable (e.g., extended thinking, chain-of-thought)
• Any other relevant capability details (e.g., tool use, code execution)
If no AI model was used, write "None — human-authored".
-->

-

## Checklist

- [ ] I have included a thinking path that traces from project context to this change
- [ ] I have specified the model used (with version and capability details)
- [ ] I have run tests locally and they pass
- [ ] I have added or updated tests where applicable
- [ ] If this change affects the UI, I have included before/after screenshots

40 .github/workflows/pr.yml vendored

@@ -40,6 +40,46 @@ jobs:
        with:
          node-version: 24

      - name: Validate Dockerfile deps stage
        run: |
          missing=0

          # Extract only the deps stage from the Dockerfile
          deps_stage="$(awk '/^FROM .* AS deps$/{found=1; next} found && /^FROM /{exit} found{print}' Dockerfile)"

          if [ -z "$deps_stage" ]; then
            echo "::error::Could not extract deps stage from Dockerfile (expected 'FROM ... AS deps')"
            exit 1
          fi

          # Derive workspace search roots from pnpm-workspace.yaml (exclude dev-only packages)
          search_roots="$(grep '^ *- ' pnpm-workspace.yaml | sed 's/^ *- //' | sed 's/\*$//' | grep -v 'examples' | grep -v 'create-paperclip-plugin' | tr '\n' ' ')"

          if [ -z "$search_roots" ]; then
            echo "::error::Could not derive workspace roots from pnpm-workspace.yaml"
            exit 1
          fi

          # Check all workspace package.json files are copied in the deps stage
          for pkg in $(find $search_roots -maxdepth 2 -name package.json -not -path '*/examples/*' -not -path '*/create-paperclip-plugin/*' -not -path '*/node_modules/*' 2>/dev/null | sort -u); do
            dir="$(dirname "$pkg")"
            if ! echo "$deps_stage" | grep -q "^COPY ${dir}/package.json"; then
              echo "::error::Dockerfile deps stage missing: COPY ${pkg} ${dir}/"
              missing=1
            fi
          done

          # Check patches directory is copied if it exists
          if [ -d patches ] && ! echo "$deps_stage" | grep -q '^COPY patches/'; then
            echo "::error::Dockerfile deps stage missing: COPY patches/ patches/"
            missing=1
          fi

          if [ "$missing" -eq 1 ]; then
            echo "Dockerfile deps stage is out of sync. Update it to include the missing files."
            exit 1
          fi

      - name: Validate dependency resolution when manifests change
        run: |
          changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"

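The step above only passes when every workspace package manifest (and the patches/ directory, if present) has a matching COPY line in the Dockerfile's deps stage. A minimal sketch of checking one manifest locally with the same awk extraction the workflow uses; the package path is just an example taken from this diff:

```bash
# Extract the deps stage exactly as the CI step does, then check one COPY line.
deps_stage="$(awk '/^FROM .* AS deps$/{found=1; next} found && /^FROM /{exit} found{print}' Dockerfile)"
echo "$deps_stage" | grep -q '^COPY packages/plugins/sdk/package.json' && echo ok || echo missing
```
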
1 .gitignore vendored

@@ -31,6 +31,7 @@ server/src/**/*.js.map
server/src/**/*.d.ts
server/src/**/*.d.ts.map
tmp/
feedback-export-*

# Editor / tool temp files
*.tmp

@@ -26,6 +26,9 @@ Before making changes, read in this order:
- `ui/`: React + Vite board UI
- `packages/db/`: Drizzle schema, migrations, DB clients
- `packages/shared/`: shared types, constants, validators, API path constants
- `packages/adapters/`: agent adapter implementations (Claude, Codex, Cursor, etc.)
- `packages/adapter-utils/`: shared adapter utilities
- `packages/plugins/`: plugin system packages
- `doc/`: operational and product docs

## 4. Dev Setup (Auto DB)

@@ -11,8 +11,9 @@ We really appreciate both small fixes and thoughtful larger changes.
- Pick **one** clear thing to fix/improve
- Touch the **smallest possible number of files**
- Make sure the change is very targeted and easy to review
- All automated checks pass (including Greptile comments)
- No new lint/test failures
- All tests pass and CI is green
- Greptile score is 5/5 with all comments addressed
- Use the [PR template](.github/PULL_REQUEST_TEMPLATE.md)

These almost always get merged quickly when they're clean.

@@ -26,11 +27,26 @@ These almost always get merged quickly when they're clean.
- Before / After screenshots (or short video if UI/behavior change)
- Clear description of what & why
- Proof it works (manual testing notes)
- All tests passing
- All Greptile + other PR comments addressed
- All tests passing and CI green
- Greptile score 5/5 with all comments addressed
- [PR template](.github/PULL_REQUEST_TEMPLATE.md) fully filled out

PRs that follow this path are **much** more likely to be accepted, even when they're large.

## PR Requirements (all PRs)

### Use the PR Template

Every pull request **must** follow the PR template at [`.github/PULL_REQUEST_TEMPLATE.md`](.github/PULL_REQUEST_TEMPLATE.md). If you create a PR via the GitHub API or other tooling that bypasses the template, copy its contents into your PR description manually. The template includes required sections: Thinking Path, What Changed, Verification, Risks, and a Checklist.

### Tests Must Pass

All tests must pass before a PR can be merged. Run them locally first and verify CI is green after pushing.

### Greptile Review

We use [Greptile](https://greptile.com) for automated code review. Your PR must achieve a **5/5 Greptile score** with **all Greptile comments addressed** before it can be merged. If Greptile leaves comments, fix or respond to each one and request a re-review.

## General Rules (both paths)

- Write clear commit messages
@@ -41,7 +57,7 @@ PRs that follow this path are **much** more likely to be accepted, even when the

## Writing a Good PR message

Please include a "thinking path" at the top of your PR message that explains from the top of the project down to what you fixed. E.g.:
Your PR description must follow the [PR template](.github/PULL_REQUEST_TEMPLATE.md). All sections are required. The "thinking path" at the top explains from the top of the project down to what you fixed. E.g.:

### Thinking Path Example 1:

34 Dockerfile

@@ -1,8 +1,23 @@
FROM node:lts-trixie-slim AS base
ARG USER_UID=1000
ARG USER_GID=1000
RUN apt-get update \
  && apt-get install -y --no-install-recommends ca-certificates curl git \
  && rm -rf /var/lib/apt/lists/*
RUN corepack enable
  && apt-get install -y --no-install-recommends ca-certificates gosu curl git wget ripgrep python3 \
  && mkdir -p -m 755 /etc/apt/keyrings \
  && wget -nv -O/etc/apt/keyrings/githubcli-archive-keyring.gpg https://cli.github.com/packages/githubcli-archive-keyring.gpg \
  && echo "20e0125d6f6e077a9ad46f03371bc26d90b04939fb95170f5a1905099cc6bcc0 /etc/apt/keyrings/githubcli-archive-keyring.gpg" | sha256sum -c - \
  && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
  && mkdir -p -m 755 /etc/apt/sources.list.d \
  && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" > /etc/apt/sources.list.d/github-cli.list \
  && apt-get update \
  && apt-get install -y --no-install-recommends gh \
  && rm -rf /var/lib/apt/lists/* \
  && corepack enable

# Modify the existing node user/group to have the specified UID/GID to match host user
RUN usermod -u $USER_UID --non-unique node \
  && groupmod -g $USER_GID --non-unique node \
  && usermod -g $USER_GID -d /paperclip node

FROM base AS deps
WORKDIR /app
@@ -21,6 +36,7 @@ COPY packages/adapters/openclaw-gateway/package.json packages/adapters/openclaw-
COPY packages/adapters/opencode-local/package.json packages/adapters/opencode-local/
COPY packages/adapters/pi-local/package.json packages/adapters/pi-local/
COPY packages/plugins/sdk/package.json packages/plugins/sdk/
COPY patches/ patches/

RUN pnpm install --frozen-lockfile

@@ -34,12 +50,17 @@ RUN pnpm --filter @paperclipai/server build
RUN test -f server/dist/index.js || (echo "ERROR: server build output missing" && exit 1)

FROM base AS production
ARG USER_UID=1000
ARG USER_GID=1000
WORKDIR /app
COPY --chown=node:node --from=build /app /app
RUN npm install --global --omit=dev @anthropic-ai/claude-code@latest @openai/codex@latest opencode-ai \
  && mkdir -p /paperclip \
  && chown node:node /paperclip

COPY scripts/docker-entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/docker-entrypoint.sh

ENV NODE_ENV=production \
  HOME=/paperclip \
  HOST=0.0.0.0 \
@@ -47,12 +68,15 @@ ENV NODE_ENV=production \
  SERVE_UI=true \
  PAPERCLIP_HOME=/paperclip \
  PAPERCLIP_INSTANCE_ID=default \
  USER_UID=${USER_UID} \
  USER_GID=${USER_GID} \
  PAPERCLIP_CONFIG=/paperclip/instances/default/config.json \
  PAPERCLIP_DEPLOYMENT_MODE=authenticated \
  PAPERCLIP_DEPLOYMENT_EXPOSURE=private
  PAPERCLIP_DEPLOYMENT_EXPOSURE=private \
  OPENCODE_ALLOW_ALL_MODELS=true

VOLUME ["/paperclip"]
EXPOSE 3100

USER node
ENTRYPOINT ["docker-entrypoint.sh"]
CMD ["node", "--import", "./server/node_modules/tsx/dist/loader.mjs", "server/dist/index.js"]

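The production stage exposes port 3100, keeps state under the /paperclip volume, and takes USER_UID/USER_GID build args so files match the host user. A minimal sketch of building and running the image; the image tag and volume name are assumptions, not part of this diff:

```bash
# Build with UID/GID matching the host user, then run with the state volume and UI port mapped.
docker build --build-arg USER_UID=$(id -u) --build-arg USER_GID=$(id -g) -t paperclip:local .
docker run -d -p 3100:3100 -v paperclip-data:/paperclip paperclip:local
```
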
40 README.md

@@ -177,6 +177,8 @@ Open source. Self-hosted. No Paperclip account required.
npx paperclipai onboard --yes
```

If you already have Paperclip configured, rerunning `onboard` keeps the existing config in place. Use `paperclipai configure` to edit settings.

Or manually:

```bash
@@ -234,16 +236,40 @@ See [doc/DEVELOPING.md](doc/DEVELOPING.md) for the full development guide.

## Roadmap

- ⚪ Get OpenClaw onboarding easier
- ⚪ Get cloud agents working e.g. Cursor / e2b agents
- ⚪ ClipMart - buy and sell entire agent companies
- ⚪ Easy agent configurations / easier to understand
- ⚪ Better support for harness engineering
- 🟢 Plugin system (e.g. if you want to add a knowledgebase, custom tracing, queues, etc)
- ⚪ Better docs
- ✅ Plugin system (e.g. add a knowledge base, custom tracing, queues, etc)
- ✅ Get OpenClaw / claw-style agent employees
- ✅ companies.sh - import and export entire organizations
- ✅ Easy AGENTS.md configurations
- ✅ Skills Manager
- ✅ Scheduled Routines
- ✅ Better Budgeting
- ⚪ Artifacts & Deployments
- ⚪ CEO Chat
- ⚪ MAXIMIZER MODE
- ⚪ Multiple Human Users
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
- ⚪ Cloud deployments
- ⚪ Desktop App

<br/>

## Community & Plugins

Find Plugins and more at [awesome-paperclip](https://github.com/gsxdsm/awesome-paperclip)

## Telemetry

Paperclip collects anonymous usage telemetry to help us understand how the product is used and improve it. No personal information, issue content, prompts, file paths, or secrets are ever collected. Private repository references are hashed with a per-install salt before being sent.

Telemetry is **enabled by default** and can be disabled with any of the following:

| Method | How |
|---|---|
| Environment variable | `PAPERCLIP_TELEMETRY_DISABLED=1` |
| Standard convention | `DO_NOT_TRACK=1` |
| CI environments | Automatically disabled when `CI=true` |
| Config file | Set `telemetry.enabled: false` in your Paperclip config |

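A minimal sketch of the environment-variable methods, using the onboard command from the Quickstart above; either variable works:

```bash
# Disable telemetry for a single invocation
PAPERCLIP_TELEMETRY_DISABLED=1 npx paperclipai onboard --yes
# Or use the standard convention
DO_NOT_TRACK=1 npx paperclipai onboard --yes
```
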
## Contributing

We welcome contributions. See the [contributing guide](CONTRIBUTING.md) for details.

292 cli/README.md Normal file

@@ -0,0 +1,292 @@
<p align="center">
  <img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/header.png" alt="Paperclip — runs your business" width="720" />
</p>

<p align="center">
  <a href="#quickstart"><strong>Quickstart</strong></a> ·
  <a href="https://paperclip.ing/docs"><strong>Docs</strong></a> ·
  <a href="https://github.com/paperclipai/paperclip"><strong>GitHub</strong></a> ·
  <a href="https://discord.gg/m4HZY7xNG3"><strong>Discord</strong></a>
</p>

<p align="center">
  <a href="https://github.com/paperclipai/paperclip/blob/master/LICENSE"><img src="https://img.shields.io/badge/license-MIT-blue" alt="MIT License" /></a>
  <a href="https://github.com/paperclipai/paperclip/stargazers"><img src="https://img.shields.io/github/stars/paperclipai/paperclip?style=flat" alt="Stars" /></a>
  <a href="https://discord.gg/m4HZY7xNG3"><img src="https://img.shields.io/badge/discord-join%20chat-5865F2?logo=discord&logoColor=white" alt="Discord" /></a>
</p>

<br/>

<div align="center">
  <video src="https://github.com/user-attachments/assets/773bdfb2-6d1e-4e30-8c5f-3487d5b70c8f" width="600" controls></video>
</div>

<br/>

## What is Paperclip?

# Open-source orchestration for zero-human companies

**If OpenClaw is an _employee_, Paperclip is the _company_**

Paperclip is a Node.js server and React UI that orchestrates a team of AI agents to run a business. Bring your own agents, assign goals, and track your agents' work and costs from one dashboard.

It looks like a task manager — but under the hood it has org charts, budgets, governance, goal alignment, and agent coordination.

**Manage business goals, not pull requests.**

| | Step | Example |
| ------ | --------------- | ------------------------------------------------------------------ |
| **01** | Define the goal | _"Build the #1 AI note-taking app to $1M MRR."_ |
| **02** | Hire the team | CEO, CTO, engineers, designers, marketers — any bot, any provider. |
| **03** | Approve and run | Review strategy. Set budgets. Hit go. Monitor from the dashboard. |

<br/>

> **COMING SOON: Clipmart** — Download and run entire companies with one click. Browse pre-built company templates — full org structures, agent configs, and skills — and import them into your Paperclip instance in seconds.

<br/>

<div align="center">
<table>
  <tr>
    <td align="center"><strong>Works<br/>with</strong></td>
    <td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/openclaw.svg" width="32" alt="OpenClaw" /><br/><sub>OpenClaw</sub></td>
    <td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/claude.svg" width="32" alt="Claude" /><br/><sub>Claude Code</sub></td>
    <td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/codex.svg" width="32" alt="Codex" /><br/><sub>Codex</sub></td>
    <td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/cursor.svg" width="32" alt="Cursor" /><br/><sub>Cursor</sub></td>
    <td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/bash.svg" width="32" alt="Bash" /><br/><sub>Bash</sub></td>
    <td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/http.svg" width="32" alt="HTTP" /><br/><sub>HTTP</sub></td>
  </tr>
</table>

<em>If it can receive a heartbeat, it's hired.</em>

</div>

<br/>

## Paperclip is right for you if

- ✅ You want to build **autonomous AI companies**
- ✅ You **coordinate many different agents** (OpenClaw, Codex, Claude, Cursor) toward a common goal
- ✅ You have **20 simultaneous Claude Code terminals** open and lose track of what everyone is doing
- ✅ You want agents running **autonomously 24/7**, but still want to audit work and chime in when needed
- ✅ You want to **monitor costs** and enforce budgets
- ✅ You want a process for managing agents that **feels like using a task manager**
- ✅ You want to manage your autonomous businesses **from your phone**

<br/>

## Features

<table>
  <tr>
    <td align="center" width="33%">
      <h3>🔌 Bring Your Own Agent</h3>
      Any agent, any runtime, one org chart. If it can receive a heartbeat, it's hired.
    </td>
    <td align="center" width="33%">
      <h3>🎯 Goal Alignment</h3>
      Every task traces back to the company mission. Agents know <em>what</em> to do and <em>why</em>.
    </td>
    <td align="center" width="33%">
      <h3>💓 Heartbeats</h3>
      Agents wake on a schedule, check work, and act. Delegation flows up and down the org chart.
    </td>
  </tr>
  <tr>
    <td align="center">
      <h3>💰 Cost Control</h3>
      Monthly budgets per agent. When they hit the limit, they stop. No runaway costs.
    </td>
    <td align="center">
      <h3>🏢 Multi-Company</h3>
      One deployment, many companies. Complete data isolation. One control plane for your portfolio.
    </td>
    <td align="center">
      <h3>🎫 Ticket System</h3>
      Every conversation traced. Every decision explained. Full tool-call tracing and immutable audit log.
    </td>
  </tr>
  <tr>
    <td align="center">
      <h3>🛡️ Governance</h3>
      You're the board. Approve hires, override strategy, pause or terminate any agent — at any time.
    </td>
    <td align="center">
      <h3>📊 Org Chart</h3>
      Hierarchies, roles, reporting lines. Your agents have a boss, a title, and a job description.
    </td>
    <td align="center">
      <h3>📱 Mobile Ready</h3>
      Monitor and manage your autonomous businesses from anywhere.
    </td>
  </tr>
</table>

<br/>

## Problems Paperclip solves

| Without Paperclip | With Paperclip |
| --- | --- |
| ❌ You have 20 Claude Code tabs open and can't track which one does what. On reboot you lose everything. | ✅ Tasks are ticket-based, conversations are threaded, sessions persist across reboots. |
| ❌ You manually gather context from several places to remind your bot what you're actually doing. | ✅ Context flows from the task up through the project and company goals — your agent always knows what to do and why. |
| ❌ Folders of agent configs are disorganized and you're re-inventing task management, communication, and coordination between agents. | ✅ Paperclip gives you org charts, ticketing, delegation, and governance out of the box — so you run a company, not a pile of scripts. |
| ❌ Runaway loops waste hundreds of dollars of tokens and max your quota before you even know what happened. | ✅ Cost tracking surfaces token budgets and throttles agents when they're out. Management prioritizes with budgets. |
| ❌ You have recurring jobs (customer support, social, reports) and have to remember to manually kick them off. | ✅ Heartbeats handle regular work on a schedule. Management supervises. |
| ❌ You have an idea, you have to find your repo, fire up Claude Code, keep a tab open, and babysit it. | ✅ Add a task in Paperclip. Your coding agent works on it until it's done. Management reviews their work. |

<br/>

## Why Paperclip is special

Paperclip handles the hard orchestration details correctly.

| | |
| --- | --- |
| **Atomic execution.** | Task checkout and budget enforcement are atomic, so no double-work and no runaway spend. |
| **Persistent agent state.** | Agents resume the same task context across heartbeats instead of restarting from scratch. |
| **Runtime skill injection.** | Agents can learn Paperclip workflows and project context at runtime, without retraining. |
| **Governance with rollback.** | Approval gates are enforced, config changes are revisioned, and bad changes can be rolled back safely. |
| **Goal-aware execution.** | Tasks carry full goal ancestry so agents consistently see the "why," not just a title. |
| **Portable company templates.** | Export/import orgs, agents, and skills with secret scrubbing and collision handling. |
| **True multi-company isolation.** | Every entity is company-scoped, so one deployment can run many companies with separate data and audit trails. |

<br/>

## What Paperclip is not

| | |
| --- | --- |
| **Not a chatbot.** | Agents have jobs, not chat windows. |
| **Not an agent framework.** | We don't tell you how to build agents. We tell you how to run a company made of them. |
| **Not a workflow builder.** | No drag-and-drop pipelines. Paperclip models companies — with org charts, goals, budgets, and governance. |
| **Not a prompt manager.** | Agents bring their own prompts, models, and runtimes. Paperclip manages the organization they work in. |
| **Not a single-agent tool.** | This is for teams. If you have one agent, you probably don't need Paperclip. If you have twenty — you definitely do. |
| **Not a code review tool.** | Paperclip orchestrates work, not pull requests. Bring your own review process. |

<br/>

## Quickstart

Open source. Self-hosted. No Paperclip account required.

```bash
npx paperclipai onboard --yes
```

If you already have Paperclip configured, rerunning `onboard` keeps the existing config in place. Use `paperclipai configure` to edit settings.

Or manually:

```bash
git clone https://github.com/paperclipai/paperclip.git
cd paperclip
pnpm install
pnpm dev
```

This starts the API server at `http://localhost:3100`. An embedded PostgreSQL database is created automatically — no setup required.

> **Requirements:** Node.js 20+, pnpm 9.15+

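Once `pnpm dev` is running you can sanity-check the server from another terminal; a minimal sketch using the health endpoint that the CLI's connection-error hints point at:

```bash
# Confirm the API server is reachable
curl http://localhost:3100/api/health
```
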
<br/>

## FAQ

**What does a typical setup look like?**
Locally, a single Node.js process manages an embedded Postgres and local file storage. For production, point it at your own Postgres and deploy however you like. Configure projects, agents, and goals — the agents take care of the rest.

If you're a solo entrepreneur, you can use Tailscale to access Paperclip on the go, then later deploy to e.g. Vercel when you need it.

**Can I run multiple companies?**
Yes. A single deployment can run an unlimited number of companies with complete data isolation.

**How is Paperclip different from agents like OpenClaw or Claude Code?**
Paperclip _uses_ those agents. It orchestrates them into a company — with org charts, budgets, goals, governance, and accountability.

**Why should I use Paperclip instead of just pointing my OpenClaw to Asana or Trello?**
Agent orchestration has subtleties: coordinating who has work checked out, maintaining sessions, monitoring costs, and establishing governance. Paperclip does this for you.

(Bring-your-own-ticket-system is on the Roadmap)

**Do agents run continuously?**
By default, agents run on scheduled heartbeats and event-based triggers (task assignment, @-mentions). You can also hook in continuous agents like OpenClaw. You bring your agent and Paperclip coordinates.

<br/>

## Development

```bash
pnpm dev          # Full dev (API + UI, watch mode)
pnpm dev:once     # Full dev without file watching
pnpm dev:server   # Server only
pnpm build        # Build all
pnpm typecheck    # Type checking
pnpm test:run     # Run tests
pnpm db:generate  # Generate DB migration
pnpm db:migrate   # Apply migrations
```

See [doc/DEVELOPING.md](https://github.com/paperclipai/paperclip/blob/master/doc/DEVELOPING.md) for the full development guide.

<br/>

## Roadmap

- ✅ Plugin system (e.g. add a knowledge base, custom tracing, queues, etc)
- ✅ Get OpenClaw / claw-style agent employees
- ✅ companies.sh - import and export entire organizations
- ✅ Easy AGENTS.md configurations
- ✅ Skills Manager
- ✅ Scheduled Routines
- ✅ Better Budgeting
- ⚪ Artifacts & Deployments
- ⚪ CEO Chat
- ⚪ MAXIMIZER MODE
- ⚪ Multiple Human Users
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
- ⚪ Cloud deployments
- ⚪ Desktop App

<br/>

## Community & Plugins

Find Plugins and more at [awesome-paperclip](https://github.com/gsxdsm/awesome-paperclip)

## Contributing

We welcome contributions. See the [contributing guide](https://github.com/paperclipai/paperclip/blob/master/CONTRIBUTING.md) for details.

<br/>

## Community

- [Discord](https://discord.gg/m4HZY7xNG3) — Join the community
- [GitHub Issues](https://github.com/paperclipai/paperclip/issues) — bugs and feature requests
- [GitHub Discussions](https://github.com/paperclipai/paperclip/discussions) — ideas and RFCs

<br/>

## License

MIT © 2026 Paperclip

## Star History

[Star History](https://www.star-history.com/?repos=paperclipai%2Fpaperclip&type=date&legend=top-left)

<br/>

---

<p align="center">
  <img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/footer.jpg" alt="" width="720" />
</p>

<p align="center">
  <sub>Open source under MIT. Built for people who want to run companies, not babysit agents.</sub>
</p>

@@ -44,6 +44,9 @@ function writeBaseConfig(configPath: string) {
      baseUrlMode: "auto",
      disableSignUp: false,
    },
    telemetry: {
      enabled: true,
    },
    storage: {
      provider: "local_disk",
      localDisk: { baseDir: "/tmp/paperclip-storage" },

@@ -15,6 +15,10 @@ function makeCompany(overrides: Partial<Company>): Company {
    budgetMonthlyCents: 0,
    spentMonthlyCents: 0,
    requireBoardApprovalForNewAgents: false,
    feedbackDataSharingEnabled: false,
    feedbackDataSharingConsentAt: null,
    feedbackDataSharingConsentByUserId: null,
    feedbackDataSharingTermsVersion: null,
    brandColor: null,
    logoAssetId: null,
    logoUrl: null,

@@ -6,33 +6,15 @@ import path from "node:path";
import { fileURLToPath } from "node:url";
import { promisify } from "node:util";
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import {
  getEmbeddedPostgresTestSupport,
  startEmbeddedPostgresTestDatabase,
} from "./helpers/embedded-postgres.js";
import { createStoredZipArchive } from "./helpers/zip.js";

type EmbeddedPostgresInstance = {
  initialise(): Promise<void>;
  start(): Promise<void>;
  stop(): Promise<void>;
};

type EmbeddedPostgresCtor = new (opts: {
  databaseDir: string;
  user: string;
  password: string;
  port: number;
  persistent: boolean;
  initdbFlags?: string[];
  onLog?: (message: unknown) => void;
  onError?: (message: unknown) => void;
}) => EmbeddedPostgresInstance;

const execFileAsync = promisify(execFile);
type ServerProcess = ReturnType<typeof spawn>;

async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
  const mod = await import("embedded-postgres");
  return mod.default as EmbeddedPostgresCtor;
}

async function getAvailablePort(): Promise<number> {
  return await new Promise((resolve, reject) => {
    const server = net.createServer();
@@ -53,30 +35,13 @@
  });
}

async function startTempDatabase() {
  const dataDir = mkdtempSync(path.join(os.tmpdir(), "paperclip-company-cli-db-"));
  const port = await getAvailablePort();
  const EmbeddedPostgres = await getEmbeddedPostgresCtor();
  const instance = new EmbeddedPostgres({
    databaseDir: dataDir,
    user: "paperclip",
    password: "paperclip",
    port,
    persistent: true,
    initdbFlags: ["--encoding=UTF8", "--locale=C"],
    onLog: () => {},
    onError: () => {},
  });
  await instance.initialise();
  await instance.start();
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;

  const { applyPendingMigrations, ensurePostgresDatabase } = await import("@paperclipai/db");
  const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
  await ensurePostgresDatabase(adminConnectionString, "paperclip");
  const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
  await applyPendingMigrations(connectionString);

  return { connectionString, dataDir, instance };
if (!embeddedPostgresSupport.supported) {
  console.warn(
    `Skipping embedded Postgres company import/export e2e tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
  );
}

function writeTestConfig(configPath: string, tempRoot: string, port: number, connectionString: string) {
@@ -265,26 +230,23 @@ async function waitForServer(
  );
}

describe("paperclipai company import/export e2e", () => {
describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
  let tempRoot = "";
  let configPath = "";
  let exportDir = "";
  let apiBase = "";
  let serverProcess: ServerProcess | null = null;
  let dbDataDir = "";
  let dbInstance: EmbeddedPostgresInstance | null = null;
  let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;

  beforeAll(async () => {
    tempRoot = mkdtempSync(path.join(os.tmpdir(), "paperclip-company-cli-e2e-"));
    configPath = path.join(tempRoot, "config", "config.json");
    exportDir = path.join(tempRoot, "exported-company");

    const db = await startTempDatabase();
    dbDataDir = db.dataDir;
    dbInstance = db.instance;
    tempDb = await startEmbeddedPostgresTestDatabase("paperclip-company-cli-db-");

    const port = await getAvailablePort();
    writeTestConfig(configPath, tempRoot, port, db.connectionString);
    writeTestConfig(configPath, tempRoot, port, tempDb.connectionString);
    apiBase = `http://127.0.0.1:${port}`;

    const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../..");
@@ -294,7 +256,7 @@ describe("paperclipai company import/export e2e", () => {
      ["paperclipai", "run", "--config", configPath],
      {
        cwd: repoRoot,
        env: createServerEnv(configPath, port, db.connectionString),
        env: createServerEnv(configPath, port, tempDb.connectionString),
        stdio: ["ignore", "pipe", "pipe"],
      },
    );
@@ -311,10 +273,7 @@ describe("paperclipai company import/export e2e", () => {

  afterAll(async () => {
    await stopServerProcess(serverProcess);
    await dbInstance?.stop();
    if (dbDataDir) {
      rmSync(dbDataDir, { recursive: true, force: true });
    }
    await tempDb?.cleanup();
    if (tempRoot) {
      rmSync(tempRoot, { recursive: true, force: true });
    }

@@ -1,7 +1,7 @@
import { describe, expect, it } from "vitest";
import {
  isGithubShorthand,
  isGithubUrl,
  looksLikeRepoUrl,
  isHttpUrl,
  normalizeGithubImportSource,
} from "../commands/client/company.js";
@@ -21,17 +21,17 @@ describe("isHttpUrl", () => {
  });
});

describe("isGithubUrl", () => {
describe("looksLikeRepoUrl", () => {
  it("matches GitHub URLs", () => {
    expect(isGithubUrl("https://github.com/org/repo")).toBe(true);
    expect(looksLikeRepoUrl("https://github.com/org/repo")).toBe(true);
  });

  it("rejects non-GitHub HTTP URLs", () => {
    expect(isGithubUrl("https://example.com/foo")).toBe(false);
  it("rejects URLs without owner/repo path", () => {
    expect(looksLikeRepoUrl("https://example.com/foo")).toBe(false);
  });

  it("rejects local paths", () => {
    expect(isGithubUrl("/tmp/my-company")).toBe(false);
    expect(looksLikeRepoUrl("/tmp/my-company")).toBe(false);
  });
});

@@ -163,6 +163,10 @@ describe("renderCompanyImportPreview", () => {
        brandColor: null,
        logoPath: null,
        requireBoardApprovalForNewAgents: false,
        feedbackDataSharingEnabled: false,
        feedbackDataSharingConsentAt: null,
        feedbackDataSharingConsentByUserId: null,
        feedbackDataSharingTermsVersion: null,
      },
      sidebar: {
        agents: ["ceo"],
@@ -371,6 +375,10 @@ describe("import selection catalog", () => {
        brandColor: null,
        logoPath: "images/company-logo.png",
        requireBoardApprovalForNewAgents: false,
        feedbackDataSharingEnabled: false,
        feedbackDataSharingConsentAt: null,
        feedbackDataSharingConsentByUserId: null,
        feedbackDataSharingTermsVersion: null,
      },
      sidebar: {
        agents: ["ceo"],

@@ -46,6 +46,9 @@ function createTempConfig(): string {
      baseUrlMode: "auto",
      disableSignUp: false,
    },
    telemetry: {
      enabled: true,
    },
    storage: {
      provider: "local_disk",
      localDisk: {

177 cli/src/__tests__/feedback.test.ts Normal file

@@ -0,0 +1,177 @@
import os from "node:os";
import path from "node:path";
import { mkdtemp, readFile } from "node:fs/promises";
import { Command } from "commander";
import { describe, expect, it } from "vitest";
import type { FeedbackTrace } from "@paperclipai/shared";
import { readZipArchive } from "../commands/client/zip.js";
import {
  buildFeedbackTraceQuery,
  registerFeedbackCommands,
  renderFeedbackReport,
  summarizeFeedbackTraces,
  writeFeedbackExportBundle,
} from "../commands/client/feedback.js";

function makeTrace(overrides: Partial<FeedbackTrace> = {}): FeedbackTrace {
  return {
    id: "trace-12345678",
    companyId: "company-123",
    feedbackVoteId: "vote-12345678",
    issueId: "issue-123",
    projectId: "project-123",
    issueIdentifier: "PAP-123",
    issueTitle: "Fix the feedback command",
    authorUserId: "user-123",
    targetType: "issue_comment",
    targetId: "comment-123",
    vote: "down",
    status: "pending",
    destination: "paperclip_labs_feedback_v1",
    exportId: null,
    consentVersion: "feedback-data-sharing-v1",
    schemaVersion: "1",
    bundleVersion: "1",
    payloadVersion: "1",
    payloadDigest: null,
    payloadSnapshot: {
      vote: {
        value: "down",
        reason: "Needed more detail",
      },
    },
    targetSummary: {
      label: "Comment",
      excerpt: "The first answer was too vague.",
      authorAgentId: "agent-123",
      authorUserId: null,
      createdAt: new Date("2026-03-31T12:00:00.000Z"),
      documentKey: null,
      documentTitle: null,
      revisionNumber: null,
    },
    redactionSummary: null,
    attemptCount: 0,
    lastAttemptedAt: null,
    exportedAt: null,
    failureReason: null,
    createdAt: new Date("2026-03-31T12:01:00.000Z"),
    updatedAt: new Date("2026-03-31T12:02:00.000Z"),
    ...overrides,
  };
}

describe("registerFeedbackCommands", () => {
  it("registers the top-level feedback commands", () => {
    const program = new Command();

    expect(() => registerFeedbackCommands(program)).not.toThrow();

    const feedback = program.commands.find((command) => command.name() === "feedback");
    expect(feedback).toBeDefined();
    expect(feedback?.commands.map((command) => command.name())).toEqual(["report", "export"]);
    expect(feedback?.commands[0]?.options.filter((option) => option.long === "--company-id")).toHaveLength(1);
  });
});

describe("buildFeedbackTraceQuery", () => {
  it("encodes all supported filters", () => {
    expect(
      buildFeedbackTraceQuery({
        targetType: "issue_comment",
        vote: "down",
        status: "pending",
        projectId: "project-123",
        issueId: "issue-123",
        from: "2026-03-31T00:00:00.000Z",
        to: "2026-03-31T23:59:59.999Z",
        sharedOnly: true,
      }),
    ).toBe(
      "?targetType=issue_comment&vote=down&status=pending&projectId=project-123&issueId=issue-123&from=2026-03-31T00%3A00%3A00.000Z&to=2026-03-31T23%3A59%3A59.999Z&sharedOnly=true&includePayload=true",
    );
  });
});

describe("renderFeedbackReport", () => {
  it("includes summary counts and the optional reason", () => {
    const traces = [
      makeTrace(),
      makeTrace({
        id: "trace-87654321",
        feedbackVoteId: "vote-87654321",
        vote: "up",
        status: "local_only",
        payloadSnapshot: {
          vote: {
            value: "up",
            reason: null,
          },
        },
      }),
    ];

    const report = renderFeedbackReport({
      apiBase: "http://127.0.0.1:3100",
      companyId: "company-123",
      traces,
      summary: summarizeFeedbackTraces(traces),
      includePayloads: false,
    });

    expect(report).toContain("Paperclip Feedback Report");
    expect(report).toContain("thumbs up");
    expect(report).toContain("thumbs down");
    expect(report).toContain("Needed more detail");
  });
});

describe("writeFeedbackExportBundle", () => {
  it("writes votes, traces, a manifest, and a zip archive", async () => {
    const tempDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-feedback-export-"));
    const outputDir = path.join(tempDir, "feedback-export");
    const traces = [
      makeTrace(),
      makeTrace({
        id: "trace-abcdef12",
        feedbackVoteId: "vote-abcdef12",
        issueIdentifier: "PAP-124",
        issueId: "issue-124",
        vote: "up",
        status: "local_only",
        payloadSnapshot: {
          vote: {
            value: "up",
            reason: null,
          },
        },
      }),
    ];

    const exported = await writeFeedbackExportBundle({
      apiBase: "http://127.0.0.1:3100",
      companyId: "company-123",
      traces,
      outputDir,
    });

    expect(exported.manifest.summary.total).toBe(2);
    expect(exported.manifest.summary.withReason).toBe(1);

    const manifest = JSON.parse(await readFile(path.join(outputDir, "index.json"), "utf8")) as {
      files: { votes: string[]; traces: string[]; zip: string };
    };
    expect(manifest.files.votes).toHaveLength(2);
    expect(manifest.files.traces).toHaveLength(2);

    const archive = await readFile(exported.zipPath);
    const zip = await readZipArchive(archive);
    expect(Object.keys(zip.files)).toEqual(
      expect.arrayContaining([
        "index.json",
        `votes/${manifest.files.votes[0]}`,
        `traces/${manifest.files.traces[0]}`,
      ]),
    );
  });
});
6 cli/src/__tests__/helpers/embedded-postgres.ts Normal file

@@ -0,0 +1,6 @@
export {
  getEmbeddedPostgresTestSupport,
  startEmbeddedPostgresTestDatabase,
  type EmbeddedPostgresTestDatabase,
  type EmbeddedPostgresTestSupport,
} from "@paperclipai/db";
@@ -1,5 +1,5 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { ApiRequestError, PaperclipApiClient } from "../client/http.js";
import { ApiConnectionError, ApiRequestError, PaperclipApiClient } from "../client/http.js";

describe("PaperclipApiClient", () => {
  afterEach(() => {
@@ -59,6 +59,29 @@
    } satisfies Partial<ApiRequestError>);
  });

  it("throws ApiConnectionError with recovery guidance when fetch fails", async () => {
    const fetchMock = vi.fn().mockRejectedValue(new TypeError("fetch failed"));
    vi.stubGlobal("fetch", fetchMock);

    const client = new PaperclipApiClient({ apiBase: "http://localhost:3100" });

    await expect(client.post("/api/companies/import/preview", {})).rejects.toBeInstanceOf(ApiConnectionError);
    await expect(client.post("/api/companies/import/preview", {})).rejects.toMatchObject({
      url: "http://localhost:3100/api/companies/import/preview",
      method: "POST",
      causeMessage: "fetch failed",
    } satisfies Partial<ApiConnectionError>);
    await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
      /Could not reach the Paperclip API\./,
    );
    await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
      /curl http:\/\/localhost:3100\/api\/health/,
    );
    await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
      /pnpm dev|pnpm paperclipai run/,
    );
  });

  it("retries once after interactive auth recovery", async () => {
    const fetchMock = vi
      .fn()

108 cli/src/__tests__/onboard.test.ts Normal file

@@ -0,0 +1,108 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { onboard } from "../commands/onboard.js";
import type { PaperclipConfig } from "../config/schema.js";

const ORIGINAL_ENV = { ...process.env };

function createExistingConfigFixture() {
  const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-onboard-"));
  const runtimeRoot = path.join(root, "runtime");
  const configPath = path.join(root, ".paperclip", "config.json");
  const config: PaperclipConfig = {
    $meta: {
      version: 1,
      updatedAt: "2026-03-29T00:00:00.000Z",
      source: "configure",
    },
    database: {
      mode: "embedded-postgres",
      embeddedPostgresDataDir: path.join(runtimeRoot, "db"),
      embeddedPostgresPort: 54329,
      backup: {
        enabled: true,
        intervalMinutes: 60,
        retentionDays: 30,
        dir: path.join(runtimeRoot, "backups"),
      },
    },
    logging: {
      mode: "file",
      logDir: path.join(runtimeRoot, "logs"),
    },
    server: {
      deploymentMode: "local_trusted",
      exposure: "private",
      host: "127.0.0.1",
      port: 3100,
      allowedHostnames: [],
      serveUi: true,
    },
    auth: {
      baseUrlMode: "auto",
      disableSignUp: false,
    },
    telemetry: {
      enabled: true,
    },
    storage: {
      provider: "local_disk",
      localDisk: {
        baseDir: path.join(runtimeRoot, "storage"),
      },
      s3: {
        bucket: "paperclip",
        region: "us-east-1",
        prefix: "",
        forcePathStyle: false,
      },
    },
    secrets: {
      provider: "local_encrypted",
      strictMode: false,
      localEncrypted: {
        keyFilePath: path.join(runtimeRoot, "secrets", "master.key"),
      },
    },
  };

  fs.mkdirSync(path.dirname(configPath), { recursive: true });
  fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, { mode: 0o600 });

  return { configPath, configText: fs.readFileSync(configPath, "utf8") };
}

describe("onboard", () => {
  beforeEach(() => {
    process.env = { ...ORIGINAL_ENV };
    delete process.env.PAPERCLIP_AGENT_JWT_SECRET;
    delete process.env.PAPERCLIP_SECRETS_MASTER_KEY;
    delete process.env.PAPERCLIP_SECRETS_MASTER_KEY_FILE;
  });

  afterEach(() => {
    process.env = { ...ORIGINAL_ENV };
  });

  it("preserves an existing config when rerun without flags", async () => {
    const fixture = createExistingConfigFixture();

    await onboard({ config: fixture.configPath });

    expect(fs.readFileSync(fixture.configPath, "utf8")).toBe(fixture.configText);
    expect(fs.existsSync(`${fixture.configPath}.backup`)).toBe(false);
    expect(fs.existsSync(path.join(path.dirname(fixture.configPath), ".env"))).toBe(true);
  });

  it("preserves an existing config when rerun with --yes", async () => {
    const fixture = createExistingConfigFixture();

    await onboard({ config: fixture.configPath, yes: true, invokedByRun: true });

    expect(fs.readFileSync(fixture.configPath, "utf8")).toBe(fixture.configText);
    expect(fs.existsSync(`${fixture.configPath}.backup`)).toBe(false);
    expect(fs.existsSync(path.join(path.dirname(fixture.configPath), ".env"))).toBe(true);
  });
});
249 cli/src/__tests__/routines.test.ts Normal file

@@ -0,0 +1,249 @@
import { randomUUID } from "node:crypto";
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
import os from "node:os";
import path from "node:path";
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
import { eq } from "drizzle-orm";
import {
  agents,
  companies,
  createDb,
  projects,
  routines,
} from "@paperclipai/db";
import {
  getEmbeddedPostgresTestSupport,
  startEmbeddedPostgresTestDatabase,
} from "./helpers/embedded-postgres.js";
import { disableAllRoutinesInConfig } from "../commands/routines.js";

const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;

if (!embeddedPostgresSupport.supported) {
  console.warn(
    `Skipping embedded Postgres routines CLI tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
  );
}

function writeTestConfig(configPath: string, tempRoot: string, connectionString: string) {
  const config = {
    $meta: {
      version: 1,
      updatedAt: new Date().toISOString(),
      source: "doctor" as const,
    },
    database: {
      mode: "postgres" as const,
      connectionString,
      embeddedPostgresDataDir: path.join(tempRoot, "embedded-db"),
      embeddedPostgresPort: 54329,
      backup: {
        enabled: false,
        intervalMinutes: 60,
        retentionDays: 30,
        dir: path.join(tempRoot, "backups"),
      },
    },
    logging: {
      mode: "file" as const,
      logDir: path.join(tempRoot, "logs"),
    },
    server: {
      deploymentMode: "local_trusted" as const,
      exposure: "private" as const,
      host: "127.0.0.1",
      port: 3100,
      allowedHostnames: [],
      serveUi: false,
    },
    auth: {
      baseUrlMode: "auto" as const,
      disableSignUp: false,
    },
    storage: {
      provider: "local_disk" as const,
      localDisk: {
        baseDir: path.join(tempRoot, "storage"),
      },
      s3: {
        bucket: "paperclip",
        region: "us-east-1",
        prefix: "",
        forcePathStyle: false,
      },
    },
    secrets: {
      provider: "local_encrypted" as const,
      strictMode: false,
      localEncrypted: {
        keyFilePath: path.join(tempRoot, "secrets", "master.key"),
      },
    },
  };

  mkdirSync(path.dirname(configPath), { recursive: true });
  writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
}

describeEmbeddedPostgres("disableAllRoutinesInConfig", () => {
  let db!: ReturnType<typeof createDb>;
  let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
  let tempRoot = "";
  let configPath = "";

  beforeAll(async () => {
    tempDb = await startEmbeddedPostgresTestDatabase("paperclip-routines-cli-db-");
    db = createDb(tempDb.connectionString);
    tempRoot = mkdtempSync(path.join(os.tmpdir(), "paperclip-routines-cli-config-"));
    configPath = path.join(tempRoot, "config.json");
    writeTestConfig(configPath, tempRoot, tempDb.connectionString);
  }, 20_000);

  afterEach(async () => {
    await db.delete(routines);
    await db.delete(projects);
    await db.delete(agents);
    await db.delete(companies);
  });

  afterAll(async () => {
    await tempDb?.cleanup();
    if (tempRoot) {
      rmSync(tempRoot, { recursive: true, force: true });
    }
  });

  it("pauses only non-archived routines for the selected company", async () => {
    const companyId = randomUUID();
    const otherCompanyId = randomUUID();
    const projectId = randomUUID();
    const otherProjectId = randomUUID();
    const agentId = randomUUID();
    const otherAgentId = randomUUID();
    const activeRoutineId = randomUUID();
    const pausedRoutineId = randomUUID();
    const archivedRoutineId = randomUUID();
    const otherCompanyRoutineId = randomUUID();

    await db.insert(companies).values([
      {
        id: companyId,
        name: "Paperclip",
        issuePrefix: `T${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
        requireBoardApprovalForNewAgents: false,
      },
      {
        id: otherCompanyId,
        name: "Other company",
        issuePrefix: `T${otherCompanyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
        requireBoardApprovalForNewAgents: false,
      },
    ]);

    await db.insert(agents).values([
      {
        id: agentId,
        companyId,
        name: "Coder",
        adapterType: "process",
        adapterConfig: {},
        runtimeConfig: {},
        permissions: {},
      },
      {
        id: otherAgentId,
        companyId: otherCompanyId,
        name: "Other coder",
        adapterType: "process",
        adapterConfig: {},
        runtimeConfig: {},
        permissions: {},
      },
    ]);

    await db.insert(projects).values([
      {
        id: projectId,
        companyId,
        name: "Project",
        status: "in_progress",
      },
      {
        id: otherProjectId,
        companyId: otherCompanyId,
        name: "Other project",
        status: "in_progress",
      },
    ]);

    await db.insert(routines).values([
      {
        id: activeRoutineId,
        companyId,
        projectId,
        assigneeAgentId: agentId,
        title: "Active routine",
        status: "active",
      },
      {
        id: pausedRoutineId,
        companyId,
        projectId,
        assigneeAgentId: agentId,
        title: "Paused routine",
        status: "paused",
      },
      {
        id: archivedRoutineId,
        companyId,
        projectId,
        assigneeAgentId: agentId,
        title: "Archived routine",
        status: "archived",
      },
      {
        id: otherCompanyRoutineId,
        companyId: otherCompanyId,
        projectId: otherProjectId,
        assigneeAgentId: otherAgentId,
        title: "Other company routine",
        status: "active",
      },
    ]);

    const result = await disableAllRoutinesInConfig({
|
||||
config: configPath,
|
||||
companyId,
|
||||
});
|
||||
|
||||
expect(result).toMatchObject({
|
||||
companyId,
|
||||
totalRoutines: 3,
|
||||
pausedCount: 1,
|
||||
alreadyPausedCount: 1,
|
||||
archivedCount: 1,
|
||||
});
|
||||
|
||||
const companyRoutines = await db
|
||||
.select({
|
||||
id: routines.id,
|
||||
status: routines.status,
|
||||
})
|
||||
.from(routines)
|
||||
.where(eq(routines.companyId, companyId));
|
||||
const statusById = new Map(companyRoutines.map((routine) => [routine.id, routine.status]));
|
||||
|
||||
expect(statusById.get(activeRoutineId)).toBe("paused");
|
||||
expect(statusById.get(pausedRoutineId)).toBe("paused");
|
||||
expect(statusById.get(archivedRoutineId)).toBe("archived");
|
||||
|
||||
const otherCompanyRoutine = await db
|
||||
.select({
|
||||
status: routines.status,
|
||||
})
|
||||
.from(routines)
|
||||
.where(eq(routines.id, otherCompanyRoutineId));
|
||||
expect(otherCompanyRoutine[0]?.status).toBe("active");
|
||||
});
|
||||
});
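Taken together, the assertions above pin down the contract of disableAllRoutinesInConfig: for the selected company it pauses every active routine, reports routines that were already paused, leaves archived routines alone, and never touches other companies' data. A rough sketch of that core update, assuming drizzle-orm and the shared schema (not the actual implementation in cli/src/commands/routines.ts), looks like:

// Hedged sketch of the pause logic the test exercises; the real command also loads the
// CLI config file and opens the database connection described in it.
import { eq } from "drizzle-orm";
import { createDb, routines } from "@paperclipai/db";

async function pauseActiveRoutines(db: ReturnType<typeof createDb>, companyId: string) {
  const companyRoutines = await db
    .select({ id: routines.id, status: routines.status })
    .from(routines)
    .where(eq(routines.companyId, companyId));

  const toPause = companyRoutines.filter((routine) => routine.status === "active");
  for (const routine of toPause) {
    await db.update(routines).set({ status: "paused" }).where(eq(routines.id, routine.id));
  }

  return {
    companyId,
    totalRoutines: companyRoutines.length,
    pausedCount: toPause.length,
    alreadyPausedCount: companyRoutines.filter((r) => r.status === "paused").length,
    archivedCount: companyRoutines.filter((r) => r.status === "archived").length,
  };
}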
117
cli/src/__tests__/telemetry.test.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const CI_ENV_VARS = ["CI", "CONTINUOUS_INTEGRATION", "BUILD_NUMBER", "GITHUB_ACTIONS", "GITLAB_CI"];
|
||||
|
||||
function makeConfigPath(root: string, enabled: boolean): string {
|
||||
const configPath = path.join(root, ".paperclip", "config.json");
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, JSON.stringify({
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: "2026-03-31T00:00:00.000Z",
|
||||
source: "configure",
|
||||
},
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(root, "runtime", "db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(root, "runtime", "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(root, "runtime", "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(root, "runtime", "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(root, "runtime", "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
}, null, 2));
|
||||
return configPath;
|
||||
}
|
||||
|
||||
describe("cli telemetry", () => {
|
||||
beforeEach(() => {
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
for (const key of CI_ENV_VARS) {
|
||||
delete process.env[key];
|
||||
}
|
||||
vi.stubGlobal("fetch", vi.fn(async () => ({ ok: true })));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
vi.unstubAllGlobals();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
it("respects telemetry.enabled=false from the config file", async () => {
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-cli-telemetry-"));
|
||||
const configPath = makeConfigPath(root, false);
|
||||
process.env.PAPERCLIP_HOME = path.join(root, "home");
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "telemetry-test";
|
||||
|
||||
const { initTelemetryFromConfigFile } = await import("../telemetry.js");
|
||||
const client = initTelemetryFromConfigFile(configPath);
|
||||
|
||||
expect(client).toBeNull();
|
||||
expect(fs.existsSync(path.join(root, "home", "instances", "telemetry-test", "telemetry", "state.json"))).toBe(false);
|
||||
});
|
||||
|
||||
it("creates telemetry state only after the first event is tracked", async () => {
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-cli-telemetry-"));
|
||||
process.env.PAPERCLIP_HOME = path.join(root, "home");
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "telemetry-test";
|
||||
|
||||
const { initTelemetry, flushTelemetry } = await import("../telemetry.js");
|
||||
const client = initTelemetry({ enabled: true });
|
||||
const statePath = path.join(root, "home", "instances", "telemetry-test", "telemetry", "state.json");
|
||||
|
||||
expect(client).not.toBeNull();
|
||||
expect(fs.existsSync(statePath)).toBe(false);
|
||||
|
||||
client!.track("install.started", { setupMode: "quickstart" });
|
||||
|
||||
expect(fs.existsSync(statePath)).toBe(true);
|
||||
|
||||
await flushTelemetry();
|
||||
});
|
||||
});
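The two tests above fix the expected laziness: initTelemetry returns null when telemetry is disabled, and even when enabled it must not write anything to disk until the first event is tracked. One way to get that behaviour, sketched here under the assumption of a state file at <PAPERCLIP_HOME>/instances/<id>/telemetry/state.json (names other than initTelemetry and track are illustrative), is to defer creating the state file into the track call:

// Minimal sketch of lazy state creation matching the assertions above.
import fs from "node:fs";
import path from "node:path";

class LazyTelemetryClient {
  private stateWritten = false;

  constructor(private statePath: string) {}

  track(event: string, props: Record<string, unknown>): void {
    if (!this.stateWritten) {
      // First tracked event: create the per-instance telemetry state on demand.
      fs.mkdirSync(path.dirname(this.statePath), { recursive: true });
      fs.writeFileSync(this.statePath, JSON.stringify({ firstEventAt: new Date().toISOString() }));
      this.stateWritten = true;
    }
    // ...queue { event, props } for the next flush (omitted).
  }
}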
@@ -75,6 +75,9 @@ function buildSourceConfig(): PaperclipConfig {
|
||||
publicBaseUrl: "http://127.0.0.1:3100",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
@@ -344,6 +347,87 @@ describe("worktree helpers", () => {
|
||||
}
|
||||
});
|
||||
|
||||
it("avoids ports already claimed by sibling worktree instance configs", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-claimed-ports-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const homeDir = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const siblingInstanceRoot = path.join(homeDir, "instances", "existing-worktree");
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
try {
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
fs.mkdirSync(siblingInstanceRoot, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(siblingInstanceRoot, "config.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildSourceConfig(),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(siblingInstanceRoot, "db"),
|
||||
embeddedPostgresPort: 54330,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(siblingInstanceRoot, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(siblingInstanceRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "authenticated",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: ["localhost"],
|
||||
serveUi: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(siblingInstanceRoot, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(siblingInstanceRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
);
|
||||
|
||||
process.chdir(repoRoot);
|
||||
await worktreeInitCommand({
|
||||
seed: false,
|
||||
fromConfig: path.join(tempRoot, "missing", "config.json"),
|
||||
home: homeDir,
|
||||
});
|
||||
|
||||
const config = JSON.parse(fs.readFileSync(path.join(repoRoot, ".paperclip", "config.json"), "utf8"));
|
||||
expect(config.server.port).toBeGreaterThan(3101);
|
||||
expect(config.database.embeddedPostgresPort).not.toBe(54330);
|
||||
expect(config.database.embeddedPostgresPort).not.toBe(config.server.port);
|
||||
expect(config.database.embeddedPostgresPort).toBeGreaterThan(54330);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("defaults the seed source config to the current repo-local Paperclip config", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-source-config-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
|
||||
@@ -13,6 +13,26 @@ export class ApiRequestError extends Error {
  }
}

export class ApiConnectionError extends Error {
  url: string;
  method: string;
  causeMessage?: string;

  constructor(input: {
    apiBase: string;
    path: string;
    method: string;
    cause?: unknown;
  }) {
    const url = buildUrl(input.apiBase, input.path);
    const causeMessage = formatConnectionCause(input.cause);
    super(buildConnectionErrorMessage({ apiBase: input.apiBase, url, method: input.method, causeMessage }));
    this.url = url;
    this.method = input.method;
    this.causeMessage = causeMessage;
  }
}

interface RequestOptions {
  ignoreNotFound?: boolean;
}
@@ -76,6 +96,7 @@ export class PaperclipApiClient {
|
||||
hasRetriedAuth = false,
|
||||
): Promise<T | null> {
|
||||
const url = buildUrl(this.apiBase, path);
|
||||
const method = String(init.method ?? "GET").toUpperCase();
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
accept: "application/json",
|
||||
@@ -94,10 +115,20 @@ export class PaperclipApiClient {
|
||||
headers["x-paperclip-run-id"] = this.runId;
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
let response: Response;
|
||||
try {
|
||||
response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new ApiConnectionError({
|
||||
apiBase: this.apiBase,
|
||||
path,
|
||||
method,
|
||||
cause: error,
|
||||
});
|
||||
}
|
||||
|
||||
if (opts?.ignoreNotFound && response.status === 404) {
|
||||
return null;
|
||||
@@ -108,7 +139,7 @@ export class PaperclipApiClient {
|
||||
if (!hasRetriedAuth && this.recoverAuth) {
|
||||
const recoveredToken = await this.recoverAuth({
|
||||
path,
|
||||
method: String(init.method ?? "GET").toUpperCase(),
|
||||
method,
|
||||
error: apiError,
|
||||
});
|
||||
if (recoveredToken) {
|
||||
@@ -166,6 +197,50 @@ async function toApiError(response: Response): Promise<ApiRequestError> {
|
||||
return new ApiRequestError(response.status, `Request failed with status ${response.status}`, undefined, parsed);
|
||||
}
|
||||
|
||||
function buildConnectionErrorMessage(input: {
|
||||
apiBase: string;
|
||||
url: string;
|
||||
method: string;
|
||||
causeMessage?: string;
|
||||
}): string {
|
||||
const healthUrl = buildHealthCheckUrl(input.url);
|
||||
const lines = [
|
||||
"Could not reach the Paperclip API.",
|
||||
"",
|
||||
`Request: ${input.method} ${input.url}`,
|
||||
];
|
||||
if (input.causeMessage) {
|
||||
lines.push(`Cause: ${input.causeMessage}`);
|
||||
}
|
||||
lines.push(
|
||||
"",
|
||||
"This usually means the Paperclip server is not running, the configured URL is wrong, or the request is being blocked before it reaches Paperclip.",
|
||||
"",
|
||||
"Try:",
|
||||
"- Start Paperclip with `pnpm dev` or `pnpm paperclipai run`.",
|
||||
`- Verify the server is reachable with \`curl ${healthUrl}\`.`,
|
||||
`- If Paperclip is running elsewhere, pass \`--api-base ${input.apiBase.replace(/\/+$/, "")}\` or set \`PAPERCLIP_API_URL\`.`,
|
||||
);
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function buildHealthCheckUrl(requestUrl: string): string {
|
||||
const url = new URL(requestUrl);
|
||||
url.pathname = `${url.pathname.replace(/\/+$/, "").replace(/\/api(?:\/.*)?$/, "")}/api/health`;
|
||||
url.search = "";
|
||||
url.hash = "";
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
function formatConnectionCause(error: unknown): string | undefined {
|
||||
if (!error) return undefined;
|
||||
if (error instanceof Error) {
|
||||
return error.message.trim() || error.name;
|
||||
}
|
||||
const message = String(error).trim();
|
||||
return message || undefined;
|
||||
}
|
||||
|
||||
function toStringRecord(headers: HeadersInit | undefined): Record<string, string> {
|
||||
if (!headers) return {};
|
||||
if (Array.isArray(headers)) {
|
||||
|
||||
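With the try/catch around fetch above, callers can now tell "the server never answered" apart from "the server answered with an error status". A hedged usage sketch (the api variable stands for an already constructed PaperclipApiClient; handleCommandError elsewhere in the CLI presumably does something similar):

// Sketch of distinguishing connection failures from HTTP-level failures.
import { ApiConnectionError, ApiRequestError } from "./http.js";

try {
  await api.get("/api/companies");
} catch (err) {
  if (err instanceof ApiConnectionError) {
    // Server unreachable: the message already carries the request, cause, and suggested next steps.
    console.error(err.message);
  } else if (err instanceof ApiRequestError) {
    // The server responded, but with an error status.
    console.error(err.message);
  } else {
    throw err;
  }
}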
@@ -5,12 +5,14 @@ import * as p from "@clack/prompts";
|
||||
import pc from "picocolors";
|
||||
import type {
|
||||
Company,
|
||||
FeedbackTrace,
|
||||
CompanyPortabilityFileEntry,
|
||||
CompanyPortabilityExportResult,
|
||||
CompanyPortabilityInclude,
|
||||
CompanyPortabilityPreviewResult,
|
||||
CompanyPortabilityImportResult,
|
||||
} from "@paperclipai/shared";
|
||||
import { getTelemetryClient, trackCompanyImported } from "../../telemetry.js";
|
||||
import { ApiRequestError } from "../../client/http.js";
|
||||
import { openUrl } from "../../client/board-auth.js";
|
||||
import { binaryContentTypeByExtension, readZipArchive } from "./zip.js";
|
||||
@@ -22,6 +24,11 @@ import {
|
||||
resolveCommandContext,
|
||||
type BaseClientOptions,
|
||||
} from "./common.js";
|
||||
import {
|
||||
buildFeedbackTraceQuery,
|
||||
normalizeFeedbackTraceExportFormat,
|
||||
serializeFeedbackTraces,
|
||||
} from "./feedback.js";
|
||||
|
||||
interface CompanyCommandOptions extends BaseClientOptions {}
|
||||
type CompanyDeleteSelectorMode = "auto" | "id" | "prefix";
|
||||
@@ -44,6 +51,20 @@ interface CompanyExportOptions extends BaseClientOptions {
|
||||
expandReferencedSkills?: boolean;
|
||||
}
|
||||
|
||||
interface CompanyFeedbackOptions extends BaseClientOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
projectId?: string;
|
||||
issueId?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
includePayload?: boolean;
|
||||
out?: string;
|
||||
format?: string;
|
||||
}
|
||||
|
||||
interface CompanyImportOptions extends BaseClientOptions {
|
||||
include?: string;
|
||||
target?: CompanyImportTargetMode;
|
||||
@@ -765,8 +786,15 @@ export function isHttpUrl(input: string): boolean {
|
||||
return /^https?:\/\//i.test(input.trim());
|
||||
}
|
||||
|
||||
export function isGithubUrl(input: string): boolean {
|
||||
return /^https?:\/\/github\.com\//i.test(input.trim());
|
||||
export function looksLikeRepoUrl(input: string): boolean {
|
||||
try {
|
||||
const url = new URL(input.trim());
|
||||
if (url.protocol !== "https:") return false;
|
||||
const segments = url.pathname.split("/").filter(Boolean);
|
||||
return segments.length >= 2;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
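For illustration, the relaxed check above accepts any https URL whose path has at least an owner and a repo segment, which is what lets GitHub Enterprise hosts through (hostnames below are made up):

looksLikeRepoUrl("https://github.com/acme/skills");       // true
looksLikeRepoUrl("https://ghe.example.com/acme/skills");  // true  - GitHub Enterprise style host
looksLikeRepoUrl("http://github.com/acme/skills");        // false - not https
looksLikeRepoUrl("https://example.com/acme");              // false - only one path segment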
|
||||
|
||||
function isGithubSegment(input: string): boolean {
|
||||
@@ -797,13 +825,15 @@ function normalizeGithubImportPath(input: string | null | undefined): string | n
|
||||
}
|
||||
|
||||
function buildGithubImportUrl(input: {
|
||||
hostname?: string;
|
||||
owner: string;
|
||||
repo: string;
|
||||
ref?: string | null;
|
||||
path?: string | null;
|
||||
companyPath?: string | null;
|
||||
}): string {
|
||||
const url = new URL(`https://github.com/${input.owner}/${input.repo.replace(/\.git$/i, "")}`);
|
||||
const host = input.hostname || "github.com";
|
||||
const url = new URL(`https://${host}/${input.owner}/${input.repo.replace(/\.git$/i, "")}`);
|
||||
const ref = input.ref?.trim();
|
||||
if (ref) {
|
||||
url.searchParams.set("ref", ref);
|
||||
@@ -834,14 +864,15 @@ export function normalizeGithubImportSource(input: string, refOverride?: string)
|
||||
});
|
||||
}
|
||||
|
||||
if (!isGithubUrl(trimmed)) {
|
||||
throw new Error("GitHub source must be a github.com URL or owner/repo[/path] shorthand.");
|
||||
if (!looksLikeRepoUrl(trimmed)) {
|
||||
throw new Error("GitHub source must be a GitHub or GitHub Enterprise URL, or owner/repo[/path] shorthand.");
|
||||
}
|
||||
if (!ref) {
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
const url = new URL(trimmed);
|
||||
const hostname = url.hostname;
|
||||
const parts = url.pathname.split("/").filter(Boolean);
|
||||
if (parts.length < 2) {
|
||||
throw new Error("Invalid GitHub URL.");
|
||||
@@ -852,18 +883,18 @@ export function normalizeGithubImportSource(input: string, refOverride?: string)
|
||||
const existingPath = normalizeGithubImportPath(url.searchParams.get("path"));
|
||||
const existingCompanyPath = normalizeGithubImportPath(url.searchParams.get("companyPath"));
|
||||
if (existingCompanyPath) {
|
||||
return buildGithubImportUrl({ owner, repo, ref, companyPath: existingCompanyPath });
|
||||
return buildGithubImportUrl({ hostname, owner, repo, ref, companyPath: existingCompanyPath });
|
||||
}
|
||||
if (existingPath) {
|
||||
return buildGithubImportUrl({ owner, repo, ref, path: existingPath });
|
||||
return buildGithubImportUrl({ hostname, owner, repo, ref, path: existingPath });
|
||||
}
|
||||
if (parts[2] === "tree") {
|
||||
return buildGithubImportUrl({ owner, repo, ref, path: parts.slice(4).join("/") });
|
||||
return buildGithubImportUrl({ hostname, owner, repo, ref, path: parts.slice(4).join("/") });
|
||||
}
|
||||
if (parts[2] === "blob") {
|
||||
return buildGithubImportUrl({ owner, repo, ref, companyPath: parts.slice(4).join("/") });
|
||||
return buildGithubImportUrl({ hostname, owner, repo, ref, companyPath: parts.slice(4).join("/") });
|
||||
}
|
||||
return buildGithubImportUrl({ owner, repo, ref });
|
||||
return buildGithubImportUrl({ hostname, owner, repo, ref });
|
||||
}
|
||||
|
||||
async function pathExists(inputPath: string): Promise<boolean> {
|
||||
@@ -1093,6 +1124,91 @@ export function registerCompanyCommands(program: Command): void {
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
company
|
||||
.command("feedback:list")
|
||||
.description("List feedback traces for a company")
|
||||
.requiredOption("-C, --company-id <id>", "Company ID")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--project-id <id>", "Filter by project ID")
|
||||
.option("--issue-id <id>", "Filter by issue ID")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--include-payload", "Include stored payload snapshots in the response")
|
||||
.action(async (opts: CompanyFeedbackOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts, { requireCompany: true });
|
||||
const traces = (await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/companies/${ctx.companyId}/feedback-traces${buildFeedbackTraceQuery(opts)}`,
|
||||
)) ?? [];
|
||||
if (ctx.json) {
|
||||
printOutput(traces, { json: true });
|
||||
return;
|
||||
}
|
||||
printOutput(
|
||||
traces.map((trace) => ({
|
||||
id: trace.id,
|
||||
issue: trace.issueIdentifier ?? trace.issueId,
|
||||
vote: trace.vote,
|
||||
status: trace.status,
|
||||
targetType: trace.targetType,
|
||||
target: trace.targetSummary.label,
|
||||
})),
|
||||
{ json: false },
|
||||
);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: false },
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
company
|
||||
.command("feedback:export")
|
||||
.description("Export feedback traces for a company")
|
||||
.requiredOption("-C, --company-id <id>", "Company ID")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--project-id <id>", "Filter by project ID")
|
||||
.option("--issue-id <id>", "Filter by issue ID")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--include-payload", "Include stored payload snapshots in the export")
|
||||
.option("--out <path>", "Write export to a file path instead of stdout")
|
||||
.option("--format <format>", "Export format: json or ndjson", "ndjson")
|
||||
.action(async (opts: CompanyFeedbackOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts, { requireCompany: true });
|
||||
const traces = (await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/companies/${ctx.companyId}/feedback-traces${buildFeedbackTraceQuery(opts, opts.includePayload ?? true)}`,
|
||||
)) ?? [];
|
||||
const serialized = serializeFeedbackTraces(traces, opts.format);
|
||||
if (opts.out?.trim()) {
|
||||
await writeFile(opts.out, serialized, "utf8");
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{ out: opts.out, count: traces.length, format: normalizeFeedbackTraceExportFormat(opts.format) },
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(`Wrote ${traces.length} feedback trace(s) to ${opts.out}`);
|
||||
return;
|
||||
}
|
||||
process.stdout.write(`${serialized}${serialized.endsWith("\n") ? "" : "\n"}`);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: false },
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
company
|
||||
.command("export")
|
||||
@@ -1208,13 +1324,13 @@ export function registerCompanyCommands(program: Command): void {
|
||||
| { type: "github"; url: string };
|
||||
|
||||
const treatAsLocalPath = !isHttpUrl(from) && await pathExists(from);
|
||||
const isGithubSource = isGithubUrl(from) || (isGithubShorthand(from) && !treatAsLocalPath);
|
||||
const isGithubSource = looksLikeRepoUrl(from) || (isGithubShorthand(from) && !treatAsLocalPath);
|
||||
|
||||
if (isHttpUrl(from) || isGithubSource) {
|
||||
if (!isGithubUrl(from) && !isGithubShorthand(from)) {
|
||||
if (!looksLikeRepoUrl(from) && !isGithubShorthand(from)) {
|
||||
throw new Error(
|
||||
"Only GitHub URLs and local paths are supported for import. " +
|
||||
"Generic HTTP URLs are not supported. Use a GitHub URL (https://github.com/...) or a local directory path.",
|
||||
"Generic HTTP URLs are not supported. Use a GitHub or GitHub Enterprise URL (https://github.com/... or https://ghe.example.com/...) or a local directory path.",
|
||||
);
|
||||
}
|
||||
sourcePayload = { type: "github", url: normalizeGithubImportSource(from, opts.ref) };
|
||||
@@ -1325,6 +1441,12 @@ export function registerCompanyCommands(program: Command): void {
|
||||
if (!imported) {
|
||||
throw new Error("Import request returned no data.");
|
||||
}
|
||||
const tc = getTelemetryClient();
|
||||
if (tc) {
|
||||
const isPrivate = sourcePayload.type !== "github";
|
||||
const sourceRef = sourcePayload.type === "github" ? sourcePayload.url : from;
|
||||
trackCompanyImported(tc, { sourceType: sourcePayload.type, sourceRef, isPrivate });
|
||||
}
|
||||
let companyUrl: string | undefined;
|
||||
if (!ctx.json) {
|
||||
try {
|
||||
|
||||
645
cli/src/commands/client/feedback.ts
Normal file
@@ -0,0 +1,645 @@
|
||||
import { mkdir, readdir, readFile, stat, writeFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import pc from "picocolors";
|
||||
import { Command } from "commander";
|
||||
import type { Company, FeedbackTrace, FeedbackTraceBundle } from "@paperclipai/shared";
|
||||
import {
|
||||
addCommonClientOptions,
|
||||
handleCommandError,
|
||||
printOutput,
|
||||
resolveCommandContext,
|
||||
type BaseClientOptions,
|
||||
type ResolvedClientContext,
|
||||
} from "./common.js";
|
||||
|
||||
interface FeedbackFilterOptions extends BaseClientOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
projectId?: string;
|
||||
issueId?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
}
|
||||
|
||||
export interface FeedbackTraceQueryOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
projectId?: string;
|
||||
issueId?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
}
|
||||
|
||||
interface FeedbackReportOptions extends FeedbackFilterOptions {
|
||||
payloads?: boolean;
|
||||
}
|
||||
|
||||
interface FeedbackExportOptions extends FeedbackFilterOptions {
|
||||
out?: string;
|
||||
}
|
||||
|
||||
interface FeedbackSummary {
|
||||
total: number;
|
||||
thumbsUp: number;
|
||||
thumbsDown: number;
|
||||
withReason: number;
|
||||
statuses: Record<string, number>;
|
||||
}
|
||||
|
||||
interface FeedbackExportManifest {
|
||||
exportedAt: string;
|
||||
serverUrl: string;
|
||||
companyId: string;
|
||||
summary: FeedbackSummary & {
|
||||
uniqueIssues: number;
|
||||
issues: string[];
|
||||
};
|
||||
files: {
|
||||
votes: string[];
|
||||
traces: string[];
|
||||
fullTraces: string[];
|
||||
zip: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface FeedbackExportResult {
|
||||
outputDir: string;
|
||||
zipPath: string;
|
||||
manifest: FeedbackExportManifest;
|
||||
}
|
||||
|
||||
export function registerFeedbackCommands(program: Command): void {
|
||||
const feedback = program.command("feedback").description("Inspect and export local feedback traces");
|
||||
|
||||
addCommonClientOptions(
|
||||
feedback
|
||||
.command("report")
|
||||
.description("Render a terminal report for company feedback traces")
|
||||
.option("-C, --company-id <id>", "Company ID (overrides context default)")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--project-id <id>", "Filter by project ID")
|
||||
.option("--issue-id <id>", "Filter by issue ID")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--payloads", "Include raw payload dumps in the terminal report", false)
|
||||
.action(async (opts: FeedbackReportOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const companyId = await resolveFeedbackCompanyId(ctx, opts.companyId);
|
||||
const traces = await fetchCompanyFeedbackTraces(ctx, companyId, opts);
|
||||
const summary = summarizeFeedbackTraces(traces);
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{
|
||||
apiBase: ctx.api.apiBase,
|
||||
companyId,
|
||||
summary,
|
||||
traces,
|
||||
},
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(renderFeedbackReport({
|
||||
apiBase: ctx.api.apiBase,
|
||||
companyId,
|
||||
traces,
|
||||
summary,
|
||||
includePayloads: Boolean(opts.payloads),
|
||||
}));
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: false },
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
feedback
|
||||
.command("export")
|
||||
.description("Export feedback votes and raw trace bundles into a folder plus zip archive")
|
||||
.option("-C, --company-id <id>", "Company ID (overrides context default)")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--project-id <id>", "Filter by project ID")
|
||||
.option("--issue-id <id>", "Filter by issue ID")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--out <path>", "Output directory (default: ./feedback-export-<timestamp>)")
|
||||
.action(async (opts: FeedbackExportOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const companyId = await resolveFeedbackCompanyId(ctx, opts.companyId);
|
||||
const traces = await fetchCompanyFeedbackTraces(ctx, companyId, opts);
|
||||
const outputDir = path.resolve(opts.out?.trim() || defaultFeedbackExportDirName());
|
||||
const exported = await writeFeedbackExportBundle({
|
||||
apiBase: ctx.api.apiBase,
|
||||
companyId,
|
||||
traces,
|
||||
outputDir,
|
||||
traceBundleFetcher: (trace) => fetchFeedbackTraceBundle(ctx, trace.id),
|
||||
});
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{
|
||||
companyId,
|
||||
outputDir: exported.outputDir,
|
||||
zipPath: exported.zipPath,
|
||||
summary: exported.manifest.summary,
|
||||
},
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(renderFeedbackExportSummary(exported));
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: false },
|
||||
);
|
||||
}
|
||||
|
||||
export async function resolveFeedbackCompanyId(
|
||||
ctx: ResolvedClientContext,
|
||||
explicitCompanyId?: string,
|
||||
): Promise<string> {
|
||||
const direct = explicitCompanyId?.trim() || ctx.companyId?.trim();
|
||||
if (direct) return direct;
|
||||
const companies = (await ctx.api.get<Company[]>("/api/companies")) ?? [];
|
||||
const companyId = companies[0]?.id?.trim();
|
||||
if (!companyId) {
|
||||
throw new Error(
|
||||
"Company ID is required. Pass --company-id, set PAPERCLIP_COMPANY_ID, or configure a CLI context default.",
|
||||
);
|
||||
}
|
||||
return companyId;
|
||||
}
|
||||
|
||||
export function buildFeedbackTraceQuery(opts: FeedbackTraceQueryOptions, includePayload = true): string {
|
||||
const params = new URLSearchParams();
|
||||
if (opts.targetType) params.set("targetType", opts.targetType);
|
||||
if (opts.vote) params.set("vote", opts.vote);
|
||||
if (opts.status) params.set("status", opts.status);
|
||||
if (opts.projectId) params.set("projectId", opts.projectId);
|
||||
if (opts.issueId) params.set("issueId", opts.issueId);
|
||||
if (opts.from) params.set("from", opts.from);
|
||||
if (opts.to) params.set("to", opts.to);
|
||||
if (opts.sharedOnly) params.set("sharedOnly", "true");
|
||||
if (includePayload) params.set("includePayload", "true");
|
||||
const query = params.toString();
|
||||
return query ? `?${query}` : "";
|
||||
}
|
||||
|
||||
export function normalizeFeedbackTraceExportFormat(value: string | undefined): "json" | "ndjson" {
|
||||
if (!value || value === "ndjson") return "ndjson";
|
||||
if (value === "json") return "json";
|
||||
throw new Error(`Unsupported export format: ${value}`);
|
||||
}
|
||||
|
||||
export function serializeFeedbackTraces(traces: FeedbackTrace[], format: string | undefined): string {
|
||||
if (normalizeFeedbackTraceExportFormat(format) === "json") {
|
||||
return JSON.stringify(traces, null, 2);
|
||||
}
|
||||
return traces.map((trace) => JSON.stringify(trace)).join("\n");
|
||||
}
|
||||
|
||||
export async function fetchCompanyFeedbackTraces(
|
||||
ctx: ResolvedClientContext,
|
||||
companyId: string,
|
||||
opts: FeedbackFilterOptions,
|
||||
): Promise<FeedbackTrace[]> {
|
||||
return (
|
||||
(await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/companies/${companyId}/feedback-traces${buildFeedbackTraceQuery(opts, true)}`,
|
||||
)) ?? []
|
||||
);
|
||||
}
|
||||
|
||||
export async function fetchFeedbackTraceBundle(
|
||||
ctx: ResolvedClientContext,
|
||||
traceId: string,
|
||||
): Promise<FeedbackTraceBundle> {
|
||||
const bundle = await ctx.api.get<FeedbackTraceBundle>(`/api/feedback-traces/${traceId}/bundle`);
|
||||
if (!bundle) {
|
||||
throw new Error(`Feedback trace bundle ${traceId} not found`);
|
||||
}
|
||||
return bundle;
|
||||
}
|
||||
|
||||
export function summarizeFeedbackTraces(traces: FeedbackTrace[]): FeedbackSummary {
|
||||
const statuses: Record<string, number> = {};
|
||||
let thumbsUp = 0;
|
||||
let thumbsDown = 0;
|
||||
let withReason = 0;
|
||||
|
||||
for (const trace of traces) {
|
||||
if (trace.vote === "up") thumbsUp += 1;
|
||||
if (trace.vote === "down") thumbsDown += 1;
|
||||
if (readFeedbackReason(trace)) withReason += 1;
|
||||
statuses[trace.status] = (statuses[trace.status] ?? 0) + 1;
|
||||
}
|
||||
|
||||
return {
|
||||
total: traces.length,
|
||||
thumbsUp,
|
||||
thumbsDown,
|
||||
withReason,
|
||||
statuses,
|
||||
};
|
||||
}
|
||||
|
||||
export function renderFeedbackReport(input: {
|
||||
apiBase: string;
|
||||
companyId: string;
|
||||
traces: FeedbackTrace[];
|
||||
summary: FeedbackSummary;
|
||||
includePayloads: boolean;
|
||||
}): string {
|
||||
const lines: string[] = [];
|
||||
lines.push("");
|
||||
lines.push(pc.bold(pc.magenta("Paperclip Feedback Report")));
|
||||
lines.push(pc.dim(new Date().toISOString()));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(`${pc.dim("Server:")} ${input.apiBase}`);
|
||||
lines.push(`${pc.dim("Company:")} ${input.companyId}`);
|
||||
lines.push("");
|
||||
|
||||
if (input.traces.length === 0) {
|
||||
lines.push(pc.yellow("[!!] No feedback traces found."));
|
||||
lines.push("");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
lines.push(pc.bold(pc.cyan("Summary")));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(` ${pc.green(pc.bold(String(input.summary.thumbsUp)))} thumbs up`);
|
||||
lines.push(` ${pc.red(pc.bold(String(input.summary.thumbsDown)))} thumbs down`);
|
||||
lines.push(` ${pc.yellow(pc.bold(String(input.summary.withReason)))} downvotes with a reason`);
|
||||
lines.push(` ${pc.bold(String(input.summary.total))} total traces`);
|
||||
lines.push("");
|
||||
lines.push(pc.dim("Export status:"));
|
||||
for (const status of ["pending", "sent", "local_only", "failed"]) {
|
||||
lines.push(` ${padRight(status, 10)} ${input.summary.statuses[status] ?? 0}`);
|
||||
}
|
||||
lines.push("");
|
||||
lines.push(pc.bold(pc.cyan("Trace Details")));
|
||||
lines.push(horizontalRule());
|
||||
|
||||
for (const trace of input.traces) {
|
||||
const voteColor = trace.vote === "up" ? pc.green : pc.red;
|
||||
const voteIcon = trace.vote === "up" ? "^" : "v";
|
||||
const issueRef = trace.issueIdentifier ?? trace.issueId;
|
||||
const label = trace.targetSummary.label?.trim() || trace.targetType;
|
||||
const excerpt = compactText(trace.targetSummary.excerpt);
|
||||
const reason = readFeedbackReason(trace);
|
||||
lines.push(
|
||||
` ${voteColor(voteIcon)} ${pc.bold(issueRef)} ${pc.dim(compactText(trace.issueTitle, 64))}`,
|
||||
);
|
||||
lines.push(
|
||||
` ${pc.dim("Trace:")} ${trace.id.slice(0, 8)} ${pc.dim("Status:")} ${trace.status} ${pc.dim("Date:")} ${formatTimestamp(trace.createdAt)}`,
|
||||
);
|
||||
lines.push(` ${pc.dim("Target:")} ${label}`);
|
||||
if (excerpt) {
|
||||
lines.push(` ${pc.dim("Excerpt:")} ${excerpt}`);
|
||||
}
|
||||
if (reason) {
|
||||
lines.push(` ${pc.yellow(pc.bold("Reason:"))} ${pc.yellow(reason)}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (input.includePayloads) {
|
||||
lines.push(pc.bold(pc.cyan("Raw Payloads")));
|
||||
lines.push(horizontalRule());
|
||||
for (const trace of input.traces) {
|
||||
if (!trace.payloadSnapshot) continue;
|
||||
const issueRef = trace.issueIdentifier ?? trace.issueId;
|
||||
lines.push(` ${pc.bold(`${issueRef} (${trace.id.slice(0, 8)})`)}`);
|
||||
const body = JSON.stringify(trace.payloadSnapshot, null, 2)?.split("\n") ?? [];
|
||||
for (const line of body) {
|
||||
lines.push(` ${pc.dim(line)}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
}
|
||||
|
||||
lines.push(horizontalRule());
|
||||
lines.push(pc.dim(`Report complete. ${input.traces.length} trace(s) displayed.`));
|
||||
lines.push("");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
export async function writeFeedbackExportBundle(input: {
|
||||
apiBase: string;
|
||||
companyId: string;
|
||||
traces: FeedbackTrace[];
|
||||
outputDir: string;
|
||||
traceBundleFetcher?: (trace: FeedbackTrace) => Promise<FeedbackTraceBundle>;
|
||||
}): Promise<FeedbackExportResult> {
|
||||
await ensureEmptyOutputDirectory(input.outputDir);
|
||||
await mkdir(path.join(input.outputDir, "votes"), { recursive: true });
|
||||
await mkdir(path.join(input.outputDir, "traces"), { recursive: true });
|
||||
await mkdir(path.join(input.outputDir, "full-traces"), { recursive: true });
|
||||
|
||||
const summary = summarizeFeedbackTraces(input.traces);
|
||||
const voteFiles: string[] = [];
|
||||
const traceFiles: string[] = [];
|
||||
const fullTraceDirs: string[] = [];
|
||||
const fullTraceFiles: string[] = [];
|
||||
const issueSet = new Set<string>();
|
||||
|
||||
for (const trace of input.traces) {
|
||||
const issueRef = sanitizeFileSegment(trace.issueIdentifier ?? trace.issueId);
|
||||
const voteRecord = buildFeedbackVoteRecord(trace);
|
||||
const voteFileName = `${issueRef}-${trace.feedbackVoteId.slice(0, 8)}.json`;
|
||||
const traceFileName = `${issueRef}-${trace.id.slice(0, 8)}.json`;
|
||||
voteFiles.push(voteFileName);
|
||||
traceFiles.push(traceFileName);
|
||||
issueSet.add(trace.issueIdentifier ?? trace.issueId);
|
||||
await writeFile(
|
||||
path.join(input.outputDir, "votes", voteFileName),
|
||||
`${JSON.stringify(voteRecord, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
await writeFile(
|
||||
path.join(input.outputDir, "traces", traceFileName),
|
||||
`${JSON.stringify(trace, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
if (input.traceBundleFetcher) {
|
||||
const bundle = await input.traceBundleFetcher(trace);
|
||||
const bundleDirName = `${issueRef}-${trace.id.slice(0, 8)}`;
|
||||
const bundleDir = path.join(input.outputDir, "full-traces", bundleDirName);
|
||||
await mkdir(bundleDir, { recursive: true });
|
||||
fullTraceDirs.push(bundleDirName);
|
||||
await writeFile(
|
||||
path.join(bundleDir, "bundle.json"),
|
||||
`${JSON.stringify(bundle, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
fullTraceFiles.push(path.posix.join("full-traces", bundleDirName, "bundle.json"));
|
||||
for (const file of bundle.files) {
|
||||
const targetPath = path.join(bundleDir, file.path);
|
||||
await mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await writeFile(targetPath, file.contents, "utf8");
|
||||
fullTraceFiles.push(path.posix.join("full-traces", bundleDirName, file.path.replace(/\\/g, "/")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const zipPath = `${input.outputDir}.zip`;
|
||||
const manifest: FeedbackExportManifest = {
|
||||
exportedAt: new Date().toISOString(),
|
||||
serverUrl: input.apiBase,
|
||||
companyId: input.companyId,
|
||||
summary: {
|
||||
...summary,
|
||||
uniqueIssues: issueSet.size,
|
||||
issues: Array.from(issueSet).sort((left, right) => left.localeCompare(right)),
|
||||
},
|
||||
files: {
|
||||
votes: voteFiles.slice().sort((left, right) => left.localeCompare(right)),
|
||||
traces: traceFiles.slice().sort((left, right) => left.localeCompare(right)),
|
||||
fullTraces: fullTraceDirs.slice().sort((left, right) => left.localeCompare(right)),
|
||||
zip: path.basename(zipPath),
|
||||
},
|
||||
};
|
||||
|
||||
await writeFile(
|
||||
path.join(input.outputDir, "index.json"),
|
||||
`${JSON.stringify(manifest, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
const archiveFiles = await collectJsonFilesForArchive(input.outputDir, [
|
||||
"index.json",
|
||||
...manifest.files.votes.map((file) => path.posix.join("votes", file)),
|
||||
...manifest.files.traces.map((file) => path.posix.join("traces", file)),
|
||||
...fullTraceFiles,
|
||||
]);
|
||||
await writeFile(zipPath, createStoredZipArchive(archiveFiles, path.basename(input.outputDir)));
|
||||
|
||||
return {
|
||||
outputDir: input.outputDir,
|
||||
zipPath,
|
||||
manifest,
|
||||
};
|
||||
}
|
||||
|
||||
export function renderFeedbackExportSummary(exported: FeedbackExportResult): string {
|
||||
const lines: string[] = [];
|
||||
lines.push("");
|
||||
lines.push(pc.bold(pc.magenta("Paperclip Feedback Export")));
|
||||
lines.push(pc.dim(exported.manifest.exportedAt));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(`${pc.dim("Company:")} ${exported.manifest.companyId}`);
|
||||
lines.push(`${pc.dim("Output:")} ${exported.outputDir}`);
|
||||
lines.push(`${pc.dim("Archive:")} ${exported.zipPath}`);
|
||||
lines.push("");
|
||||
lines.push(pc.bold("Export Summary"));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(` ${pc.green(pc.bold(String(exported.manifest.summary.thumbsUp)))} thumbs up`);
|
||||
lines.push(` ${pc.red(pc.bold(String(exported.manifest.summary.thumbsDown)))} thumbs down`);
|
||||
lines.push(` ${pc.yellow(pc.bold(String(exported.manifest.summary.withReason)))} with reason`);
|
||||
lines.push(` ${pc.bold(String(exported.manifest.summary.uniqueIssues))} unique issues`);
|
||||
lines.push("");
|
||||
lines.push(pc.dim("Files:"));
|
||||
lines.push(` ${path.join(exported.outputDir, "index.json")}`);
|
||||
lines.push(` ${path.join(exported.outputDir, "votes")} (${exported.manifest.files.votes.length} files)`);
|
||||
lines.push(` ${path.join(exported.outputDir, "traces")} (${exported.manifest.files.traces.length} files)`);
|
||||
lines.push(` ${path.join(exported.outputDir, "full-traces")} (${exported.manifest.files.fullTraces.length} bundles)`);
|
||||
lines.push(` ${exported.zipPath}`);
|
||||
lines.push("");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function readFeedbackReason(trace: FeedbackTrace): string | null {
|
||||
const payload = asRecord(trace.payloadSnapshot);
|
||||
const vote = asRecord(payload?.vote);
|
||||
const reason = vote?.reason;
|
||||
return typeof reason === "string" && reason.trim() ? reason.trim() : null;
|
||||
}
|
||||
|
||||
function buildFeedbackVoteRecord(trace: FeedbackTrace) {
|
||||
return {
|
||||
voteId: trace.feedbackVoteId,
|
||||
traceId: trace.id,
|
||||
issueId: trace.issueId,
|
||||
issueIdentifier: trace.issueIdentifier,
|
||||
issueTitle: trace.issueTitle,
|
||||
vote: trace.vote,
|
||||
targetType: trace.targetType,
|
||||
targetId: trace.targetId,
|
||||
targetSummary: trace.targetSummary,
|
||||
status: trace.status,
|
||||
consentVersion: trace.consentVersion,
|
||||
createdAt: trace.createdAt,
|
||||
updatedAt: trace.updatedAt,
|
||||
reason: readFeedbackReason(trace),
|
||||
};
|
||||
}
|
||||
|
||||
function asRecord(value: unknown): Record<string, unknown> | null {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) return null;
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function compactText(value: string | null | undefined, maxLength = 88): string | null {
|
||||
if (!value) return null;
|
||||
const compact = value.replace(/\s+/g, " ").trim();
|
||||
if (!compact) return null;
|
||||
if (compact.length <= maxLength) return compact;
|
||||
return `${compact.slice(0, maxLength - 3)}...`;
|
||||
}
|
||||
|
||||
function formatTimestamp(value: unknown): string {
|
||||
if (value instanceof Date) return value.toISOString().slice(0, 19).replace("T", " ");
|
||||
if (typeof value === "string") return value.slice(0, 19).replace("T", " ");
|
||||
return "-";
|
||||
}
|
||||
|
||||
function horizontalRule(): string {
|
||||
return pc.dim("-".repeat(72));
|
||||
}
|
||||
|
||||
function padRight(value: string, width: number): string {
|
||||
return `${value}${" ".repeat(Math.max(0, width - value.length))}`;
|
||||
}
|
||||
|
||||
function defaultFeedbackExportDirName(): string {
|
||||
const iso = new Date().toISOString().replace(/[-:]/g, "").replace(/\.\d{3}Z$/, "Z");
|
||||
return `feedback-export-${iso}`;
|
||||
}
|
||||
|
||||
async function ensureEmptyOutputDirectory(outputDir: string): Promise<void> {
|
||||
try {
|
||||
const info = await stat(outputDir);
|
||||
if (!info.isDirectory()) {
|
||||
throw new Error(`Output path already exists and is not a directory: ${outputDir}`);
|
||||
}
|
||||
const entries = await readdir(outputDir);
|
||||
if (entries.length > 0) {
|
||||
throw new Error(`Output directory already exists and is not empty: ${outputDir}`);
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "";
|
||||
if (/ENOENT/.test(message)) {
|
||||
await mkdir(outputDir, { recursive: true });
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function collectJsonFilesForArchive(
|
||||
outputDir: string,
|
||||
relativePaths: string[],
|
||||
): Promise<Record<string, string>> {
|
||||
const files: Record<string, string> = {};
|
||||
for (const relativePath of relativePaths) {
|
||||
const normalized = relativePath.replace(/\\/g, "/");
|
||||
files[normalized] = await readFile(path.join(outputDir, normalized), "utf8");
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
function sanitizeFileSegment(value: string): string {
|
||||
return value.replace(/[^a-zA-Z0-9._-]+/g, "-").replace(/^-+|-+$/g, "") || "feedback";
|
||||
}
|
||||
|
||||
function writeUint16(target: Uint8Array, offset: number, value: number) {
|
||||
target[offset] = value & 0xff;
|
||||
target[offset + 1] = (value >>> 8) & 0xff;
|
||||
}
|
||||
|
||||
function writeUint32(target: Uint8Array, offset: number, value: number) {
|
||||
target[offset] = value & 0xff;
|
||||
target[offset + 1] = (value >>> 8) & 0xff;
|
||||
target[offset + 2] = (value >>> 16) & 0xff;
|
||||
target[offset + 3] = (value >>> 24) & 0xff;
|
||||
}
|
||||
|
||||
function crc32(bytes: Uint8Array) {
|
||||
let crc = 0xffffffff;
|
||||
for (const byte of bytes) {
|
||||
crc ^= byte;
|
||||
for (let bit = 0; bit < 8; bit += 1) {
|
||||
crc = (crc & 1) === 1 ? (crc >>> 1) ^ 0xedb88320 : crc >>> 1;
|
||||
}
|
||||
}
|
||||
return (crc ^ 0xffffffff) >>> 0;
|
||||
}
|
||||
|
||||
function createStoredZipArchive(files: Record<string, string>, rootPath: string): Uint8Array {
|
||||
const encoder = new TextEncoder();
|
||||
const localChunks: Uint8Array[] = [];
|
||||
const centralChunks: Uint8Array[] = [];
|
||||
let localOffset = 0;
|
||||
let entryCount = 0;
|
||||
|
||||
for (const [relativePath, content] of Object.entries(files).sort(([left], [right]) => left.localeCompare(right))) {
|
||||
const fileName = encoder.encode(`${rootPath}/${relativePath}`);
|
||||
const body = encoder.encode(content);
|
||||
const checksum = crc32(body);
|
||||
|
||||
const localHeader = new Uint8Array(30 + fileName.length);
|
||||
writeUint32(localHeader, 0, 0x04034b50);
|
||||
writeUint16(localHeader, 4, 20);
|
||||
writeUint16(localHeader, 6, 0x0800);
|
||||
writeUint16(localHeader, 8, 0);
|
||||
writeUint32(localHeader, 14, checksum);
|
||||
writeUint32(localHeader, 18, body.length);
|
||||
writeUint32(localHeader, 22, body.length);
|
||||
writeUint16(localHeader, 26, fileName.length);
|
||||
localHeader.set(fileName, 30);
|
||||
|
||||
const centralHeader = new Uint8Array(46 + fileName.length);
|
||||
writeUint32(centralHeader, 0, 0x02014b50);
|
||||
writeUint16(centralHeader, 4, 20);
|
||||
writeUint16(centralHeader, 6, 20);
|
||||
writeUint16(centralHeader, 8, 0x0800);
|
||||
writeUint16(centralHeader, 10, 0);
|
||||
writeUint32(centralHeader, 16, checksum);
|
||||
writeUint32(centralHeader, 20, body.length);
|
||||
writeUint32(centralHeader, 24, body.length);
|
||||
writeUint16(centralHeader, 28, fileName.length);
|
||||
writeUint32(centralHeader, 42, localOffset);
|
||||
centralHeader.set(fileName, 46);
|
||||
|
||||
localChunks.push(localHeader, body);
|
||||
centralChunks.push(centralHeader);
|
||||
localOffset += localHeader.length + body.length;
|
||||
entryCount += 1;
|
||||
}
|
||||
|
||||
const centralDirectoryLength = centralChunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
||||
const archive = new Uint8Array(
|
||||
localChunks.reduce((sum, chunk) => sum + chunk.length, 0) + centralDirectoryLength + 22,
|
||||
);
|
||||
let offset = 0;
|
||||
for (const chunk of localChunks) {
|
||||
archive.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
const centralDirectoryOffset = offset;
|
||||
for (const chunk of centralChunks) {
|
||||
archive.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
writeUint32(archive, offset, 0x06054b50);
|
||||
writeUint16(archive, offset + 8, entryCount);
|
||||
writeUint16(archive, offset + 10, entryCount);
|
||||
writeUint32(archive, offset + 12, centralDirectoryLength);
|
||||
writeUint32(archive, offset + 16, centralDirectoryOffset);
|
||||
return archive;
|
||||
}
|
||||
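A quick way to sanity-check the hand-rolled crc32 helper above: the standard CRC-32 check value for the ASCII string "123456789" is 0xCBF43926, so a throwaway assertion (not part of the diff) would be:

const check = crc32(new TextEncoder().encode("123456789"));
console.assert(check === 0xcbf43926, `unexpected crc32: ${check.toString(16)}`);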
@@ -1,8 +1,10 @@
|
||||
import { Command } from "commander";
|
||||
import { writeFile } from "node:fs/promises";
|
||||
import {
|
||||
addIssueCommentSchema,
|
||||
checkoutIssueSchema,
|
||||
createIssueSchema,
|
||||
type FeedbackTrace,
|
||||
updateIssueSchema,
|
||||
type Issue,
|
||||
type IssueComment,
|
||||
@@ -15,6 +17,11 @@ import {
|
||||
resolveCommandContext,
|
||||
type BaseClientOptions,
|
||||
} from "./common.js";
|
||||
import {
|
||||
buildFeedbackTraceQuery,
|
||||
normalizeFeedbackTraceExportFormat,
|
||||
serializeFeedbackTraces,
|
||||
} from "./feedback.js";
|
||||
|
||||
interface IssueBaseOptions extends BaseClientOptions {
|
||||
status?: string;
|
||||
@@ -61,6 +68,18 @@ interface IssueCheckoutOptions extends BaseClientOptions {
|
||||
expectedStatuses?: string;
|
||||
}
|
||||
|
||||
interface IssueFeedbackOptions extends BaseClientOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
includePayload?: boolean;
|
||||
out?: string;
|
||||
format?: string;
|
||||
}
|
||||
|
||||
export function registerIssueCommands(program: Command): void {
|
||||
const issue = program.command("issue").description("Issue operations");
|
||||
|
||||
@@ -237,6 +256,85 @@ export function registerIssueCommands(program: Command): void {
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
issue
|
||||
.command("feedback:list")
|
||||
.description("List feedback traces for an issue")
|
||||
.argument("<issueId>", "Issue ID")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--include-payload", "Include stored payload snapshots in the response")
|
||||
.action(async (issueId: string, opts: IssueFeedbackOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const traces = (await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/issues/${issueId}/feedback-traces${buildFeedbackTraceQuery(opts)}`,
|
||||
)) ?? [];
|
||||
if (ctx.json) {
|
||||
printOutput(traces, { json: true });
|
||||
return;
|
||||
}
|
||||
printOutput(
|
||||
traces.map((trace) => ({
|
||||
id: trace.id,
|
||||
issue: trace.issueIdentifier ?? trace.issueId,
|
||||
vote: trace.vote,
|
||||
status: trace.status,
|
||||
targetType: trace.targetType,
|
||||
target: trace.targetSummary.label,
|
||||
})),
|
||||
{ json: false },
|
||||
);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
issue
|
||||
.command("feedback:export")
|
||||
.description("Export feedback traces for an issue")
|
||||
.argument("<issueId>", "Issue ID")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--include-payload", "Include stored payload snapshots in the export")
|
||||
.option("--out <path>", "Write export to a file path instead of stdout")
|
||||
.option("--format <format>", "Export format: json or ndjson", "ndjson")
|
||||
.action(async (issueId: string, opts: IssueFeedbackOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const traces = (await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/issues/${issueId}/feedback-traces${buildFeedbackTraceQuery(opts, opts.includePayload ?? true)}`,
|
||||
)) ?? [];
|
||||
const serialized = serializeFeedbackTraces(traces, opts.format);
|
||||
if (opts.out?.trim()) {
|
||||
await writeFile(opts.out, serialized, "utf8");
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{ out: opts.out, count: traces.length, format: normalizeFeedbackTraceExportFormat(opts.format) },
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(`Wrote ${traces.length} feedback trace(s) to ${opts.out}`);
|
||||
return;
|
||||
}
|
||||
process.stdout.write(`${serialized}${serialized.endsWith("\n") ? "" : "\n"}`);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
issue
|
||||
.command("checkout")
|
||||
|
||||
@@ -63,6 +63,9 @@ function defaultConfig(): PaperclipConfig {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: defaultStorageConfig(),
|
||||
secrets: defaultSecretsConfig(),
|
||||
};
|
||||
|
||||
@@ -33,6 +33,11 @@ import {
|
||||
} from "../config/home.js";
|
||||
import { bootstrapCeoInvite } from "./auth-bootstrap-ceo.js";
|
||||
import { printPaperclipCliBanner } from "../utils/banner.js";
|
||||
import {
|
||||
getTelemetryClient,
|
||||
trackInstallStarted,
|
||||
trackInstallCompleted,
|
||||
} from "../telemetry.js";
|
||||
|
||||
type SetupMode = "quickstart" | "advanced";
|
||||
|
||||
@@ -244,11 +249,12 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
),
|
||||
);
|
||||
|
||||
let existingConfig: PaperclipConfig | null = null;
|
||||
if (configExists(opts.config)) {
|
||||
p.log.message(pc.dim(`${configPath} exists, updating config`));
|
||||
p.log.message(pc.dim(`${configPath} exists`));
|
||||
|
||||
try {
|
||||
readConfig(opts.config);
|
||||
existingConfig = readConfig(opts.config);
|
||||
} catch (err) {
|
||||
p.log.message(
|
||||
pc.yellow(
|
||||
@@ -258,6 +264,76 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
}
|
||||
}
|
||||
|
||||
if (existingConfig) {
|
||||
p.log.message(
|
||||
pc.dim("Existing Paperclip install detected; keeping the current configuration unchanged."),
|
||||
);
|
||||
p.log.message(pc.dim(`Use ${pc.cyan("paperclipai configure")} if you want to change settings.`));
|
||||
|
||||
const jwtSecret = ensureAgentJwtSecret(configPath);
|
||||
const envFilePath = resolveAgentJwtEnvFile(configPath);
|
||||
if (jwtSecret.created) {
|
||||
p.log.success(`Created ${pc.cyan("PAPERCLIP_AGENT_JWT_SECRET")} in ${pc.dim(envFilePath)}`);
|
||||
} else if (process.env.PAPERCLIP_AGENT_JWT_SECRET?.trim()) {
|
||||
p.log.info(`Using existing ${pc.cyan("PAPERCLIP_AGENT_JWT_SECRET")} from environment`);
|
||||
} else {
|
||||
p.log.info(`Using existing ${pc.cyan("PAPERCLIP_AGENT_JWT_SECRET")} in ${pc.dim(envFilePath)}`);
|
||||
}
|
||||
|
||||
const keyResult = ensureLocalSecretsKeyFile(existingConfig, configPath);
|
||||
if (keyResult.status === "created") {
|
||||
p.log.success(`Created local secrets key file at ${pc.dim(keyResult.path)}`);
|
||||
} else if (keyResult.status === "existing") {
|
||||
p.log.message(pc.dim(`Using existing local secrets key file at ${keyResult.path}`));
|
||||
}
|
||||
|
||||
p.note(
|
||||
[
|
||||
"Existing config preserved",
|
||||
`Database: ${existingConfig.database.mode}`,
|
||||
existingConfig.llm ? `LLM: ${existingConfig.llm.provider}` : "LLM: not configured",
|
||||
`Logging: ${existingConfig.logging.mode} -> ${existingConfig.logging.logDir}`,
|
||||
`Server: ${existingConfig.server.deploymentMode}/${existingConfig.server.exposure} @ ${existingConfig.server.host}:${existingConfig.server.port}`,
|
||||
`Allowed hosts: ${existingConfig.server.allowedHostnames.length > 0 ? existingConfig.server.allowedHostnames.join(", ") : "(loopback only)"}`,
|
||||
`Auth URL mode: ${existingConfig.auth.baseUrlMode}${existingConfig.auth.publicBaseUrl ? ` (${existingConfig.auth.publicBaseUrl})` : ""}`,
|
||||
`Storage: ${existingConfig.storage.provider}`,
|
||||
`Secrets: ${existingConfig.secrets.provider} (strict mode ${existingConfig.secrets.strictMode ? "on" : "off"})`,
|
||||
"Agent auth: PAPERCLIP_AGENT_JWT_SECRET configured",
|
||||
].join("\n"),
|
||||
"Configuration ready",
|
||||
);
|
||||
|
||||
p.note(
|
||||
[
|
||||
`Run: ${pc.cyan("paperclipai run")}`,
|
||||
`Reconfigure later: ${pc.cyan("paperclipai configure")}`,
|
||||
`Diagnose setup: ${pc.cyan("paperclipai doctor")}`,
|
||||
].join("\n"),
|
||||
"Next commands",
|
||||
);
|
||||
|
||||
let shouldRunNow = opts.run === true || opts.yes === true;
|
||||
if (!shouldRunNow && !opts.invokedByRun && process.stdin.isTTY && process.stdout.isTTY) {
|
||||
const answer = await p.confirm({
|
||||
message: "Start Paperclip now?",
|
||||
initialValue: true,
|
||||
});
|
||||
if (!p.isCancel(answer)) {
|
||||
shouldRunNow = answer;
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldRunNow && !opts.invokedByRun) {
|
||||
process.env.PAPERCLIP_OPEN_ON_LISTEN = "true";
|
||||
const { runCommand } = await import("./run.js");
|
||||
await runCommand({ config: configPath, repair: true, yes: true });
|
||||
return;
|
||||
}
|
||||
|
||||
p.outro("Existing Paperclip setup is ready.");
|
||||
return;
|
||||
}
|
||||
|
||||
let setupMode: SetupMode = "quickstart";
|
||||
if (opts.yes) {
|
||||
p.log.message(pc.dim("`--yes` enabled: using Quickstart defaults."));
|
||||
@@ -285,6 +361,9 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
setupMode = setupModeChoice as SetupMode;
|
||||
}
|
||||
|
||||
const tc = getTelemetryClient();
|
||||
if (tc) trackInstallStarted(tc);
|
||||
|
||||
let llm: PaperclipConfig["llm"] | undefined;
|
||||
const { defaults: derivedDefaults, usedEnvKeys, ignoredEnvKeys } = quickstartDefaultsFromEnv();
|
||||
let {
|
||||
@@ -417,6 +496,9 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
logging,
|
||||
server,
|
||||
auth,
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage,
|
||||
secrets,
|
||||
};
|
||||
@@ -430,6 +512,10 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
|
||||
writeConfig(config, opts.config);
|
||||
|
||||
if (tc) trackInstallCompleted(tc, {
|
||||
adapterType: server.deploymentMode,
|
||||
});
|
||||
|
||||
p.note(
|
||||
[
|
||||
`Database: ${database.mode}`,
|
||||
|
||||
cli/src/commands/routines.ts (new file, 352 lines)
@@ -0,0 +1,352 @@
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import path from "node:path";
|
||||
import { Command } from "commander";
|
||||
import pc from "picocolors";
|
||||
import {
|
||||
applyPendingMigrations,
|
||||
createDb,
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
ensurePostgresDatabase,
|
||||
formatEmbeddedPostgresError,
|
||||
routines,
|
||||
} from "@paperclipai/db";
|
||||
import { eq, inArray } from "drizzle-orm";
|
||||
import { loadPaperclipEnvFile } from "../config/env.js";
|
||||
import { readConfig, resolveConfigPath } from "../config/store.js";
|
||||
|
||||
type RoutinesDisableAllOptions = {
|
||||
config?: string;
|
||||
dataDir?: string;
|
||||
companyId?: string;
|
||||
json?: boolean;
|
||||
};
|
||||
|
||||
type DisableAllRoutinesResult = {
|
||||
companyId: string;
|
||||
totalRoutines: number;
|
||||
pausedCount: number;
|
||||
alreadyPausedCount: number;
|
||||
archivedCount: number;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
type EmbeddedPostgresHandle = {
|
||||
port: number;
|
||||
startedByThisProcess: boolean;
|
||||
stop: () => Promise<void>;
|
||||
};
|
||||
|
||||
type ClosableDb = ReturnType<typeof createDb> & {
|
||||
$client?: {
|
||||
end?: (options?: { timeout?: number }) => Promise<void>;
|
||||
};
|
||||
};
|
||||
|
||||
function nonEmpty(value: string | null | undefined): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
async function isPortAvailable(port: number): Promise<boolean> {
|
||||
return await new Promise<boolean>((resolve) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.once("error", () => resolve(false));
|
||||
server.listen(port, "127.0.0.1", () => {
|
||||
server.close(() => resolve(true));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function findAvailablePort(preferredPort: number): Promise<number> {
|
||||
let port = Math.max(1, Math.trunc(preferredPort));
|
||||
while (!(await isPortAvailable(port))) {
|
||||
port += 1;
|
||||
}
|
||||
return port;
|
||||
}
|
||||
|
||||
function readPidFilePort(postmasterPidFile: string): number | null {
|
||||
if (!fs.existsSync(postmasterPidFile)) return null;
|
||||
try {
|
||||
const lines = fs.readFileSync(postmasterPidFile, "utf8").split("\n");
|
||||
const port = Number(lines[3]?.trim());
|
||||
return Number.isInteger(port) && port > 0 ? port : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function readRunningPostmasterPid(postmasterPidFile: string): number | null {
|
||||
if (!fs.existsSync(postmasterPidFile)) return null;
|
||||
try {
|
||||
const pid = Number(fs.readFileSync(postmasterPidFile, "utf8").split("\n")[0]?.trim());
|
||||
if (!Number.isInteger(pid) || pid <= 0) return null;
|
||||
process.kill(pid, 0);
|
||||
return pid;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureEmbeddedPostgres(dataDir: string, preferredPort: number): Promise<EmbeddedPostgresHandle> {
|
||||
const moduleName = "embedded-postgres";
|
||||
let EmbeddedPostgres: EmbeddedPostgresCtor;
|
||||
try {
|
||||
const mod = await import(moduleName);
|
||||
EmbeddedPostgres = mod.default as EmbeddedPostgresCtor;
|
||||
} catch {
|
||||
throw new Error(
|
||||
"Embedded PostgreSQL support requires dependency `embedded-postgres`. Reinstall dependencies and try again.",
|
||||
);
|
||||
}
|
||||
|
||||
const postmasterPidFile = path.resolve(dataDir, "postmaster.pid");
|
||||
const runningPid = readRunningPostmasterPid(postmasterPidFile);
|
||||
if (runningPid) {
|
||||
return {
|
||||
port: readPidFilePort(postmasterPidFile) ?? preferredPort,
|
||||
startedByThisProcess: false,
|
||||
stop: async () => {},
|
||||
};
|
||||
}
|
||||
|
||||
const port = await findAvailablePort(preferredPort);
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: logBuffer.append,
|
||||
onError: logBuffer.append,
|
||||
});
|
||||
|
||||
if (!fs.existsSync(path.resolve(dataDir, "PG_VERSION"))) {
|
||||
try {
|
||||
await instance.initialise();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (fs.existsSync(postmasterPidFile)) {
|
||||
fs.rmSync(postmasterPidFile, { force: true });
|
||||
}
|
||||
|
||||
try {
|
||||
await instance.start();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
port,
|
||||
startedByThisProcess: true,
|
||||
stop: async () => {
|
||||
await instance.stop();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function closeDb(db: ClosableDb): Promise<void> {
|
||||
await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
||||
}
|
||||
|
||||
async function openConfiguredDb(configPath: string): Promise<{
|
||||
db: ClosableDb;
|
||||
stop: () => Promise<void>;
|
||||
}> {
|
||||
const config = readConfig(configPath);
|
||||
if (!config) {
|
||||
throw new Error(`Config not found at ${configPath}.`);
|
||||
}
|
||||
|
||||
let embeddedHandle: EmbeddedPostgresHandle | null = null;
|
||||
try {
|
||||
if (config.database.mode === "embedded-postgres") {
|
||||
embeddedHandle = await ensureEmbeddedPostgres(
|
||||
config.database.embeddedPostgresDataDir,
|
||||
config.database.embeddedPostgresPort,
|
||||
);
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
const db = createDb(connectionString) as ClosableDb;
|
||||
return {
|
||||
db,
|
||||
stop: async () => {
|
||||
await closeDb(db);
|
||||
if (embeddedHandle?.startedByThisProcess) {
|
||||
await embeddedHandle.stop().catch(() => undefined);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const connectionString = nonEmpty(config.database.connectionString);
|
||||
if (!connectionString) {
|
||||
throw new Error(`Config at ${configPath} does not define a database connection string.`);
|
||||
}
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
const db = createDb(connectionString) as ClosableDb;
|
||||
return {
|
||||
db,
|
||||
stop: async () => {
|
||||
await closeDb(db);
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (embeddedHandle?.startedByThisProcess) {
|
||||
await embeddedHandle.stop().catch(() => undefined);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function disableAllRoutinesInConfig(
|
||||
options: Pick<RoutinesDisableAllOptions, "config" | "companyId">,
|
||||
): Promise<DisableAllRoutinesResult> {
|
||||
const configPath = resolveConfigPath(options.config);
|
||||
loadPaperclipEnvFile(configPath);
|
||||
const companyId =
|
||||
nonEmpty(options.companyId)
|
||||
?? nonEmpty(process.env.PAPERCLIP_COMPANY_ID)
|
||||
?? null;
|
||||
if (!companyId) {
|
||||
throw new Error("Company ID is required. Pass --company-id or set PAPERCLIP_COMPANY_ID.");
|
||||
}
|
||||
|
||||
const config = readConfig(configPath);
|
||||
if (!config) {
|
||||
throw new Error(`Config not found at ${configPath}.`);
|
||||
}
|
||||
|
||||
let embeddedHandle: EmbeddedPostgresHandle | null = null;
|
||||
let db: ClosableDb | null = null;
|
||||
try {
|
||||
if (config.database.mode === "embedded-postgres") {
|
||||
embeddedHandle = await ensureEmbeddedPostgres(
|
||||
config.database.embeddedPostgresDataDir,
|
||||
config.database.embeddedPostgresPort,
|
||||
);
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
db = createDb(connectionString) as ClosableDb;
|
||||
} else {
|
||||
const connectionString = nonEmpty(config.database.connectionString);
|
||||
if (!connectionString) {
|
||||
throw new Error(`Config at ${configPath} does not define a database connection string.`);
|
||||
}
|
||||
await applyPendingMigrations(connectionString);
|
||||
db = createDb(connectionString) as ClosableDb;
|
||||
}
|
||||
|
||||
const existing = await db
|
||||
.select({
|
||||
id: routines.id,
|
||||
status: routines.status,
|
||||
})
|
||||
.from(routines)
|
||||
.where(eq(routines.companyId, companyId));
|
||||
|
||||
const alreadyPausedCount = existing.filter((routine) => routine.status === "paused").length;
|
||||
const archivedCount = existing.filter((routine) => routine.status === "archived").length;
|
||||
const idsToPause = existing
|
||||
.filter((routine) => routine.status !== "paused" && routine.status !== "archived")
|
||||
.map((routine) => routine.id);
|
||||
|
||||
if (idsToPause.length > 0) {
|
||||
await db
|
||||
.update(routines)
|
||||
.set({
|
||||
status: "paused",
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(inArray(routines.id, idsToPause));
|
||||
}
|
||||
|
||||
return {
|
||||
companyId,
|
||||
totalRoutines: existing.length,
|
||||
pausedCount: idsToPause.length,
|
||||
alreadyPausedCount,
|
||||
archivedCount,
|
||||
};
|
||||
} finally {
|
||||
if (db) {
|
||||
await closeDb(db);
|
||||
}
|
||||
if (embeddedHandle?.startedByThisProcess) {
|
||||
await embeddedHandle.stop().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function disableAllRoutinesCommand(options: RoutinesDisableAllOptions): Promise<void> {
|
||||
const result = await disableAllRoutinesInConfig(options);
|
||||
|
||||
if (options.json) {
|
||||
console.log(JSON.stringify(result, null, 2));
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.totalRoutines === 0) {
|
||||
console.log(pc.dim(`No routines found for company ${result.companyId}.`));
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Paused ${result.pausedCount} routine(s) for company ${result.companyId} ` +
|
||||
`(${result.alreadyPausedCount} already paused, ${result.archivedCount} archived).`,
|
||||
);
|
||||
}
|
||||
|
||||
export function registerRoutineCommands(program: Command): void {
|
||||
const routinesCommand = program.command("routines").description("Local routine maintenance commands");
|
||||
|
||||
routinesCommand
|
||||
.command("disable-all")
|
||||
.description("Pause all non-archived routines in the configured local instance for one company")
|
||||
.option("-c, --config <path>", "Path to config file")
|
||||
.option("-d, --data-dir <path>", "Paperclip data directory root (isolates state from ~/.paperclip)")
|
||||
.option("-C, --company-id <id>", "Company ID")
|
||||
.option("--json", "Output raw JSON")
|
||||
.action(async (opts: RoutinesDisableAllOptions) => {
|
||||
try {
|
||||
await disableAllRoutinesCommand(opts);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(pc.red(message));
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -224,6 +224,9 @@ export function buildWorktreeConfig(input: {
|
||||
...(authPublicBaseUrl ? { publicBaseUrl: authPublicBaseUrl } : {}),
|
||||
disableSignUp: source?.auth.disableSignUp ?? false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: source?.telemetry?.enabled ?? true,
|
||||
},
|
||||
storage: {
|
||||
provider: source?.storage.provider ?? "local_disk",
|
||||
localDisk: {
|
||||
|
||||
@@ -41,6 +41,8 @@ import {
|
||||
projects,
|
||||
runDatabaseBackup,
|
||||
runDatabaseRestore,
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
formatEmbeddedPostgresError,
|
||||
} from "@paperclipai/db";
|
||||
import type { Command } from "commander";
|
||||
import { ensureAgentJwtSecret, loadPaperclipEnvFile, mergePaperclipEnvEntries, readPaperclipEnvEntries, resolvePaperclipEnvFile } from "../config/env.js";
|
||||
@@ -465,6 +467,62 @@ async function findAvailablePort(preferredPort: number, reserved = new Set<numbe
|
||||
return port;
|
||||
}
|
||||
|
||||
function resolveRepoManagedWorktreesRoot(cwd: string): string | null {
|
||||
const normalized = path.resolve(cwd);
|
||||
const marker = `${path.sep}.paperclip${path.sep}worktrees${path.sep}`;
|
||||
const index = normalized.indexOf(marker);
|
||||
if (index === -1) return null;
|
||||
const repoRoot = normalized.slice(0, index);
|
||||
return path.resolve(repoRoot, ".paperclip", "worktrees");
|
||||
}
|
||||
|
||||
function collectClaimedWorktreePorts(homeDir: string, currentInstanceId: string, cwd: string): {
|
||||
serverPorts: Set<number>;
|
||||
databasePorts: Set<number>;
|
||||
} {
|
||||
const serverPorts = new Set<number>();
|
||||
const databasePorts = new Set<number>();
|
||||
const configPaths = new Set<string>();
|
||||
const instancesDir = path.resolve(homeDir, "instances");
|
||||
if (existsSync(instancesDir)) {
|
||||
for (const entry of readdirSync(instancesDir, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory() || entry.name === currentInstanceId) continue;
|
||||
|
||||
const configPath = path.resolve(instancesDir, entry.name, "config.json");
|
||||
if (existsSync(configPath)) {
|
||||
configPaths.add(configPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const repoManagedWorktreesRoot = resolveRepoManagedWorktreesRoot(cwd);
|
||||
if (repoManagedWorktreesRoot && existsSync(repoManagedWorktreesRoot)) {
|
||||
for (const entry of readdirSync(repoManagedWorktreesRoot, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
const configPath = path.resolve(repoManagedWorktreesRoot, entry.name, ".paperclip", "config.json");
|
||||
if (existsSync(configPath)) {
|
||||
configPaths.add(configPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const configPath of configPaths) {
|
||||
try {
|
||||
const config = readConfig(configPath);
|
||||
if (config?.server.port) {
|
||||
serverPorts.add(config.server.port);
|
||||
}
|
||||
if (config?.database.mode === "embedded-postgres") {
|
||||
databasePorts.add(config.database.embeddedPostgresPort);
|
||||
}
|
||||
} catch {
|
||||
// Ignore malformed sibling configs.
|
||||
}
|
||||
}
|
||||
|
||||
return { serverPorts, databasePorts };
|
||||
}
|
||||
|
||||
function detectGitBranchName(cwd: string): string | null {
|
||||
try {
|
||||
const value = execFileSync("git", ["branch", "--show-current"], {
|
||||
@@ -750,24 +808,39 @@ async function ensureEmbeddedPostgres(dataDir: string, preferredPort: number): P
|
||||
}
|
||||
|
||||
const port = await findAvailablePort(preferredPort);
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: logBuffer.append,
|
||||
onError: logBuffer.append,
|
||||
});
|
||||
|
||||
if (!existsSync(path.resolve(dataDir, "PG_VERSION"))) {
|
||||
await instance.initialise();
|
||||
try {
|
||||
await instance.initialise();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
}
|
||||
if (existsSync(postmasterPidFile)) {
|
||||
rmSync(postmasterPidFile, { force: true });
|
||||
}
|
||||
await instance.start();
|
||||
try {
|
||||
await instance.start();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
port,
|
||||
@@ -886,10 +959,14 @@ async function runWorktreeInit(opts: WorktreeInitOptions): Promise<void> {
|
||||
rmSync(paths.instanceRoot, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
const claimedPorts = collectClaimedWorktreePorts(paths.homeDir, paths.instanceId, paths.cwd);
|
||||
const preferredServerPort = opts.serverPort ?? ((sourceConfig?.server.port ?? 3100) + 1);
|
||||
const serverPort = await findAvailablePort(preferredServerPort);
|
||||
const serverPort = await findAvailablePort(preferredServerPort, claimedPorts.serverPorts);
|
||||
const preferredDbPort = opts.dbPort ?? ((sourceConfig?.database.embeddedPostgresPort ?? 54329) + 1);
|
||||
const databasePort = await findAvailablePort(preferredDbPort, new Set([serverPort]));
|
||||
const databasePort = await findAvailablePort(
|
||||
preferredDbPort,
|
||||
new Set([...claimedPorts.databasePorts, serverPort]),
|
||||
);
|
||||
const targetConfig = buildWorktreeConfig({
|
||||
sourceConfig,
|
||||
paths,
|
||||
|
||||
@@ -7,6 +7,7 @@ export {
|
||||
loggingConfigSchema,
|
||||
serverConfigSchema,
|
||||
authConfigSchema,
|
||||
telemetryConfigSchema,
|
||||
storageConfigSchema,
|
||||
storageLocalDiskConfigSchema,
|
||||
storageS3ConfigSchema,
|
||||
@@ -19,10 +20,11 @@ export {
|
||||
type LoggingConfig,
|
||||
type ServerConfig,
|
||||
type AuthConfig,
|
||||
type TelemetryConfig,
|
||||
type StorageConfig,
|
||||
type StorageLocalDiskConfig,
|
||||
type StorageS3Config,
|
||||
type SecretsConfig,
|
||||
type SecretsLocalEncryptedConfig,
|
||||
type ConfigMeta,
|
||||
} from "@paperclipai/shared";
|
||||
} from "../../../packages/shared/src/config-schema.js";
|
||||
|
||||
@@ -15,11 +15,15 @@ import { registerAgentCommands } from "./commands/client/agent.js";
|
||||
import { registerApprovalCommands } from "./commands/client/approval.js";
|
||||
import { registerActivityCommands } from "./commands/client/activity.js";
|
||||
import { registerDashboardCommands } from "./commands/client/dashboard.js";
|
||||
import { registerRoutineCommands } from "./commands/routines.js";
|
||||
import { registerFeedbackCommands } from "./commands/client/feedback.js";
|
||||
import { applyDataDirOverride, type DataDirOptionLike } from "./config/data-dir.js";
|
||||
import { loadPaperclipEnvFile } from "./config/env.js";
|
||||
import { initTelemetryFromConfigFile, flushTelemetry } from "./telemetry.js";
|
||||
import { registerWorktreeCommands } from "./commands/worktree.js";
|
||||
import { registerPluginCommands } from "./commands/client/plugin.js";
|
||||
import { registerClientAuthCommands } from "./commands/client/auth.js";
|
||||
import { cliVersion } from "./version.js";
|
||||
|
||||
const program = new Command();
|
||||
const DATA_DIR_OPTION_HELP =
|
||||
@@ -28,7 +32,7 @@ const DATA_DIR_OPTION_HELP =
|
||||
program
|
||||
.name("paperclipai")
|
||||
.description("Paperclip CLI — setup, diagnose, and configure your instance")
|
||||
.version("0.2.7");
|
||||
.version(cliVersion);
|
||||
|
||||
program.hook("preAction", (_thisCommand, actionCommand) => {
|
||||
const options = actionCommand.optsWithGlobals() as DataDirOptionLike;
|
||||
@@ -38,6 +42,7 @@ program.hook("preAction", (_thisCommand, actionCommand) => {
|
||||
hasContextOption: optionNames.has("context"),
|
||||
});
|
||||
loadPaperclipEnvFile(options.config);
|
||||
initTelemetryFromConfigFile(options.config);
|
||||
});
|
||||
|
||||
program
|
||||
@@ -137,6 +142,8 @@ registerAgentCommands(program);
|
||||
registerApprovalCommands(program);
|
||||
registerActivityCommands(program);
|
||||
registerDashboardCommands(program);
|
||||
registerRoutineCommands(program);
|
||||
registerFeedbackCommands(program);
|
||||
registerWorktreeCommands(program);
|
||||
registerPluginCommands(program);
|
||||
|
||||
@@ -154,7 +161,20 @@ auth
|
||||
|
||||
registerClientAuthCommands(auth);
|
||||
|
||||
program.parseAsync().catch((err) => {
|
||||
console.error(err instanceof Error ? err.message : String(err));
|
||||
process.exit(1);
|
||||
});
|
||||
async function main(): Promise<void> {
|
||||
let failed = false;
|
||||
try {
|
||||
await program.parseAsync();
|
||||
} catch (err) {
|
||||
failed = true;
|
||||
console.error(err instanceof Error ? err.message : String(err));
|
||||
} finally {
|
||||
await flushTelemetry();
|
||||
}
|
||||
|
||||
if (failed) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
void main();
|
||||
|
||||
cli/src/telemetry.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import path from "node:path";
import {
  TelemetryClient,
  resolveTelemetryConfig,
  loadOrCreateState,
  trackInstallStarted,
  trackInstallCompleted,
  trackCompanyImported,
} from "../../packages/shared/src/telemetry/index.js";
import { resolvePaperclipInstanceRoot } from "./config/home.js";
import { readConfig } from "./config/store.js";
import { cliVersion } from "./version.js";

let client: TelemetryClient | null = null;

export function initTelemetry(fileConfig?: { enabled?: boolean }): TelemetryClient | null {
  if (client) return client;

  const config = resolveTelemetryConfig(fileConfig);
  if (!config.enabled) return null;

  const stateDir = path.join(resolvePaperclipInstanceRoot(), "telemetry");
  client = new TelemetryClient(config, () => loadOrCreateState(stateDir, cliVersion), cliVersion);
  return client;
}

export function initTelemetryFromConfigFile(configPath?: string): TelemetryClient | null {
  try {
    return initTelemetry(readConfig(configPath)?.telemetry);
  } catch {
    return initTelemetry();
  }
}

export function getTelemetryClient(): TelemetryClient | null {
  return client;
}

export async function flushTelemetry(): Promise<void> {
  if (client) {
    await client.flush();
  }
}

export {
  trackInstallStarted,
  trackInstallCompleted,
  trackCompanyImported,
};
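
A minimal sketch of how another CLI command could report through this module, mirroring the onboarding flow shown earlier in this diff; the `"local"` adapter type value and the relative import path are placeholders:

```ts
import { getTelemetryClient, trackInstallStarted, trackInstallCompleted, flushTelemetry } from "../telemetry.js";

// Sketch only: mirrors how onboard() reports install progress.
const tc = getTelemetryClient(); // null when telemetry is disabled
if (tc) trackInstallStarted(tc);
// ... perform the actual command work here ...
if (tc) trackInstallCompleted(tc, { adapterType: "local" }); // "local" is a placeholder value
await flushTelemetry(); // the entrypoint's main() also flushes in its finally block
```
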

cli/src/version.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { createRequire } from "node:module";

type PackageJson = {
  version?: string;
};

const require = createRequire(import.meta.url);
const pkg = require("../package.json") as PackageJson;

export const cliVersion = pkg.version ?? "0.0.0";

@@ -2,7 +2,7 @@
  "extends": "../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "dist",
    "rootDir": "src"
    "rootDir": ".."
  },
  "include": ["src"]
  "include": ["src", "../packages/shared/src"]
}

@@ -39,6 +39,17 @@ This starts:

`pnpm dev` runs the server in watch mode and restarts on changes from workspace packages (including adapter packages). Use `pnpm dev:once` to run without file watching.

`pnpm dev:once` auto-applies pending local migrations by default before starting the dev server.

`pnpm dev` and `pnpm dev:once` are now idempotent for the current repo and instance: if the matching Paperclip dev runner is already alive, Paperclip reports the existing process instead of starting a duplicate.
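
The liveness check behind that idempotency is conceptually the same probe the embedded-Postgres helper earlier in this diff applies to `postmaster.pid`: read a recorded PID and send it signal `0`. A rough sketch under that assumption; the actual state file the dev runner uses is not shown in this document:

```ts
import fs from "node:fs";

// Sketch only: the real dev runner's state file name and format are assumptions.
function isDevRunnerAlive(pidFile = ".paperclip/dev-runner.pid"): boolean {
  try {
    const pid = Number(fs.readFileSync(pidFile, "utf8").trim());
    if (!Number.isInteger(pid) || pid <= 0) return false;
    process.kill(pid, 0); // signal 0 only checks that the process exists
    return true;
  } catch {
    return false;
  }
}
```
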

Inspect or stop the current repo's managed dev runner:

```sh
pnpm dev:list
pnpm dev:stop
```

`pnpm dev:once` now tracks backend-relevant file changes and pending migrations. When the current boot is stale, the board UI shows a `Restart required` banner. You can also enable guarded auto-restart in `Instance Settings > Experimental`, which waits for queued/running local agent runs to finish before restarting the dev server.
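
A minimal sketch of what "guarded" means here, with hypothetical helper names (the real logic lives in the dev runner and instance settings, not in this snippet): drain queued/running local agent runs first, then restart.

```ts
// Sketch only: hypothetical helpers; waits for local agent runs to drain before restarting.
async function guardedRestart(
  listActiveLocalAgentRuns: () => Promise<number>,
  restartDevServer: () => Promise<void>,
  pollMs = 5_000,
): Promise<void> {
  // Poll until no local agent run is queued or running.
  while ((await listActiveLocalAgentRuns()) > 0) {
    await new Promise((resolve) => setTimeout(resolve, pollMs));
  }
  await restartDevServer();
}
```
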

Tailscale/private-auth dev mode:
@@ -86,7 +97,7 @@ docker run --name paperclip \
Or use Compose:

```sh
docker compose -f docker-compose.quickstart.yml up --build
docker compose -f docker/docker-compose.quickstart.yml up --build
```

See `doc/DOCKER.md` for API key wiring (`OPENAI_API_KEY` / `ANTHROPIC_API_KEY`) and persistence details.
@@ -134,6 +145,8 @@ For `codex_local`, Paperclip also manages a per-company Codex home under the ins

- `~/.paperclip/instances/default/companies/<company-id>/codex-home`

If the `codex` CLI is not installed or not on `PATH`, `codex_local` agent runs fail at execution time with a clear adapter error. Quota polling uses a short-lived `codex app-server` subprocess: when `codex` cannot be spawned, that provider reports `ok: false` in aggregated quota results and the API server keeps running (it must not exit on a missing binary).
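
The important property is that a missing binary becomes a reported failure rather than a crash. A minimal sketch of that behavior, assuming a simplified result shape; the real quota aggregation code is not shown here:

```ts
import { spawn } from "node:child_process";

type QuotaCheckResult = { provider: string; ok: boolean; error?: string };

// Sketch only: spawn a short-lived `codex app-server`, report ok: false if the binary cannot start.
async function checkCodexQuota(): Promise<QuotaCheckResult> {
  return await new Promise((resolve) => {
    const child = spawn("codex", ["app-server"], { stdio: "ignore" });
    child.once("error", (err) =>
      resolve({ provider: "codex", ok: false, error: err instanceof Error ? err.message : String(err) }),
    );
    // Real code would talk to the app-server; here we only prove it can start, then stop it.
    child.once("spawn", () => {
      child.kill();
      resolve({ provider: "codex", ok: true });
    });
  });
}
```
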

## Worktree-local Instances

When developing from multiple git worktrees, do not point two Paperclip servers at the same embedded PostgreSQL data directory.
@@ -162,6 +175,8 @@ Seed modes:

After `worktree init`, both the server and the CLI auto-load the repo-local `.paperclip/.env` when run inside that worktree, so normal commands like `pnpm dev`, `paperclipai doctor`, and `paperclipai db:backup` stay scoped to the worktree instance.

Provisioned git worktrees also pause all seeded routines in the isolated worktree database by default. This prevents copied daily/cron routines from firing unexpectedly inside the new workspace instance during development.
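
That pause step is what the new `disableAllRoutinesInConfig` export in `cli/src/commands/routines.ts` (added in this change set) performs. A minimal sketch of calling it directly against the worktree-local config; the relative import path assumes a caller inside `cli/src/`:

```ts
import { disableAllRoutinesInConfig } from "./commands/routines.js";

// Sketch only: pauses every non-archived routine in the worktree instance.
const result = await disableAllRoutinesInConfig({
  config: ".paperclip/config.json", // worktree-local config written by `worktree init`
  companyId: process.env.PAPERCLIP_COMPANY_ID, // the helper also falls back to this env var itself
});
console.log(`Paused ${result.pausedCount} of ${result.totalRoutines} routine(s) for ${result.companyId}.`);
```
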

That repo-local env also sets:

- `PAPERCLIP_IN_WORKTREE=true`
@@ -206,6 +221,17 @@ paperclipai worktree init --from-data-dir ~/.paperclip
paperclipai worktree init --force
```

Repair an already-created repo-managed worktree and reseed its isolated instance from the main default install:

```sh
cd ~/.paperclip/worktrees/PAP-884-ai-commits-component
pnpm paperclipai worktree init --force --seed-mode minimal \
  --name PAP-884-ai-commits-component \
  --from-config ~/.paperclip/instances/default/config.json
```

That rewrites the worktree-local `.paperclip/config.json` + `.paperclip/.env`, recreates the isolated instance under `~/.paperclip-worktrees/instances/<worktree-id>/`, and preserves the git worktree contents themselves.

**`pnpm paperclipai worktree:make <name> [options]`** — Create `~/NAME` as a git worktree, then initialize an isolated Paperclip instance inside it. This combines `git worktree add` with `worktree init` in a single step.

| Option | Description |
doc/DOCKER.md (132 changed lines)
@@ -2,6 +2,28 @@
|
||||
|
||||
Run Paperclip in Docker without installing Node or pnpm locally.
|
||||
|
||||
All commands below assume you are in the **project root** (the directory containing `package.json`), not inside `docker/`.
|
||||
|
||||
## Building the image
|
||||
|
||||
```sh
|
||||
docker build -t paperclip-local .
|
||||
```
|
||||
|
||||
The Dockerfile installs common agent tools (`git`, `gh`, `curl`, `wget`, `ripgrep`, `python3`) and the Claude, Codex, and OpenCode CLIs.
|
||||
|
||||
Build arguments:
|
||||
|
||||
| Arg | Default | Purpose |
|
||||
|-----|---------|---------|
|
||||
| `USER_UID` | `1000` | UID for the container `node` user (match your host UID to avoid permission issues on bind mounts) |
|
||||
| `USER_GID` | `1000` | GID for the container `node` group |
|
||||
|
||||
```sh
|
||||
docker build -t paperclip-local \
|
||||
--build-arg USER_UID=$(id -u) --build-arg USER_GID=$(id -g) .
|
||||
```
|
||||
|
||||
## One-liner (build + run)
|
||||
|
||||
```sh
|
||||
@@ -10,6 +32,7 @@ docker run --name paperclip \
|
||||
-p 3100:3100 \
|
||||
-e HOST=0.0.0.0 \
|
||||
-e PAPERCLIP_HOME=/paperclip \
|
||||
-e BETTER_AUTH_SECRET=$(openssl rand -hex 32) \
|
||||
-v "$(pwd)/data/docker-paperclip:/paperclip" \
|
||||
paperclip-local
|
||||
```
|
||||
@@ -25,10 +48,15 @@ Data persistence:
|
||||
|
||||
All persisted under your bind mount (`./data/docker-paperclip` in the example above).
|
||||
|
||||
## Compose Quickstart
|
||||
## Docker Compose
|
||||
|
||||
### Quickstart (embedded SQLite)
|
||||
|
||||
Single container, no external database. Data persists via a bind mount.
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.quickstart.yml up --build
|
||||
BETTER_AUTH_SECRET=$(openssl rand -hex 32) \
|
||||
docker compose -f docker/docker-compose.quickstart.yml up --build
|
||||
```
|
||||
|
||||
Defaults:
|
||||
@@ -39,11 +67,36 @@ Defaults:
|
||||
Optional overrides:
|
||||
|
||||
```sh
|
||||
PAPERCLIP_PORT=3200 PAPERCLIP_DATA_DIR=./data/pc docker compose -f docker-compose.quickstart.yml up --build
|
||||
PAPERCLIP_PORT=3200 PAPERCLIP_DATA_DIR=../data/pc \
|
||||
docker compose -f docker/docker-compose.quickstart.yml up --build
|
||||
```
|
||||
|
||||
**Note:** `PAPERCLIP_DATA_DIR` is resolved relative to the compose file (`docker/`), so `../data/pc` maps to `data/pc` in the project root.
|
||||
|
||||
If you change host port or use a non-local domain, set `PAPERCLIP_PUBLIC_URL` to the external URL you will use in browser/auth flows.
|
||||
|
||||
Pass `OPENAI_API_KEY` and/or `ANTHROPIC_API_KEY` to enable local adapter runs.
|
||||
|
||||
### Full stack (with PostgreSQL)
|
||||
|
||||
Paperclip server + PostgreSQL 17. The database is health-checked before the server starts.
|
||||
|
||||
```sh
|
||||
BETTER_AUTH_SECRET=$(openssl rand -hex 32) \
|
||||
docker compose -f docker/docker-compose.yml up --build
|
||||
```
|
||||
|
||||
PostgreSQL data persists in a named Docker volume (`pgdata`). Paperclip data persists in `paperclip-data`.
|
||||
|
||||
### Untrusted PR review
|
||||
|
||||
Isolated container for reviewing untrusted pull requests with Codex or Claude, without exposing your host machine. See `doc/UNTRUSTED-PR-REVIEW.md` for the full workflow.
|
||||
|
||||
```sh
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml build
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml run --rm --service-ports review
|
||||
```
|
||||
|
||||
## Authenticated Compose (Single Public URL)
|
||||
|
||||
For authenticated deployments, set one canonical public URL and let Paperclip derive auth/callback defaults:
|
||||
@@ -93,11 +146,71 @@ Notes:
|
||||
- Without API keys, the app still runs normally.
|
||||
- Adapter environment checks in Paperclip will surface missing auth/CLI prerequisites.
|
||||
|
||||
## Untrusted PR Review Container
|
||||
## Podman Quadlet (systemd)
|
||||
|
||||
If you want a separate Docker environment for reviewing untrusted pull requests with `codex` or `claude`, use the dedicated review workflow in `doc/UNTRUSTED-PR-REVIEW.md`.
|
||||
The `docker/quadlet/` directory contains unit files to run Paperclip + PostgreSQL as systemd services via Podman Quadlet.
|
||||
|
||||
That setup keeps CLI auth state in Docker volumes instead of your host home directory and uses a separate scratch workspace for PR checkouts and preview runs.
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `docker/quadlet/paperclip.pod` | Pod definition — groups containers into a shared network namespace |
|
||||
| `docker/quadlet/paperclip.container` | Paperclip server — joins the pod, connects to Postgres at `127.0.0.1` |
|
||||
| `docker/quadlet/paperclip-db.container` | PostgreSQL 17 — joins the pod, health-checked |
|
||||
|
||||
### Setup
|
||||
|
||||
1. Build the image (see above).
|
||||
|
||||
2. Copy quadlet files to your systemd directory:
|
||||
|
||||
```sh
|
||||
# Rootless (recommended)
|
||||
cp docker/quadlet/*.pod docker/quadlet/*.container \
|
||||
~/.config/containers/systemd/
|
||||
|
||||
# Or rootful
|
||||
sudo cp docker/quadlet/*.pod docker/quadlet/*.container \
|
||||
/etc/containers/systemd/
|
||||
```
|
||||
|
||||
3. Create a secrets env file (keep out of version control):
|
||||
|
||||
```sh
|
||||
cat > ~/.config/containers/systemd/paperclip.env <<EOL
|
||||
BETTER_AUTH_SECRET=$(openssl rand -hex 32)
|
||||
POSTGRES_USER=paperclip
|
||||
POSTGRES_PASSWORD=paperclip
|
||||
POSTGRES_DB=paperclip
|
||||
DATABASE_URL=postgres://paperclip:paperclip@127.0.0.1:5432/paperclip
|
||||
# OPENAI_API_KEY=sk-...
|
||||
# ANTHROPIC_API_KEY=sk-...
|
||||
EOL
|
||||
```
|
||||
|
||||
4. Create the data directory and start:
|
||||
|
||||
```sh
|
||||
mkdir -p ~/.local/share/paperclip
|
||||
systemctl --user daemon-reload
|
||||
systemctl --user start paperclip-pod
|
||||
```
|
||||
|
||||
### Quadlet management
|
||||
|
||||
```sh
|
||||
journalctl --user -u paperclip -f # App logs
|
||||
journalctl --user -u paperclip-db -f # DB logs
|
||||
systemctl --user status paperclip-pod # Pod status
|
||||
systemctl --user restart paperclip-pod # Restart all
|
||||
systemctl --user stop paperclip-pod # Stop all
|
||||
```
|
||||
|
||||
### Quadlet notes
|
||||
|
||||
- **First boot**: Unlike Docker Compose's `condition: service_healthy`, Quadlet's `After=` only waits for the DB unit to *start*, not for PostgreSQL to be ready. On a cold first boot you may see one or two restart attempts in `journalctl --user -u paperclip` while PostgreSQL initialises — this is expected and resolves automatically via `Restart=on-failure`.
|
||||
- Containers in a pod share `localhost`, so Paperclip reaches Postgres at `127.0.0.1:5432`.
|
||||
- PostgreSQL data persists in the `paperclip-pgdata` named volume.
|
||||
- Paperclip data persists at `~/.local/share/paperclip`.
|
||||
- For rootful quadlet deployment, remove `%h` prefixes and use absolute paths.
|
||||
|
||||
## Onboard Smoke Test (Ubuntu + npm only)
|
||||
|
||||
@@ -133,4 +246,9 @@ Notes:
|
||||
- In authenticated mode, the smoke script defaults `SMOKE_AUTO_BOOTSTRAP=true` and drives the real bootstrap path automatically: it signs up a real user, runs `paperclipai auth bootstrap-ceo` inside the container to mint a real bootstrap invite, accepts that invite over HTTP, and verifies board session access.
|
||||
- Run the script in the foreground to watch the onboarding flow; stop with `Ctrl+C` after validation.
|
||||
- Set `SMOKE_DETACH=true` to leave the container running for automation and optionally write shell-ready metadata to `SMOKE_METADATA_FILE`.
|
||||
- The image definition is in `Dockerfile.onboard-smoke`.
|
||||
- The image definition is in `docker/Dockerfile.onboard-smoke`.
|
||||
|
||||
## General Notes
|
||||
|
||||
- The `docker-entrypoint.sh` adjusts the container `node` user UID/GID at startup to match the values passed via `USER_UID`/`USER_GID`, avoiding permission issues on bind-mounted volumes.
|
||||
- Paperclip data persists via Docker volumes/bind mounts (compose) or at `~/.local/share/paperclip` (quadlet).
|
||||
|
||||
@@ -51,10 +51,9 @@ Public packages are discovered from:
|
||||
|
||||
- `packages/`
|
||||
- `server/`
|
||||
- `ui/`
|
||||
- `cli/`
|
||||
|
||||
`ui/` is ignored because it is private.
|
||||
|
||||
The version rewrite step now uses [`scripts/release-package-map.mjs`](../scripts/release-package-map.mjs), which:
|
||||
|
||||
- finds all public packages
|
||||
@@ -65,6 +64,57 @@ The version rewrite step now uses [`scripts/release-package-map.mjs`](../scripts
|
||||
|
||||
Those rewrites are temporary. The working tree is restored after publish or dry-run.
|
||||
|
||||
## `@paperclipai/ui` packaging

The UI package publishes prebuilt static assets, not the source workspace.

The `ui` package uses [`scripts/generate-ui-package-json.mjs`](../scripts/generate-ui-package-json.mjs) during `prepack` to swap in a lean publish manifest that:

- keeps the release-managed `name` and `version`
- publishes only `dist/`
- omits the source-only dependency graph from downstream installs

After packing or publishing, `postpack` restores the development manifest automatically.
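
Conceptually, the prepack swap is a read, trim, and rewrite of `ui/package.json` with a backup that `postpack` can restore. The sketch below only illustrates that idea; it is not the contents of `generate-ui-package-json.mjs`, and the backup filename is invented:

```ts
import { readFileSync, writeFileSync, copyFileSync } from "node:fs";

// Sketch only: the real logic lives in scripts/generate-ui-package-json.mjs.
copyFileSync("package.json", "package.json.dev-backup"); // postpack would restore from this
const pkg = JSON.parse(readFileSync("package.json", "utf8"));
const publishManifest = {
  name: pkg.name,       // keep release-managed name
  version: pkg.version, // keep release-managed version
  files: ["dist"],      // publish only prebuilt assets
  // source-only dependency graph intentionally omitted
};
writeFileSync("package.json", JSON.stringify(publishManifest, null, 2) + "\n");
```
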
|
||||
### Manual first publish for `@paperclipai/ui`
|
||||
|
||||
If you need to publish only the UI package once by hand, use the real package name:
|
||||
|
||||
- `@paperclipai/ui`
|
||||
|
||||
Recommended flow from the repo root:
|
||||
|
||||
```bash
|
||||
# optional sanity check: this 404s until the first publish exists
|
||||
npm view @paperclipai/ui version
|
||||
|
||||
# make sure the dist payload is fresh
|
||||
pnpm --filter @paperclipai/ui build
|
||||
|
||||
# confirm your local npm auth before the real publish
|
||||
npm whoami
|
||||
|
||||
# safe preview of the exact publish payload
|
||||
cd ui
|
||||
pnpm publish --dry-run --no-git-checks --access public
|
||||
|
||||
# real publish
|
||||
pnpm publish --no-git-checks --access public
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
- Publish from `ui/`, not the repo root.
|
||||
- `prepack` automatically rewrites `ui/package.json` to the lean publish manifest, and `postpack` restores the dev manifest after the command finishes.
|
||||
- If `npm view @paperclipai/ui version` already returns the same version that is in [`ui/package.json`](../ui/package.json), do not republish. Bump the version or use the normal repo-wide release flow in [`scripts/release.sh`](../scripts/release.sh).
|
||||
|
||||
If the first real publish returns npm `E404`, check npm-side prerequisites before retrying:
|
||||
|
||||
- `npm whoami` must succeed first. An expired or missing npm login will block the publish.
|
||||
- For an organization-scoped package like `@paperclipai/ui`, the `paperclipai` npm organization must exist and the publisher must be a member with permission to publish to that scope.
|
||||
- The initial publish must include `--access public` for a public scoped package.
|
||||
- npm also requires either account 2FA for publishing or a granular token that is allowed to bypass 2FA.
|
||||
|
||||
## Version formats
|
||||
|
||||
Paperclip uses calendar versions:
|
||||
@@ -135,6 +185,7 @@ This is the fastest way to restore the default install path if a stable release
|
||||
|
||||
- [`scripts/build-npm.sh`](../scripts/build-npm.sh)
|
||||
- [`scripts/generate-npm-package-json.mjs`](../scripts/generate-npm-package-json.mjs)
|
||||
- [`scripts/generate-ui-package-json.mjs`](../scripts/generate-ui-package-json.mjs)
|
||||
- [`scripts/release-package-map.mjs`](../scripts/release-package-map.mjs)
|
||||
- [`cli/esbuild.config.mjs`](../cli/esbuild.config.mjs)
|
||||
- [`doc/RELEASING.md`](RELEASING.md)
|
||||
|
||||
@@ -35,6 +35,7 @@ At minimum that includes:
|
||||
|
||||
- `paperclipai`
|
||||
- `@paperclipai/server`
|
||||
- `@paperclipai/ui`
|
||||
- public packages under `packages/`
|
||||
|
||||
### 2.1. In npm, open each package settings page
|
||||
|
||||
doc/SPEC.md (33 changed lines)
@@ -186,17 +186,21 @@ The heartbeat is a protocol, not a runtime. Paperclip defines how to initiate an

### Execution Adapters

Agent configuration includes an **adapter** that defines how Paperclip invokes the agent. Initial adapters:
Agent configuration includes an **adapter** that defines how Paperclip invokes the agent. Built-in adapters include:

| Adapter | Mechanism | Example |
| -------------------- | ----------------------- | --------------------------------------------- |
| `process` | Execute a child process | `python run_agent.py --agent-id {id}` |
| `http` | Send an HTTP request | `POST https://openclaw.example.com/hook/{id}` |
| `openclaw_gateway` | OpenClaw gateway API | Managed OpenClaw agent via gateway |
| `gemini_local` | Gemini CLI process | Local Gemini CLI with sandbox and approval |
| `hermes_local` | Hermes agent process | Local Hermes agent |
| Adapter | Mechanism | Example |
| ---------------- | -------------------------- | -------------------------------------------------- |
| `process` | Execute a child process | `python run_agent.py --agent-id {id}` |
| `http` | Send an HTTP request | `POST https://openclaw.example.com/hook/{id}` |
| `claude_local` | Local Claude Code process | Claude Code heartbeat worker |
| `codex_local` | Local Codex process | Codex CLI heartbeat worker |
| `opencode_local` | Local OpenCode process | OpenCode heartbeat worker |
| `pi_local` | Local Pi process | Pi CLI heartbeat worker |
| `cursor` | Cursor API/CLI bridge | Cursor-integrated heartbeat worker |
| `openclaw_gateway` | OpenClaw gateway API | Managed OpenClaw agent via gateway |
| `hermes_local` | Local Hermes process | Hermes agent heartbeat worker |

The `process` and `http` adapters ship as defaults. Additional adapters have been added for specific agent runtimes (see list above), and new adapter types can be registered via the plugin system (see Plugin / Extension Architecture).
The `process` and `http` adapters ship as generic defaults. Additional built-in adapters cover common local coding runtimes (see list above), and new adapter types can be registered via the plugin system (see Plugin / Extension Architecture).
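
As a rough illustration of that extension point, a plugin-registered adapter reduces to a type identifier plus an invoke hook. The names below are hypothetical, not the actual plugin API; see the Plugin / Extension Architecture section for the real contract:

```ts
// Hypothetical shapes for illustration only; the real contract is defined by the plugin system.
type AdapterInvokeContext = { agentId: string; payload: unknown };

interface AdapterDefinition {
  type: string;                                     // e.g. "my_custom_runtime"
  invoke(ctx: AdapterInvokeContext): Promise<void>; // called on each heartbeat wakeup
}

const myAdapter: AdapterDefinition = {
  type: "my_custom_runtime",
  async invoke({ agentId }) {
    // Deliver the wakeup to your own runtime however you like (process, HTTP, queue, ...).
    console.log(`waking agent ${agentId}`);
  },
};
```
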
### Adapter Interface
|
||||
|
||||
@@ -376,7 +380,7 @@ Flow:
|
||||
| Layer | Technology |
|
||||
| -------- | ------------------------------------------------------------ |
|
||||
| Frontend | React + Vite |
|
||||
| Backend | TypeScript + Hono (REST API, not tRPC — need non-TS clients) |
|
||||
| Backend | TypeScript + Express (REST API, not tRPC — need non-TS clients) |
|
||||
| Database | PostgreSQL (see [doc/DATABASE.md](./doc/DATABASE.md) for details — PGlite embedded for dev, Docker or hosted Supabase for production) |
|
||||
| Auth | [Better Auth](https://www.better-auth.com/) |
|
||||
|
||||
@@ -406,7 +410,7 @@ No separate "agent API" vs. "board API." Same endpoints, different authorization

### Work Artifacts

Paperclip does **not** manage work artifacts (code repos, file systems, deployments, documents). That's entirely the agent's domain. Paperclip tracks tasks and costs. Where and how work gets done is outside scope.
Paperclip manages task-linked work artifacts: issue documents (rich-text plans, specs, notes attached to issues) and file attachments. Agents read and write these through the API as part of normal task execution. Full delivery infrastructure (code repos, deployments, production runtime) remains the agent's domain — Paperclip orchestrates the work, not the build pipeline.
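
For example, an agent-side read of an issue's documents is just an authenticated REST call. A minimal sketch, assuming a hypothetical `/api/issues/{id}/documents` route and a bearer-style agent key; neither is confirmed by this document:

```ts
// Sketch only: endpoint path and auth header are assumptions, not confirmed API.
async function listIssueDocuments(baseUrl: string, agentKey: string, issueId: string): Promise<unknown[]> {
  const res = await fetch(`${baseUrl}/api/issues/${issueId}/documents`, {
    headers: { Authorization: `Bearer ${agentKey}` },
  });
  if (!res.ok) throw new Error(`Failed to list documents: ${res.status}`);
  return (await res.json()) as unknown[];
}
```
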
|
||||
### Open Questions
|
||||
|
||||
@@ -476,15 +480,14 @@ Each is a distinct page/route:
|
||||
- [ ] **Default agent** — basic Claude Code/Codex loop with Paperclip skill
|
||||
- [ ] **Default CEO** — strategic planning, delegation, board communication
|
||||
- [ ] **Paperclip skill (SKILL.md)** — teaches agents to interact with the API
|
||||
- [ ] **REST API** — full API for agent interaction (Hono)
|
||||
- [ ] **REST API** — full API for agent interaction (Express)
|
||||
- [ ] **Web UI** — React/Vite: org chart, task board, dashboard, cost views
|
||||
- [ ] **Agent auth** — connection string generation with URL + key + instructions
|
||||
- [ ] **One-command dev setup** — embedded PGlite, everything local
|
||||
- [ ] **Multiple Adapter types** (HTTP Adapter, OpenClaw Adapter)
|
||||
- [ ] **Multiple Adapter types** (HTTP, OpenClaw gateway, and local coding adapters)
|
||||
|
||||
### Not V1
|
||||
|
||||
- Template export/import
|
||||
- Knowledge base - a future plugin
|
||||
- Advanced governance models (hiring budgets, multi-member boards)
|
||||
- Revenue/expense tracking beyond token costs - a future plugin
|
||||
@@ -509,7 +512,7 @@ Things Paperclip explicitly does **not** do:
|
||||
- **Not a SaaS** — single-tenant, self-hosted
|
||||
- **Not opinionated about Agent implementation** — any language, any framework, any runtime
|
||||
- **Not automatically self-healing** — surfaces problems, doesn't silently fix them
|
||||
- **Does not manage work artifacts** — no repo management, no deployment, no file systems
|
||||
- **Does not manage delivery infrastructure** — no repo management, no deployment, no file systems (but does manage task-linked documents and attachments)
|
||||
- **Does not auto-reassign work** — stale tasks are surfaced, not silently redistributed
|
||||
- **Does not track external revenue/expenses** — that's a future plugin. Token/LLM cost budgeting is core.
|
||||
|
||||
|
||||
@@ -16,14 +16,14 @@ By default this workflow does **not** mount your host repo checkout, your host h
|
||||
## Files
|
||||
|
||||
- `docker/untrusted-review/Dockerfile`
|
||||
- `docker-compose.untrusted-review.yml`
|
||||
- `docker/docker-compose.untrusted-review.yml`
|
||||
- `review-checkout-pr` inside the container
|
||||
|
||||
## Build and start a shell
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.untrusted-review.yml build
|
||||
docker compose -f docker-compose.untrusted-review.yml run --rm --service-ports review
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml build
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml run --rm --service-ports review
|
||||
```
|
||||
|
||||
That opens an interactive shell in the review container with:
|
||||
@@ -47,7 +47,7 @@ claude login
|
||||
If you prefer API-key auth instead of CLI login, pass keys through Compose env:
|
||||
|
||||
```sh
|
||||
OPENAI_API_KEY=... ANTHROPIC_API_KEY=... docker compose -f docker-compose.untrusted-review.yml run --rm review
|
||||
OPENAI_API_KEY=... ANTHROPIC_API_KEY=... docker compose -f docker/docker-compose.untrusted-review.yml run --rm review
|
||||
```
|
||||
|
||||
## Check out a PR safely
|
||||
@@ -117,7 +117,7 @@ Notes:
|
||||
Remove the review container volumes when you want a clean environment:
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.untrusted-review.yml down -v
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml down -v
|
||||
```
|
||||
|
||||
That deletes:
|
||||
|
||||
@@ -249,7 +249,7 @@ Runs local `claude` CLI directly.
|
||||
"cwd": "/absolute/or/relative/path",
|
||||
"promptTemplate": "You are agent {{agent.id}} ...",
|
||||
"model": "optional-model-id",
|
||||
"maxTurnsPerRun": 300,
|
||||
"maxTurnsPerRun": 1000,
|
||||
"dangerouslySkipPermissions": true,
|
||||
"env": {"KEY": "VALUE"},
|
||||
"extraArgs": [],
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
services:
|
||||
paperclip:
|
||||
build:
|
||||
context: .
|
||||
context: ..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PAPERCLIP_PORT:-3100}:3100"
|
||||
@@ -15,4 +15,4 @@ services:
|
||||
PAPERCLIP_PUBLIC_URL: "${PAPERCLIP_PUBLIC_URL:-http://localhost:3100}"
|
||||
BETTER_AUTH_SECRET: "${BETTER_AUTH_SECRET:?BETTER_AUTH_SECRET must be set}"
|
||||
volumes:
|
||||
- "${PAPERCLIP_DATA_DIR:-./data/docker-paperclip}:/paperclip"
|
||||
- "${PAPERCLIP_DATA_DIR:-../data/docker-paperclip}:/paperclip"
|
||||
@@ -1,7 +1,7 @@
|
||||
services:
|
||||
review:
|
||||
build:
|
||||
context: .
|
||||
context: ..
|
||||
dockerfile: docker/untrusted-review/Dockerfile
|
||||
init: true
|
||||
tty: true
|
||||
@@ -16,7 +16,9 @@ services:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
|
||||
server:
|
||||
build: .
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "3100:3100"
|
||||
environment:
|
||||
docker/quadlet/paperclip-db.container (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
[Unit]
|
||||
Description=PostgreSQL for Paperclip
|
||||
|
||||
[Container]
|
||||
Image=docker.io/library/postgres:17-alpine
|
||||
ContainerName=paperclip-db
|
||||
Pod=paperclip.pod
|
||||
Volume=paperclip-pgdata:/var/lib/postgresql/data
|
||||
EnvironmentFile=%h/.config/containers/systemd/paperclip.env
|
||||
HealthCmd=pg_isready -U $POSTGRES_USER -d $POSTGRES_DB -h localhost || exit 1
|
||||
HealthInterval=15s
|
||||
HealthTimeout=5s
|
||||
HealthRetries=5
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
TimeoutStartSec=60
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
docker/quadlet/paperclip.container (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
[Unit]
|
||||
Description=Paperclip AI Agent Orchestrator
|
||||
Requires=paperclip-db.service
|
||||
After=paperclip-db.service
|
||||
|
||||
[Container]
|
||||
Image=paperclip-local
|
||||
ContainerName=paperclip
|
||||
Pod=paperclip.pod
|
||||
Volume=%h/.local/share/paperclip:/paperclip:Z
|
||||
Environment=HOST=0.0.0.0
|
||||
Environment=PAPERCLIP_HOME=/paperclip
|
||||
Environment=PAPERCLIP_DEPLOYMENT_MODE=authenticated
|
||||
Environment=PAPERCLIP_DEPLOYMENT_EXPOSURE=private
|
||||
Environment=PAPERCLIP_PUBLIC_URL=http://localhost:3100
|
||||
EnvironmentFile=%h/.config/containers/systemd/paperclip.env
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
TimeoutStartSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
docker/quadlet/paperclip.pod (new file, 3 lines)
@@ -0,0 +1,3 @@
[Pod]
PodName=paperclip
PublishPort=3100:3100
@@ -20,7 +20,7 @@ The `claude_local` adapter runs Anthropic's Claude Code CLI locally. It supports
|
||||
| `env` | object | No | Environment variables (supports secret refs) |
|
||||
| `timeoutSec` | number | No | Process timeout (0 = no timeout) |
|
||||
| `graceSec` | number | No | Grace period before force-kill |
|
||||
| `maxTurnsPerRun` | number | No | Max agentic turns per heartbeat (defaults to `300`) |
|
||||
| `maxTurnsPerRun` | number | No | Max agentic turns per heartbeat (defaults to `1000`) |
|
||||
| `dangerouslySkipPermissions` | boolean | No | Skip permission prompts (dev only) |
|
||||
|
||||
## Prompt Templates
|
||||
|
||||
@@ -20,9 +20,12 @@ When a heartbeat fires, Paperclip:
|---------|----------|-------------|
| [Claude Local](/adapters/claude-local) | `claude_local` | Runs Claude Code CLI locally |
| [Codex Local](/adapters/codex-local) | `codex_local` | Runs OpenAI Codex CLI locally |
| [Gemini Local](/adapters/gemini-local) | `gemini_local` | Runs Gemini CLI locally |
| [Gemini Local](/adapters/gemini-local) | `gemini_local` | Runs Gemini CLI locally (experimental — adapter package exists, not yet in stable type enum) |
| OpenCode Local | `opencode_local` | Runs OpenCode CLI locally (multi-provider `provider/model`) |
| OpenClaw | `openclaw` | Sends wake payloads to an OpenClaw webhook |
| Hermes Local | `hermes_local` | Runs Hermes CLI locally |
| Cursor | `cursor` | Runs Cursor in background mode |
| Pi Local | `pi_local` | Runs an embedded Pi agent locally |
| OpenClaw Gateway | `openclaw_gateway` | Connects to an OpenClaw gateway endpoint |
| [Process](/adapters/process) | `process` | Executes arbitrary shell commands |
| [HTTP](/adapters/http) | `http` | Sends webhooks to external agents |

@@ -55,7 +58,7 @@ Three registries consume these modules:

## Choosing an Adapter

- **Need a coding agent?** Use `claude_local`, `codex_local`, `gemini_local`, or `opencode_local`
- **Need a coding agent?** Use `claude_local`, `codex_local`, `opencode_local`, or `hermes_local`
- **Need to run a script or command?** Use `process`
- **Need to call an external service?** Use `http`
- **Need something custom?** [Create your own adapter](/adapters/creating-an-adapter)

@@ -1,7 +1,7 @@
# Agent Runtime Guide

Status: User-facing guide
Last updated: 2026-02-17
Status: User-facing guide
Last updated: 2026-03-26
Audience: Operators setting up and running agents in Paperclip

## 1. What this system does
@@ -32,14 +32,19 @@ If an agent is already running, new wakeups are merged (coalesced) instead of la

## 3.1 Adapter choice

Common choices:
Built-in adapters:

- `claude_local`: runs your local `claude` CLI
- `codex_local`: runs your local `codex` CLI
- `opencode_local`: runs your local `opencode` CLI
- `hermes_local`: runs your local `hermes` CLI
- `cursor`: runs Cursor in background mode
- `pi_local`: runs an embedded Pi agent locally
- `openclaw_gateway`: connects to an OpenClaw gateway endpoint
- `process`: generic shell command adapter
- `http`: calls an external HTTP endpoint

For `claude_local` and `codex_local`, Paperclip assumes the CLI is already installed and authenticated on the host machine.
For local CLI adapters (`claude_local`, `codex_local`, `opencode_local`, `hermes_local`), Paperclip assumes the CLI is already installed and authenticated on the host machine.

## 3.2 Runtime behavior

@@ -69,6 +74,8 @@ You can set:

Templates support variables like `{{agent.id}}`, `{{agent.name}}`, and run context values.
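A minimal template sketch, using only the two documented agent variables (anything beyond `{{agent.id}}` and `{{agent.name}}` would depend on which run context values your adapter exposes):

```text
You are {{agent.name}} (agent id {{agent.id}}).
Review your assigned work for this heartbeat and post a brief status update when you finish.
```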

> **Note:** `bootstrapPromptTemplate` is deprecated and should not be used for new agents. Existing configs that use it will continue to work but should be migrated to the managed instructions bundle system.

## 4. Session resume behavior

Paperclip stores session IDs for resumable adapters.
@@ -133,7 +140,7 @@ If the connection drops, the UI reconnects automatically.

If runs fail repeatedly:

1. Check adapter command availability (`claude`/`codex` installed and logged in).
1. Check adapter command availability (e.g. `claude`/`codex`/`opencode`/`hermes` installed and logged in).
2. Verify `cwd` exists and is accessible.
3. Inspect run error + stderr excerpt, then full log.
4. Confirm timeout is not too low.
@@ -166,9 +173,9 @@ Start with least privilege where possible, and avoid exposing secrets in broad r

## 10. Minimal setup checklist

1. Choose adapter (`claude_local` or `codex_local`).
2. Set `cwd` to the target workspace.
3. Add bootstrap + normal prompt templates.
1. Choose adapter (e.g. `claude_local`, `codex_local`, `opencode_local`, `hermes_local`, `cursor`, or `openclaw_gateway`).
2. Set `cwd` to the target workspace (for local adapters).
3. Optionally add a prompt template (`promptTemplate`) or use the managed instructions bundle.
4. Configure heartbeat policy (timer and/or assignment wakeups).
5. Trigger a manual wakeup.
6. Confirm run succeeds and session/token usage is recorded.

@@ -33,6 +33,8 @@ Interactive first-time setup:
pnpm paperclipai onboard
```

If Paperclip is already configured, rerunning `onboard` keeps the existing config in place. Use `paperclipai configure` to change settings on an existing install.

First prompt:

1. `Quickstart` (recommended): local defaults (embedded database, no LLM provider, local disk storage, default secrets)
@@ -50,6 +52,8 @@ Non-interactive defaults + immediate start (opens browser on server listen):
pnpm paperclipai onboard --yes
```

On an existing install, `--yes` now preserves the current config and just starts Paperclip with that setup.

## `paperclipai doctor`

Health checks with optional auto-repair:

@@ -8,7 +8,7 @@ Run Paperclip in Docker without installing Node or pnpm locally.
## Compose Quickstart (Recommended)

```sh
docker compose -f docker-compose.quickstart.yml up --build
docker compose -f docker/docker-compose.quickstart.yml up --build
```

Open [http://localhost:3100](http://localhost:3100).
@@ -21,10 +21,12 @@ Defaults:
Override with environment variables:

```sh
PAPERCLIP_PORT=3200 PAPERCLIP_DATA_DIR=./data/pc \
docker compose -f docker-compose.quickstart.yml up --build
PAPERCLIP_PORT=3200 PAPERCLIP_DATA_DIR=../data/pc \
docker compose -f docker/docker-compose.quickstart.yml up --build
```

**Note:** `PAPERCLIP_DATA_DIR` is resolved relative to the compose file (`docker/`), so `../data/pc` maps to `data/pc` in the project root.

## Manual Docker Build

```sh

@@ -46,6 +46,8 @@
"guides/board-operator/managing-agents",
"guides/board-operator/org-structure",
"guides/board-operator/managing-tasks",
"guides/board-operator/execution-workspaces-and-runtime-services",
"guides/board-operator/delegation",
"guides/board-operator/approvals",
"guides/board-operator/costs-and-budgets",
"guides/board-operator/activity-log",

docs/feedback-voting.md (189 lines, Normal file)
@@ -0,0 +1,189 @@
|
||||
# Feedback Voting — Local Data Guide
|
||||
|
||||
When you rate an agent's response with **Helpful** (thumbs up) or **Needs work** (thumbs down), Paperclip saves your vote locally alongside your running instance. This guide covers what gets stored, how to access it, and how to export it.
|
||||
|
||||
## How voting works
|
||||
|
||||
1. Click **Helpful** or **Needs work** on any agent comment or document revision.
|
||||
2. If you click **Needs work**, an optional text prompt appears: _"What could have been better?"_ You can type a reason or dismiss it.
|
||||
3. A consent dialog asks whether to keep the vote local or share it. Your choice is remembered for future votes.
|
||||
|
||||
### What gets stored
|
||||
|
||||
Each vote creates two local records:
|
||||
|
||||
| Record | What it contains |
|
||||
|--------|-----------------|
|
||||
| **Vote** | Your vote (up/down), optional reason text, sharing preference, consent version, timestamp |
|
||||
| **Trace bundle** | Full context snapshot: the voted-on comment/revision text, issue title, agent info, your vote, and reason — everything needed to understand the feedback in isolation |
|
||||
|
||||
All data lives in your local Paperclip database. Nothing leaves your machine unless you explicitly choose to share.
|
||||
|
||||
When a vote is marked for sharing, Paperclip also queues the trace bundle for background export through the Telemetry Backend. The app server never uploads raw feedback trace bundles directly to object storage.
|
||||
|
||||
## Viewing your votes
|
||||
|
||||
### Quick report (terminal)
|
||||
|
||||
```bash
|
||||
pnpm paperclipai feedback report
|
||||
```
|
||||
|
||||
Shows a color-coded summary: vote counts, per-trace details with reasons, and export statuses.
|
||||
|
||||
```bash
|
||||
# Installed CLI
|
||||
paperclipai feedback report
|
||||
|
||||
# Point to a different server or company
|
||||
pnpm paperclipai feedback report --api-base http://127.0.0.1:3000 --company-id <company-id>
|
||||
|
||||
# Include raw payload dumps in the report
|
||||
pnpm paperclipai feedback report --payloads
|
||||
```
|
||||
|
||||
### API endpoints
|
||||
|
||||
All endpoints require board-user access (automatic in local dev).
|
||||
|
||||
**List votes for an issue:**
|
||||
```bash
|
||||
curl http://127.0.0.1:3102/api/issues/<issueId>/feedback-votes
|
||||
```
|
||||
|
||||
**List trace bundles for an issue (with full payloads):**
|
||||
```bash
|
||||
curl 'http://127.0.0.1:3102/api/issues/<issueId>/feedback-traces?includePayload=true'
|
||||
```
|
||||
|
||||
**List all traces company-wide:**
|
||||
```bash
|
||||
curl 'http://127.0.0.1:3102/api/companies/<companyId>/feedback-traces?includePayload=true'
|
||||
```
|
||||
|
||||
**Get a single trace envelope record:**
|
||||
```bash
|
||||
curl http://127.0.0.1:3102/api/feedback-traces/<traceId>
|
||||
```
|
||||
|
||||
**Get the full export bundle for a trace:**
|
||||
```bash
|
||||
curl http://127.0.0.1:3102/api/feedback-traces/<traceId>/bundle
|
||||
```
|
||||
|
||||
#### Filtering
|
||||
|
||||
The trace endpoints accept query parameters:
|
||||
|
||||
| Parameter | Values | Description |
|
||||
|-----------|--------|-------------|
|
||||
| `vote` | `up`, `down` | Filter by vote direction |
|
||||
| `status` | `local_only`, `pending`, `sent`, `failed` | Filter by export status |
|
||||
| `targetType` | `issue_comment`, `issue_document_revision` | Filter by what was voted on |
|
||||
| `sharedOnly` | `true` | Only show votes the user chose to share |
|
||||
| `includePayload` | `true` | Include the full context snapshot |
|
||||
| `from` / `to` | ISO date | Date range filter |
|
||||
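For example, the company-wide endpoint accepts several of these filters at once; the IDs and dates below are placeholders:

```bash
curl 'http://127.0.0.1:3102/api/companies/<companyId>/feedback-traces?vote=down&status=pending&sharedOnly=true&from=2026-03-01&to=2026-03-31'
```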
|
||||
## Exporting your data
|
||||
|
||||
### Export to files + zip
|
||||
|
||||
```bash
|
||||
pnpm paperclipai feedback export
|
||||
```
|
||||
|
||||
Creates a timestamped directory with:
|
||||
|
||||
```
|
||||
feedback-export-20260331T120000Z/
|
||||
index.json # manifest with summary stats
|
||||
votes/
|
||||
PAP-123-a1b2c3d4.json # vote metadata (one per vote)
|
||||
traces/
|
||||
PAP-123-e5f6g7h8.json # Paperclip feedback envelope (one per trace)
|
||||
full-traces/
|
||||
PAP-123-e5f6g7h8/
|
||||
bundle.json # full export manifest for the trace
|
||||
...raw adapter files # codex / claude / opencode session artifacts when available
|
||||
feedback-export-20260331T120000Z.zip
|
||||
```
|
||||
|
||||
Exports are full by default. `traces/` keeps the Paperclip envelope, while `full-traces/` contains the richer per-trace bundle plus any recoverable adapter-native files.
|
||||
|
||||
```bash
|
||||
# Custom server and output directory
|
||||
pnpm paperclipai feedback export --api-base http://127.0.0.1:3000 --company-id <company-id> --out ./my-export
|
||||
```
|
||||
|
||||
### Reading an exported trace
|
||||
|
||||
Open any file in `traces/` to see:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "trace-uuid",
|
||||
"vote": "down",
|
||||
"issueIdentifier": "PAP-123",
|
||||
"issueTitle": "Fix login timeout",
|
||||
"targetType": "issue_comment",
|
||||
"targetSummary": {
|
||||
"label": "Comment",
|
||||
"excerpt": "The first 80 chars of the comment that was voted on..."
|
||||
},
|
||||
"payloadSnapshot": {
|
||||
"vote": {
|
||||
"value": "down",
|
||||
"reason": "Did not address the root cause"
|
||||
},
|
||||
"target": {
|
||||
"body": "Full text of the agent comment..."
|
||||
},
|
||||
"issue": {
|
||||
"identifier": "PAP-123",
|
||||
"title": "Fix login timeout"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Open `full-traces/<issue>-<trace>/bundle.json` to see the expanded export metadata, including capture notes, adapter type, integrity metadata, and the inventory of raw files written alongside it.
|
||||
|
||||
Built-in local adapters now export their native session artifacts more directly:
|
||||
|
||||
- `codex_local`: `adapter/codex/session.jsonl`
|
||||
- `claude_local`: `adapter/claude/session.jsonl`, plus any `adapter/claude/session/...` sidecar files and `adapter/claude/debug.txt` when present
|
||||
- `opencode_local`: `adapter/opencode/session.json`, `adapter/opencode/messages/*.json`, and `adapter/opencode/parts/<messageId>/*.json`, with optional `project.json`, `todo.json`, and `session-diff.json`
|
||||
|
||||
## Sharing preferences
|
||||
|
||||
The first time you vote, a consent dialog asks:
|
||||
|
||||
- **Keep local** — vote is stored locally only (`sharedWithLabs: false`)
|
||||
- **Share this vote** — vote is marked for sharing (`sharedWithLabs: true`)
|
||||
|
||||
Your preference is saved per-company. You can change it any time via the feedback settings. Votes marked "keep local" are never queued for export.
|
||||
|
||||
## Data lifecycle
|
||||
|
||||
| Status | Meaning |
|
||||
|--------|---------|
|
||||
| `local_only` | Vote stored locally, not marked for sharing |
|
||||
| `pending` | Marked for sharing, waiting to be sent |
|
||||
| `sent` | Successfully transmitted |
|
||||
| `failed` | Transmission attempted but failed (will retry) |
|
||||
|
||||
Your local database always retains the full vote and trace data regardless of sharing status.
|
||||
|
||||
## Remote sync
|
||||
|
||||
Votes you choose to share are queued as `pending` traces and flushed by the server's background worker to the Telemetry Backend. The Telemetry Backend validates the request, then persists the bundle into its configured object storage.
|
||||
|
||||
- App server responsibility: build the bundle, POST it to Telemetry Backend, update trace status
|
||||
- Telemetry Backend responsibility: authenticate the request, validate payload shape, compress/store the bundle, return the final object key
|
||||
- Retry behavior: failed uploads move to `failed` with an error message in `failureReason`, and the worker retries them on later ticks
|
||||
|
||||
Exported objects use a deterministic key pattern so they are easy to inspect:
|
||||
|
||||
```text
|
||||
feedback-traces/<companyId>/YYYY/MM/DD/<exportId-or-traceId>.json
|
||||
```
|
||||
docs/guides/board-operator/delegation.md (122 lines, Normal file)
@@ -0,0 +1,122 @@
|
||||
---
|
||||
title: How Delegation Works
|
||||
summary: How the CEO breaks down goals into tasks and assigns them to agents
|
||||
---
|
||||
|
||||
Delegation is one of Paperclip's most powerful features. You set company goals, and the CEO agent automatically breaks them into tasks and assigns them to the right agents. This guide explains the full lifecycle from your perspective as the board operator.
|
||||
|
||||
## The Delegation Lifecycle
|
||||
|
||||
When you create a company goal, the CEO doesn't just acknowledge it — it builds a plan and mobilizes the team:
|
||||
|
||||
```
|
||||
You set a company goal
|
||||
→ CEO wakes up on heartbeat
|
||||
→ CEO proposes a strategy (creates an approval for you)
|
||||
→ You approve the strategy
|
||||
→ CEO breaks goals into tasks and assigns them to reports
|
||||
→ Reports wake up (heartbeat triggered by assignment)
|
||||
→ Reports execute work and update task status
|
||||
→ CEO monitors progress, unblocks, and escalates
|
||||
→ You see results in the dashboard and activity log
|
||||
```
|
||||
|
||||
Each step is traceable. Every task links back to the goal through a parent hierarchy, so you can always see why work is happening.
|
||||
|
||||
## What You Need to Do
|
||||
|
||||
Your role is strategic oversight, not task management. Here's what the delegation model expects from you:
|
||||
|
||||
1. **Set clear company goals.** The CEO works from these. Specific, measurable goals produce better delegation. "Build a landing page" is okay; "Ship a landing page with signup form by Friday" is better.
|
||||
|
||||
2. **Approve the CEO's strategy.** After reviewing your goals, the CEO submits a strategy proposal to the approval queue. Review it, then approve, reject, or request revisions.
|
||||
|
||||
3. **Approve hire requests.** When the CEO needs more capacity (e.g., a frontend engineer to build the landing page), it submits a hire request. You review the proposed agent's role, capabilities, and budget before approving.
|
||||
|
||||
4. **Monitor progress.** Use the dashboard and activity log to track how work is flowing. Check task status, agent activity, and completion rates.
|
||||
|
||||
5. **Intervene only when things stall.** If progress stops, check these in order:
|
||||
- Is an approval pending in your queue?
|
||||
- Is an agent paused or in an error state?
|
||||
- Is the CEO's budget exhausted (above 80%, it focuses on critical tasks only)?
|
||||
|
||||
## What the CEO Does Automatically
|
||||
|
||||
You do **not** need to tell the CEO to engage specific agents. After you approve its strategy, the CEO:
|
||||
|
||||
- **Breaks goals into concrete tasks** with clear descriptions, priorities, and acceptance criteria
|
||||
- **Assigns tasks to the right agent** based on role and capabilities (e.g., engineering tasks go to the CTO or engineers, marketing tasks go to the CMO)
|
||||
- **Creates subtasks** when work needs to be decomposed further
|
||||
- **Hires new agents** when the team lacks capacity for a goal (subject to your approval)
|
||||
- **Monitors progress** on each heartbeat, checking task status and unblocking reports
|
||||
- **Escalates to you** when it encounters something it can't resolve — budget issues, blocked approvals, or strategic ambiguity
|
||||
|
||||
## Common Delegation Patterns
|
||||
|
||||
### Flat Hierarchy (Small Teams)
|
||||
|
||||
For small companies with 3-5 agents, the CEO delegates directly to each report:
|
||||
|
||||
```
|
||||
CEO
|
||||
├── CTO (engineering tasks)
|
||||
├── CMO (marketing tasks)
|
||||
└── Designer (design tasks)
|
||||
```
|
||||
|
||||
The CEO assigns tasks directly. Each agent works independently and reports status back.
|
||||
|
||||
### Three-Level Hierarchy (Larger Teams)
|
||||
|
||||
For larger organizations, managers delegate further down the chain:
|
||||
|
||||
```
|
||||
CEO
|
||||
├── CTO
|
||||
│ ├── Backend Engineer
|
||||
│ └── Frontend Engineer
|
||||
└── CMO
|
||||
└── Content Writer
|
||||
```
|
||||
|
||||
The CEO assigns high-level tasks to the CTO and CMO. They break those into subtasks and assign them to their own reports. You only interact with the CEO — the rest happens automatically.
|
||||
|
||||
### Hire-on-Demand
|
||||
|
||||
The CEO can start as the only agent and hire as work requires:
|
||||
|
||||
1. You set a goal that needs engineering work
|
||||
2. The CEO proposes a strategy that includes hiring a CTO
|
||||
3. You approve the hire
|
||||
4. The CEO assigns engineering tasks to the new CTO
|
||||
5. As scope grows, the CTO may request to hire engineers
|
||||
|
||||
This pattern lets you start small and scale the team based on actual work, not upfront planning.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Why isn't the CEO delegating?"
|
||||
|
||||
If you've set a goal but nothing is happening, check these common causes:
|
||||
|
||||
| Check | What to look for |
|
||||
|-------|-----------------|
|
||||
| **Approval queue** | The CEO may have submitted a strategy or hire request that's waiting for your approval. This is the most common reason. |
|
||||
| **Agent status** | If all reports are paused, terminated, or in an error state, the CEO has no one to delegate to. Check the Agents page. |
|
||||
| **Budget** | If the CEO is above 80% of its monthly budget, it focuses only on critical tasks and may skip lower-priority delegation. |
|
||||
| **Goals** | If no company goals are set, the CEO has nothing to work from. Create a goal first. |
|
||||
| **Heartbeat** | Is the CEO's heartbeat enabled and running? Check the agent detail page for recent heartbeat history. |
|
||||
| **Agent instructions** | The CEO's delegation behavior is driven by its `AGENTS.md` instructions file. Open the CEO agent's detail page and verify that its instructions path is set and that the file includes delegation directives (subtask creation, hiring, assignment). If AGENTS.md is missing or doesn't mention delegation, the CEO won't know to break down goals and assign work. |
|
||||
|
||||
### "Do I have to tell the CEO to engage engineering and marketing?"
|
||||
|
||||
**No.** The CEO will delegate automatically after you approve its strategy. It knows the org chart and assigns tasks based on each agent's role and capabilities. You set the goal and approve the plan — the CEO handles task breakdown and assignment.
|
||||
|
||||
### "A task seems stuck"
|
||||
|
||||
If a specific task isn't progressing:
|
||||
|
||||
1. Check the task's comment thread — the assigned agent may have posted a blocker
|
||||
2. Check if the task is in `blocked` status — read the blocker comment to understand why
|
||||
3. Check the assigned agent's status — it may be paused or over budget
|
||||
4. If the agent is stuck, you can reassign the task or add a comment with guidance
|
||||
@@ -0,0 +1,68 @@
|
||||
---
|
||||
title: Execution Workspaces And Runtime Services
|
||||
summary: How project runtime configuration, execution workspaces, and issue runs fit together
|
||||
---
|
||||
|
||||
This guide documents the intended runtime model for projects, execution workspaces, and issue runs in Paperclip.
|
||||
|
||||
## Project runtime configuration
|
||||
|
||||
You can define how to run a project on the project workspace itself.
|
||||
|
||||
- Project workspace runtime config describes how to run services for that project checkout.
|
||||
- This is the default runtime configuration that child execution workspaces may inherit.
|
||||
- Defining the config does not start anything by itself.
|
||||
|
||||
## Manual runtime control
|
||||
|
||||
Runtime services are manually controlled from the UI.
|
||||
|
||||
- Project workspace runtime services are started and stopped from the project workspace UI.
|
||||
- Execution workspace runtime services are started and stopped from the execution workspace UI.
|
||||
- Paperclip does not automatically start or stop these runtime services as part of issue execution.
|
||||
- Paperclip also does not automatically restart workspace runtime services on server boot.
|
||||
|
||||
## Execution workspace inheritance
|
||||
|
||||
Execution workspaces isolate code and runtime state from the project primary workspace.
|
||||
|
||||
- An isolated execution workspace has its own checkout path, branch, and local runtime instance.
|
||||
- The runtime configuration may inherit from the linked project workspace by default.
|
||||
- The execution workspace may override that runtime configuration with its own workspace-specific settings.
|
||||
- The inherited configuration answers "how to run the service", but the running process is still specific to that execution workspace.
|
||||
|
||||
## Issues and execution workspaces
|
||||
|
||||
Issues are attached to execution workspace behavior, not to automatic runtime management.
|
||||
|
||||
- An issue may create a new execution workspace when you choose an isolated workspace mode.
|
||||
- An issue may reuse an existing execution workspace when you choose reuse.
|
||||
- Multiple issues may intentionally share one execution workspace so they can work against the same branch and running runtime services.
|
||||
- Assigning or running an issue does not automatically start or stop runtime services for that workspace.
|
||||
|
||||
## Execution workspace lifecycle
|
||||
|
||||
Execution workspaces are durable until a human closes them.
|
||||
|
||||
- The UI can archive an execution workspace.
|
||||
- Closing an execution workspace stops its runtime services and cleans up its workspace artifacts when allowed.
|
||||
- Shared workspaces that point at the project primary checkout are treated more conservatively during cleanup than disposable isolated workspaces.
|
||||
|
||||
## Resolved workspace logic during heartbeat runs
|
||||
|
||||
Heartbeat still resolves a workspace for the run, but that is about code location and session continuity, not runtime-service control.
|
||||
|
||||
1. Heartbeat resolves a base workspace for the run.
|
||||
2. Paperclip realizes the effective execution workspace, including creating or reusing a worktree when needed.
|
||||
3. Paperclip persists execution-workspace metadata such as paths, refs, and provisioning settings.
|
||||
4. Heartbeat passes the resolved code workspace to the agent run.
|
||||
5. Workspace runtime services remain manual UI-managed controls rather than automatic heartbeat-managed services.
|
||||
|
||||
## Current implementation guarantees
|
||||
|
||||
With the current implementation:
|
||||
|
||||
- Project workspace runtime config is the fallback for execution workspace UI controls.
|
||||
- Execution workspace runtime overrides are stored on the execution workspace.
|
||||
- Heartbeat runs do not auto-start workspace runtime services.
|
||||
- Server startup does not auto-restart workspace runtime services.
|
||||
@@ -29,7 +29,7 @@ Create agents from the Agents page. Each agent requires:
|
||||
|
||||
Common adapter choices:
|
||||
- `claude_local` / `codex_local` / `opencode_local` for local coding agents
|
||||
- `openclaw` / `http` for webhook-based external agents
|
||||
- `openclaw_gateway` / `http` for webhook-based external agents
|
||||
- `process` for generic local command execution
|
||||
|
||||
For `opencode_local`, configure an explicit `adapterConfig.model` (`provider/model`).
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: Core Concepts
|
||||
summary: Companies, agents, issues, heartbeats, and governance
|
||||
summary: Companies, agents, issues, delegation, heartbeats, and governance
|
||||
---
|
||||
|
||||
Paperclip organizes autonomous AI work around five key concepts.
|
||||
Paperclip organizes autonomous AI work around six key concepts.
|
||||
|
||||
## Company
|
||||
|
||||
@@ -50,6 +50,17 @@ Terminal states: `done`, `cancelled`.
|
||||
|
||||
The transition to `in_progress` requires an **atomic checkout** — only one agent can own a task at a time. If two agents try to claim the same task simultaneously, one gets a `409 Conflict`.
|
||||
|
||||
## Delegation
|
||||
|
||||
The CEO is the primary delegator. When you set company goals, the CEO:
|
||||
|
||||
1. Creates a strategy and submits it for your approval
|
||||
2. Breaks approved goals into tasks
|
||||
3. Assigns tasks to agents based on their role and capabilities
|
||||
4. Hires new agents when needed (subject to your approval)
|
||||
|
||||
You don't need to manually assign every task — set the goals and let the CEO organize the work. You approve key decisions (strategy, hiring) and monitor progress. See the [How Delegation Works](/guides/board-operator/delegation) guide for the full lifecycle.
|
||||
|
||||
## Heartbeats
|
||||
|
||||
Agents don't run continuously. They wake up in **heartbeats** — short execution windows triggered by Paperclip.
|
||||
|
||||
@@ -13,6 +13,8 @@ npx paperclipai onboard --yes
|
||||
|
||||
This walks you through setup, configures your environment, and gets Paperclip running.
|
||||
|
||||
If you already have a Paperclip install, rerunning `onboard` keeps your current config and data paths intact. Use `paperclipai configure` if you want to edit settings.
|
||||
|
||||
To start Paperclip again later:
|
||||
|
||||
```sh
|
||||
|
||||
package.json (20 lines)
@@ -3,9 +3,11 @@
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "node scripts/dev-runner.mjs watch",
|
||||
"dev:watch": "node scripts/dev-runner.mjs watch",
|
||||
"dev:once": "node scripts/dev-runner.mjs dev",
|
||||
"dev": "pnpm --filter @paperclipai/server exec tsx ../scripts/dev-runner.ts watch",
|
||||
"dev:watch": "pnpm --filter @paperclipai/server exec tsx ../scripts/dev-runner.ts watch",
|
||||
"dev:once": "pnpm --filter @paperclipai/server exec tsx ../scripts/dev-runner.ts dev",
|
||||
"dev:list": "pnpm --filter @paperclipai/server exec tsx ../scripts/dev-service.ts list",
|
||||
"dev:stop": "pnpm --filter @paperclipai/server exec tsx ../scripts/dev-service.ts stop",
|
||||
"dev:server": "pnpm --filter @paperclipai/server dev",
|
||||
"dev:ui": "pnpm --filter @paperclipai/ui dev",
|
||||
"build": "pnpm -r build",
|
||||
@@ -32,11 +34,12 @@
|
||||
"test:e2e:headed": "npx playwright test --config tests/e2e/playwright.config.ts --headed",
|
||||
"evals:smoke": "cd evals/promptfoo && npx promptfoo@0.103.3 eval",
|
||||
"test:release-smoke": "npx playwright test --config tests/release-smoke/playwright.config.ts",
|
||||
"test:release-smoke:headed": "npx playwright test --config tests/release-smoke/playwright.config.ts --headed"
|
||||
"test:release-smoke:headed": "npx playwright test --config tests/release-smoke/playwright.config.ts --headed",
|
||||
"metrics:paperclip-commits": "tsx scripts/paperclip-commit-metrics.ts"
|
||||
},
|
||||
"devDependencies": {
|
||||
"cross-env": "^10.1.0",
|
||||
"@playwright/test": "^1.58.2",
|
||||
"cross-env": "^10.1.0",
|
||||
"esbuild": "^0.27.3",
|
||||
"typescript": "^5.7.3",
|
||||
"vitest": "^3.0.5"
|
||||
@@ -44,5 +47,10 @@
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"packageManager": "pnpm@9.15.4"
|
||||
"packageManager": "pnpm@9.15.4",
|
||||
"pnpm": {
|
||||
"patchedDependencies": {
|
||||
"embedded-postgres@18.1.0-beta.16": "patches/embedded-postgres@18.1.0-beta.16.patch"
|
||||
}
|
||||
}
|
||||
}
|
||||
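The `patchedDependencies` entry above pins a local patch file for `embedded-postgres`. A sketch of how such a patch is typically regenerated with pnpm's standard patch workflow (the checkout path is whatever `pnpm patch` prints; nothing here is specific to this repo):

```sh
# Open a temporary, editable copy of the package
pnpm patch embedded-postgres@18.1.0-beta.16
# ...edit the files in the directory pnpm printed, then write the patch back:
pnpm patch-commit <path-printed-by-pnpm-patch>
```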
|
||||
@@ -201,6 +201,33 @@ export function redactEnvForLogs(env: Record<string, string>): Record<string, st
|
||||
return redacted;
|
||||
}
|
||||
|
||||
export function buildInvocationEnvForLogs(
|
||||
env: Record<string, string>,
|
||||
options: {
|
||||
runtimeEnv?: NodeJS.ProcessEnv | Record<string, string>;
|
||||
includeRuntimeKeys?: string[];
|
||||
resolvedCommand?: string | null;
|
||||
resolvedCommandEnvKey?: string;
|
||||
} = {},
|
||||
): Record<string, string> {
|
||||
const merged: Record<string, string> = { ...env };
|
||||
const runtimeEnv = options.runtimeEnv ?? {};
|
||||
|
||||
for (const key of options.includeRuntimeKeys ?? []) {
|
||||
if (key in merged) continue;
|
||||
const value = runtimeEnv[key];
|
||||
if (typeof value !== "string" || value.length === 0) continue;
|
||||
merged[key] = value;
|
||||
}
|
||||
|
||||
const resolvedCommand = options.resolvedCommand?.trim();
|
||||
if (resolvedCommand) {
|
||||
merged[options.resolvedCommandEnvKey ?? "PAPERCLIP_RESOLVED_COMMAND"] = resolvedCommand;
|
||||
}
|
||||
|
||||
return redactEnvForLogs(merged);
|
||||
}
|
||||
|
||||
export function buildPaperclipEnv(agent: { id: string; companyId: string }): Record<string, string> {
|
||||
const resolveHostForUrl = (rawHost: string): string => {
|
||||
const host = rawHost.trim();
|
||||
@@ -269,6 +296,10 @@ async function resolveCommandPath(command: string, cwd: string, env: NodeJS.Proc
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function resolveCommandForLogs(command: string, cwd: string, env: NodeJS.ProcessEnv): Promise<string> {
|
||||
return (await resolveCommandPath(command, cwd, env)) ?? command;
|
||||
}
|
||||
|
||||
function quoteForCmd(arg: string) {
|
||||
if (!arg.length) return '""';
|
||||
const escaped = arg.replace(/"/g, '""');
|
||||
|
||||
@@ -287,6 +287,12 @@ export interface ServerAdapterModule {
|
||||
* without knowing provider-specific credential paths or API shapes.
|
||||
*/
|
||||
getQuotaWindows?: () => Promise<ProviderQuotaResult>;
|
||||
/**
|
||||
* Optional: detect the currently configured model from local config files.
|
||||
* Returns the detected model/provider and the config source, or null if
|
||||
* the adapter does not support detection or no config is found.
|
||||
*/
|
||||
detectModel?: () => Promise<{ model: string; provider: string; source: string } | null>;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@@ -17,6 +17,27 @@ function asErrorText(value: unknown): string {
|
||||
}
|
||||
}
|
||||
|
||||
function printToolResult(block: Record<string, unknown>): void {
|
||||
const isError = block.is_error === true;
|
||||
let text = "";
|
||||
if (typeof block.content === "string") {
|
||||
text = block.content;
|
||||
} else if (Array.isArray(block.content)) {
|
||||
const parts: string[] = [];
|
||||
for (const part of block.content) {
|
||||
if (typeof part !== "object" || part === null || Array.isArray(part)) continue;
|
||||
const record = part as Record<string, unknown>;
|
||||
if (typeof record.text === "string") parts.push(record.text);
|
||||
}
|
||||
text = parts.join("\n");
|
||||
}
|
||||
|
||||
console.log((isError ? pc.red : pc.cyan)(`tool_result${isError ? " (error)" : ""}`));
|
||||
if (text) {
|
||||
console.log((isError ? pc.red : pc.gray)(text));
|
||||
}
|
||||
}
|
||||
|
||||
export function printClaudeStreamEvent(raw: string, debug: boolean): void {
|
||||
const line = raw.trim();
|
||||
if (!line) return;
|
||||
@@ -51,6 +72,9 @@ export function printClaudeStreamEvent(raw: string, debug: boolean): void {
|
||||
if (blockType === "text") {
|
||||
const text = typeof block.text === "string" ? block.text : "";
|
||||
if (text) console.log(pc.green(`assistant: ${text}`));
|
||||
} else if (blockType === "thinking") {
|
||||
const text = typeof block.thinking === "string" ? block.thinking : "";
|
||||
if (text) console.log(pc.gray(`thinking: ${text}`));
|
||||
} else if (blockType === "tool_use") {
|
||||
const name = typeof block.name === "string" ? block.name : "unknown";
|
||||
console.log(pc.yellow(`tool_call: ${name}`));
|
||||
@@ -62,6 +86,22 @@ export function printClaudeStreamEvent(raw: string, debug: boolean): void {
|
||||
return;
|
||||
}
|
||||
|
||||
if (type === "user") {
|
||||
const message =
|
||||
typeof parsed.message === "object" && parsed.message !== null && !Array.isArray(parsed.message)
|
||||
? (parsed.message as Record<string, unknown>)
|
||||
: {};
|
||||
const content = Array.isArray(message.content) ? message.content : [];
|
||||
for (const blockRaw of content) {
|
||||
if (typeof blockRaw !== "object" || blockRaw === null || Array.isArray(blockRaw)) continue;
|
||||
const block = blockRaw as Record<string, unknown>;
|
||||
if (typeof block.type === "string" && block.type === "tool_result") {
|
||||
printToolResult(block);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (type === "result") {
|
||||
const usage =
|
||||
typeof parsed.usage === "object" && parsed.usage !== null && !Array.isArray(parsed.usage)
|
||||
|
||||
@@ -26,7 +26,7 @@ Core fields:
|
||||
- extraArgs (string[], optional): additional CLI args
|
||||
- env (object, optional): KEY=VALUE environment variables
|
||||
- workspaceStrategy (object, optional): execution workspace strategy; currently supports { type: "git_worktree", baseRef?, branchTemplate?, worktreeParentDir? }
|
||||
- workspaceRuntime (object, optional): workspace runtime service intents; local host-managed services are realized before Claude starts and exposed back via context/env
|
||||
- workspaceRuntime (object, optional): reserved for workspace runtime metadata; workspace runtime services are manually controlled from the workspace UI and are not auto-started by heartbeats
|
||||
|
||||
Operational fields:
|
||||
- timeoutSec (number, optional): run timeout in seconds
|
||||
|
||||
@@ -14,10 +14,11 @@ import {
|
||||
buildPaperclipEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
joinPromptSections,
|
||||
redactEnvForLogs,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePathInEnv,
|
||||
resolveCommandForLogs,
|
||||
renderTemplate,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
@@ -68,11 +69,13 @@ interface ClaudeExecutionInput {
|
||||
|
||||
interface ClaudeRuntimeConfig {
|
||||
command: string;
|
||||
resolvedCommand: string;
|
||||
cwd: string;
|
||||
workspaceId: string | null;
|
||||
workspaceRepoUrl: string | null;
|
||||
workspaceRepoRef: string | null;
|
||||
env: Record<string, string>;
|
||||
loggedEnv: Record<string, string>;
|
||||
timeoutSec: number;
|
||||
graceSec: number;
|
||||
extraArgs: string[];
|
||||
@@ -236,6 +239,12 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
|
||||
const runtimeEnv = ensurePathInEnv({ ...process.env, ...env });
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME", "CLAUDE_CONFIG_DIR"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
@@ -247,11 +256,13 @@ async function buildClaudeRuntimeConfig(input: ClaudeExecutionInput): Promise<Cl
|
||||
|
||||
return {
|
||||
command,
|
||||
resolvedCommand,
|
||||
cwd,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
env,
|
||||
loggedEnv,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
extraArgs,
|
||||
@@ -324,11 +335,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
});
|
||||
const {
|
||||
command,
|
||||
resolvedCommand,
|
||||
cwd,
|
||||
workspaceId,
|
||||
workspaceRepoUrl,
|
||||
workspaceRepoRef,
|
||||
env,
|
||||
loggedEnv,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
extraArgs,
|
||||
@@ -440,11 +453,11 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "claude_local",
|
||||
command,
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
commandArgs: args,
|
||||
commandNotes,
|
||||
env: redactEnvForLogs(env),
|
||||
env: loggedEnv,
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
|
||||
@@ -24,7 +24,7 @@ Core fields:
|
||||
- cwd (string, optional): default absolute working directory fallback for the agent process (created if missing when possible)
|
||||
- instructionsFilePath (string, optional): absolute path to a markdown instructions file prepended to stdin prompt at runtime
|
||||
- model (string, optional): Codex model id
|
||||
- modelReasoningEffort (string, optional): reasoning effort override (minimal|low|medium|high) passed via -c model_reasoning_effort=...
|
||||
- modelReasoningEffort (string, optional): reasoning effort override (minimal|low|medium|high|xhigh) passed via -c model_reasoning_effort=...
|
||||
- promptTemplate (string, optional): run prompt template
|
||||
- search (boolean, optional): run codex with --search
|
||||
- dangerouslyBypassApprovalsAndSandbox (boolean, optional): run with bypass flag
|
||||
@@ -32,7 +32,7 @@ Core fields:
|
||||
- extraArgs (string[], optional): additional CLI args
|
||||
- env (object, optional): KEY=VALUE environment variables
|
||||
- workspaceStrategy (object, optional): execution workspace strategy; currently supports { type: "git_worktree", baseRef?, branchTemplate?, worktreeParentDir? }
|
||||
- workspaceRuntime (object, optional): workspace runtime service intents; local host-managed services are realized before Codex starts and exposed back via context/env
|
||||
- workspaceRuntime (object, optional): reserved for workspace runtime metadata; workspace runtime services are manually controlled from the workspace UI and are not auto-started by heartbeats
|
||||
|
||||
Operational fields:
|
||||
- timeoutSec (number, optional): run timeout in seconds
|
||||
@@ -42,7 +42,7 @@ Notes:
|
||||
- Prompts are piped via stdin (Codex receives "-" prompt argument).
|
||||
- If instructionsFilePath is configured, Paperclip prepends that file's contents to the stdin prompt on every run.
|
||||
- Codex exec automatically applies repo-scoped AGENTS.md instructions from the active workspace. Paperclip cannot suppress that discovery in exec mode, so repo AGENTS.md files may still apply even when you only configured an explicit instructionsFilePath.
|
||||
- Paperclip injects desired local skills into the active workspace's ".agents/skills" directory at execution time so Codex can discover "$paperclip" and related skills without coupling them to the user's login home.
|
||||
- Paperclip injects desired local skills into the effective CODEX_HOME/skills/ directory at execution time so Codex can discover "$paperclip" and related skills without polluting the project working directory. In managed-home mode (the default) this is ~/.paperclip/instances/<id>/companies/<companyId>/codex-home/skills/; when CODEX_HOME is explicitly overridden in adapter config, that override is used instead.
|
||||
- Unless explicitly overridden in adapter config, Paperclip runs Codex with a per-company managed CODEX_HOME under the active Paperclip instance and seeds auth/config from the shared Codex home (the CODEX_HOME env var, when set, or ~/.codex).
|
||||
- Some model/tool combinations reject certain effort levels (for example minimal with web search enabled).
|
||||
- When Paperclip realizes a workspace/runtime for a run, it injects PAPERCLIP_WORKSPACE_* and PAPERCLIP_RUNTIME_* env vars for agent-side tooling.
|
||||
|
||||
@@ -9,19 +9,20 @@ import {
|
||||
asStringArray,
|
||||
parseObject,
|
||||
buildPaperclipEnv,
|
||||
redactEnvForLogs,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
ensurePathInEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolveCommandForLogs,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
renderTemplate,
|
||||
joinPromptSections,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { parseCodexJsonl, isCodexUnknownSessionError } from "./parse.js";
|
||||
import { pathExists, prepareManagedCodexHome, resolveManagedCodexHomeDir } from "./codex-home.js";
|
||||
import { pathExists, prepareManagedCodexHome, resolveManagedCodexHomeDir, resolveSharedCodexHomeDir } from "./codex-home.js";
|
||||
import { resolveCodexDesiredSkillNames } from "./skills.js";
|
||||
|
||||
const __moduleDir = path.dirname(fileURLToPath(import.meta.url));
|
||||
@@ -135,8 +136,8 @@ async function pruneBrokenUnavailablePaperclipSkillSymlinks(
|
||||
}
|
||||
}
|
||||
|
||||
function resolveCodexWorkspaceSkillsDir(cwd: string): string {
|
||||
return path.join(cwd, ".agents", "skills");
|
||||
function resolveCodexSkillsDir(codexHome: string): string {
|
||||
return path.join(codexHome, "skills");
|
||||
}
|
||||
|
||||
type EnsureCodexSkillsInjectedOptions = {
|
||||
@@ -157,7 +158,7 @@ export async function ensureCodexSkillsInjected(
|
||||
const skillsEntries = allSkillsEntries.filter((entry) => desiredSet.has(entry.key));
|
||||
if (skillsEntries.length === 0) return;
|
||||
|
||||
const skillsHome = options.skillsHome ?? resolveCodexWorkspaceSkillsDir(process.cwd());
|
||||
const skillsHome = options.skillsHome ?? resolveCodexSkillsDir(resolveSharedCodexHomeDir());
|
||||
await fs.mkdir(skillsHome, { recursive: true });
|
||||
const linkSkill = options.linkSkill;
|
||||
for (const entry of skillsEntries) {
|
||||
@@ -273,11 +274,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const defaultCodexHome = resolveManagedCodexHomeDir(process.env, agent.companyId);
|
||||
const effectiveCodexHome = configuredCodexHome ?? preparedManagedCodexHome ?? defaultCodexHome;
|
||||
await fs.mkdir(effectiveCodexHome, { recursive: true });
|
||||
const codexWorkspaceSkillsDir = resolveCodexWorkspaceSkillsDir(cwd);
|
||||
// Inject skills into the same CODEX_HOME that Codex will actually run with
|
||||
// (managed home in the default case, or an explicit override from adapter config).
|
||||
const codexSkillsDir = resolveCodexSkillsDir(effectiveCodexHome);
|
||||
await ensureCodexSkillsInjected(
|
||||
onLog,
|
||||
{
|
||||
skillsHome: codexWorkspaceSkillsDir,
|
||||
skillsHome: codexSkillsDir,
|
||||
skillsEntries: codexSkillEntries,
|
||||
desiredSkillNames,
|
||||
},
|
||||
@@ -381,6 +384,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const billingType = resolveCodexBillingType(effectiveEnv);
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
@@ -488,14 +497,14 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "codex_local",
|
||||
command,
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
commandNotes,
|
||||
commandArgs: args.map((value, idx) => {
|
||||
if (idx === args.length - 1 && value !== "-") return `<prompt ${prompt.length} chars>`;
|
||||
return value;
|
||||
}),
|
||||
env: redactEnvForLogs(env),
|
||||
env: loggedEnv,
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
import { EventEmitter } from "node:events";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { ChildProcess } from "node:child_process";
|
||||
import { describe, expect, it, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
const { mockSpawn } = vi.hoisted(() => ({
|
||||
mockSpawn: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("node:child_process", async (importOriginal) => {
|
||||
const cp = await importOriginal<typeof import("node:child_process")>();
|
||||
return {
|
||||
...cp,
|
||||
spawn: (...args: Parameters<typeof cp.spawn>) => mockSpawn(...args) as ReturnType<typeof cp.spawn>,
|
||||
};
|
||||
});
|
||||
|
||||
import { getQuotaWindows } from "./quota.js";
|
||||
|
||||
function createChildThatErrorsOnMicrotask(err: Error): ChildProcess {
|
||||
const child = new EventEmitter() as ChildProcess;
|
||||
const stream = Object.assign(new EventEmitter(), {
|
||||
setEncoding: () => {},
|
||||
});
|
||||
Object.assign(child, {
|
||||
stdout: stream,
|
||||
stderr: Object.assign(new EventEmitter(), { setEncoding: () => {} }),
|
||||
stdin: { write: vi.fn(), end: vi.fn() },
|
||||
kill: vi.fn(),
|
||||
});
|
||||
queueMicrotask(() => {
|
||||
child.emit("error", err);
|
||||
});
|
||||
return child;
|
||||
}
|
||||
|
||||
describe("CodexRpcClient spawn failures", () => {
|
||||
let previousCodexHome: string | undefined;
|
||||
let isolatedCodexHome: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
mockSpawn.mockReset();
|
||||
// After the RPC path fails, getQuotaWindows() calls readCodexToken() which
|
||||
// reads $CODEX_HOME/auth.json (default ~/.codex). Point CODEX_HOME at an
|
||||
// empty temp directory so we never hit real host auth or the WHAM network.
|
||||
previousCodexHome = process.env.CODEX_HOME;
|
||||
isolatedCodexHome = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-codex-spawn-test-"));
|
||||
process.env.CODEX_HOME = isolatedCodexHome;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (isolatedCodexHome) {
|
||||
try {
|
||||
fs.rmSync(isolatedCodexHome, { recursive: true, force: true });
|
||||
} catch {
|
||||
/* ignore */
|
||||
}
|
||||
isolatedCodexHome = undefined;
|
||||
}
|
||||
if (previousCodexHome === undefined) {
|
||||
delete process.env.CODEX_HOME;
|
||||
} else {
|
||||
process.env.CODEX_HOME = previousCodexHome;
|
||||
}
|
||||
});
|
||||
|
||||
it("does not crash the process when codex is missing; getQuotaWindows returns ok: false", async () => {
|
||||
const enoent = Object.assign(new Error("spawn codex ENOENT"), {
|
||||
code: "ENOENT",
|
||||
errno: -2,
|
||||
syscall: "spawn codex",
|
||||
path: "codex",
|
||||
});
|
||||
mockSpawn.mockImplementation(() => createChildThatErrorsOnMicrotask(enoent));
|
||||
|
||||
const result = await getQuotaWindows();
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.windows).toEqual([]);
|
||||
expect(result.error).toContain("Codex app-server");
|
||||
expect(result.error).toContain("spawn codex ENOENT");
|
||||
});
|
||||
});
|
||||
@@ -107,8 +107,8 @@ function parsePlanAndEmailFromToken(idToken: string | null, accessToken: string
|
||||
return { email: null, planType: null };
|
||||
}
|
||||
|
||||
export async function readCodexAuthInfo(): Promise<CodexAuthInfo | null> {
|
||||
const authPath = path.join(codexHomeDir(), "auth.json");
|
||||
export async function readCodexAuthInfo(codexHome?: string): Promise<CodexAuthInfo | null> {
|
||||
const authPath = path.join(codexHome ?? codexHomeDir(), "auth.json");
|
||||
let raw: string;
|
||||
try {
|
||||
raw = await fs.readFile(authPath, "utf8");
|
||||
@@ -432,6 +432,13 @@ class CodexRpcClient {
|
||||
}
|
||||
this.pending.clear();
|
||||
});
|
||||
this.proc.on("error", (err: Error) => {
|
||||
for (const request of this.pending.values()) {
|
||||
clearTimeout(request.timer);
|
||||
request.reject(err);
|
||||
}
|
||||
this.pending.clear();
|
||||
});
|
||||
}
|
||||
|
||||
private onStdout(chunk: string) {
|
||||
|
||||
@@ -31,7 +31,7 @@ async function buildCodexSkillSnapshot(
|
||||
sourcePath: entry.source,
|
||||
targetPath: null,
|
||||
detail: desiredSet.has(entry.key)
|
||||
? "Will be linked into the workspace .agents/skills directory on the next run."
|
||||
? "Will be linked into the effective CODEX_HOME/skills/ directory on the next run."
|
||||
: null,
|
||||
required: Boolean(entry.required),
|
||||
requiredReason: entry.requiredReason ?? null,
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import path from "node:path";
|
||||
import { parseCodexJsonl } from "./parse.js";
|
||||
import { codexHomeDir, readCodexAuthInfo } from "./quota.js";
|
||||
|
||||
function summarizeStatus(checks: AdapterEnvironmentCheck[]): AdapterEnvironmentTestResult["status"] {
|
||||
if (checks.some((check) => check.level === "error")) return "fail";
|
||||
@@ -108,12 +109,23 @@ export async function testEnvironment(
|
||||
detail: `Detected in ${source}.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "codex_openai_api_key_missing",
|
||||
level: "warn",
|
||||
message: "OPENAI_API_KEY is not set. Codex runs may fail until authentication is configured.",
|
||||
hint: "Set OPENAI_API_KEY in adapter env, shell environment, or Codex auth configuration.",
|
||||
});
|
||||
const codexHome = isNonEmpty(env.CODEX_HOME) ? env.CODEX_HOME : undefined;
|
||||
const codexAuth = await readCodexAuthInfo(codexHome).catch(() => null);
|
||||
if (codexAuth) {
|
||||
checks.push({
|
||||
code: "codex_native_auth_present",
|
||||
level: "info",
|
||||
message: "Codex is authenticated via its own auth configuration.",
|
||||
detail: codexAuth.email ? `Logged in as ${codexAuth.email}.` : `Credentials found in ${path.join(codexHome ?? codexHomeDir(), "auth.json")}.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "codex_openai_api_key_missing",
|
||||
level: "warn",
|
||||
message: "OPENAI_API_KEY is not set. Codex runs may fail until authentication is configured.",
|
||||
hint: "Set OPENAI_API_KEY in adapter env, shell environment, or run `codex auth` to log in.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const canRunProbe =
|
||||
|
||||
packages/adapters/codex-local/vitest.config.ts (7 lines, Normal file)
@@ -0,0 +1,7 @@
|
||||
import { defineConfig } from "vitest/config";
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
environment: "node",
|
||||
},
|
||||
});
|
||||
@@ -9,12 +9,13 @@ import {
|
||||
asStringArray,
|
||||
parseObject,
|
||||
buildPaperclipEnv,
|
||||
redactEnvForLogs,
|
||||
buildInvocationEnvForLogs,
|
||||
ensureAbsoluteDirectory,
|
||||
ensureCommandResolvable,
|
||||
ensurePaperclipSkillSymlink,
|
||||
ensurePathInEnv,
|
||||
readPaperclipRuntimeSkillEntries,
|
||||
resolveCommandForLogs,
|
||||
resolvePaperclipDesiredSkillNames,
|
||||
removeMaintainerOnlySkillSymlinks,
|
||||
renderTemplate,
|
||||
@@ -271,6 +272,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const billingType = resolveCursorBillingType(effectiveEnv);
|
||||
const runtimeEnv = ensurePathInEnv(effectiveEnv);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
@@ -383,11 +390,11 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "cursor",
|
||||
command,
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
commandNotes,
|
||||
commandArgs: args,
|
||||
env: redactEnvForLogs(env),
|
||||
env: loggedEnv,
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
|
||||
@@ -12,6 +12,8 @@ import {
|
||||
ensurePathInEnv,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { DEFAULT_CURSOR_LOCAL_MODEL } from "../index.js";
|
||||
import { parseCursorJsonl } from "./parse.js";
|
||||
@@ -49,6 +51,41 @@ function summarizeProbeDetail(stdout: string, stderr: string, parsedError: strin
|
||||
return clean.length > max ? `${clean.slice(0, max - 1)}…` : clean;
|
||||
}
|
||||
|
||||
export interface CursorAuthInfo {
|
||||
email: string | null;
|
||||
displayName: string | null;
|
||||
userId: number | null;
|
||||
}
|
||||
|
||||
export function cursorConfigPath(cursorHome?: string): string {
|
||||
return path.join(cursorHome ?? path.join(os.homedir(), ".cursor"), "cli-config.json");
|
||||
}
|
||||
|
||||
export async function readCursorAuthInfo(cursorHome?: string): Promise<CursorAuthInfo | null> {
|
||||
let raw: string;
|
||||
try {
|
||||
raw = await fs.readFile(cursorConfigPath(cursorHome), "utf8");
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
if (typeof parsed !== "object" || parsed === null) return null;
|
||||
const obj = parsed as Record<string, unknown>;
|
||||
const authInfo = obj.authInfo;
|
||||
if (typeof authInfo !== "object" || authInfo === null) return null;
|
||||
const info = authInfo as Record<string, unknown>;
|
||||
const email = typeof info.email === "string" && info.email.trim().length > 0 ? info.email.trim() : null;
|
||||
const displayName = typeof info.displayName === "string" && info.displayName.trim().length > 0 ? info.displayName.trim() : null;
|
||||
const userId = typeof info.userId === "number" ? info.userId : null;
|
||||
if (!email && !displayName && userId == null) return null;
|
||||
return { email, displayName, userId };
|
||||
}
|
||||
|
||||
const CURSOR_AUTH_REQUIRED_RE =
|
||||
/(?:authentication\s+required|not\s+authenticated|not\s+logged\s+in|unauthorized|invalid(?:\s+or\s+missing)?\s+api(?:[_\s-]?key)?|cursor[_\s-]?api[_\s-]?key|run\s+'?agent\s+login'?\s+first|api(?:[_\s-]?key)?(?:\s+is)?\s+required)/i;
@@ -109,12 +146,25 @@ export async function testEnvironment(
detail: `Detected in ${source}.`,
});
} else {
checks.push({
code: "cursor_api_key_missing",
level: "warn",
message: "CURSOR_API_KEY is not set. Cursor runs may fail until authentication is configured.",
hint: "Set CURSOR_API_KEY in adapter env or run `agent login`.",
});
const cursorHome = isNonEmpty(env.CURSOR_HOME) ? env.CURSOR_HOME : undefined;
const cursorAuth = await readCursorAuthInfo(cursorHome).catch(() => null);
if (cursorAuth) {
checks.push({
code: "cursor_native_auth_present",
level: "info",
message: "Cursor is authenticated via `agent login`.",
detail: cursorAuth.email
? `Logged in as ${cursorAuth.email}.`
: `Credentials found in ${cursorConfigPath(cursorHome)}.`,
});
} else {
checks.push({
code: "cursor_api_key_missing",
level: "warn",
message: "CURSOR_API_KEY is not set. Cursor runs may fail until authentication is configured.",
hint: "Set CURSOR_API_KEY in adapter env or run `agent login`.",
});
}
}

const canRunProbe =
@@ -10,16 +10,17 @@ import {
asString,
asStringArray,
buildPaperclipEnv,
buildInvocationEnvForLogs,
ensureAbsoluteDirectory,
ensureCommandResolvable,
ensurePaperclipSkillSymlink,
joinPromptSections,
ensurePathInEnv,
readPaperclipRuntimeSkillEntries,
resolveCommandForLogs,
resolvePaperclipDesiredSkillNames,
removeMaintainerOnlySkillSymlinks,
parseObject,
redactEnvForLogs,
renderTemplate,
runChildProcess,
} from "@paperclipai/adapter-utils/server-utils";
@@ -220,6 +221,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
const billingType = resolveGeminiBillingType(effectiveEnv);
const runtimeEnv = ensurePathInEnv(effectiveEnv);
await ensureCommandResolvable(command, cwd, runtimeEnv);
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
const loggedEnv = buildInvocationEnvForLogs(env, {
runtimeEnv,
includeRuntimeKeys: ["HOME"],
resolvedCommand,
});

const timeoutSec = asNumber(config.timeoutSec, 0);
const graceSec = asNumber(config.graceSec, 20);
@@ -333,13 +340,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
if (onMeta) {
await onMeta({
adapterType: "gemini_local",
command,
command: resolvedCommand,
cwd,
commandNotes,
commandArgs: args.map((value, index) => (
index === args.length - 1 ? `<prompt ${prompt.length} chars>` : value
)),
env: redactEnvForLogs(env),
env: loggedEnv,
prompt,
promptMetrics,
context,
@@ -31,7 +31,7 @@ Gateway connect identity fields:

Request behavior fields:
- payloadTemplate (object, optional): additional fields merged into gateway agent params
- workspaceRuntime (object, optional): desired runtime service intents; Paperclip forwards these in a standardized paperclip.workspaceRuntime block for remote execution environments
- workspaceRuntime (object, optional): reserved workspace runtime metadata; workspace runtime services are manually controlled from the workspace UI and are not auto-started by heartbeats
- timeoutSec (number, optional): adapter timeout in seconds (default 120)
- waitTimeoutMs (number, optional): agent.wait timeout override (default timeoutSec * 1000)
- autoPairOnFirstConnect (boolean, optional): on first "pairing required", attempt device.pair.list/device.pair.approve via shared auth, then retry once (default true)
@@ -45,7 +45,7 @@ Standard outbound payload additions:
- paperclip (object): standardized Paperclip context added to every gateway agent request
- paperclip.workspace (object, optional): resolved execution workspace for this run
- paperclip.workspaces (array, optional): additional workspace hints Paperclip exposed to the run
- paperclip.workspaceRuntime (object, optional): normalized runtime service intent config for the workspace
- paperclip.workspaceRuntime (object, optional): reserved workspace runtime metadata when explicitly supplied outside normal heartbeat execution

Standard result metadata supported:
- meta.runtimeServices (array, optional): normalized adapter-managed runtime service reports
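A rough sketch of how the documented fields above could appear on an outbound request and in result metadata; the nested shapes (ids, paths, service entries) are assumptions for illustration, not part of the field list itself.

// Illustrative only; actual payloads are assembled by the gateway adapter.
const outboundParams = {
  paperclip: {
    workspace: { id: "ws_main", path: "/srv/workspaces/main" },    // assumed example shape
    workspaces: [{ id: "ws_docs", path: "/srv/workspaces/docs" }], // assumed example shape
    workspaceRuntime: {},                                          // reserved metadata; not auto-started by heartbeats
  },
};
const resultMeta = {
  runtimeServices: [] as Array<Record<string, unknown>>,           // meta.runtimeServices report list
};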
@@ -1,7 +1,15 @@
export const type = "opencode_local";
export const label = "OpenCode (local)";

export const models: Array<{ id: string; label: string }> = [];
export const DEFAULT_OPENCODE_LOCAL_MODEL = "openai/gpt-5.2-codex";

export const models: Array<{ id: string; label: string }> = [
{ id: DEFAULT_OPENCODE_LOCAL_MODEL, label: DEFAULT_OPENCODE_LOCAL_MODEL },
{ id: "openai/gpt-5.4", label: "openai/gpt-5.4" },
{ id: "openai/gpt-5.2", label: "openai/gpt-5.2" },
{ id: "openai/gpt-5.1-codex-max", label: "openai/gpt-5.1-codex-max" },
{ id: "openai/gpt-5.1-codex-mini", label: "openai/gpt-5.1-codex-mini" },
];

export const agentConfigurationDoc = `# opencode_local agent configuration

@@ -21,7 +29,8 @@ Core fields:
- cwd (string, optional): default absolute working directory fallback for the agent process (created if missing when possible)
- instructionsFilePath (string, optional): absolute path to a markdown instructions file prepended to the run prompt
- model (string, required): OpenCode model id in provider/model format (for example anthropic/claude-sonnet-4-5)
- variant (string, optional): provider-specific model variant (for example minimal|low|medium|high|max)
- variant (string, optional): provider-specific reasoning/profile variant passed as --variant (for example minimal|low|medium|high|xhigh|max)
- dangerouslySkipPermissions (boolean, optional): inject a runtime OpenCode config that allows \`external_directory\` access without interactive prompts; defaults to true for unattended Paperclip runs
- promptTemplate (string, optional): run prompt template
- command (string, optional): defaults to "opencode"
- extraArgs (string[], optional): additional CLI args
@@ -37,4 +46,10 @@ Notes:
- Paperclip requires an explicit \`model\` value for \`opencode_local\` agents.
- Runs are executed with: opencode run --format json ...
- Sessions are resumed with --session when stored session cwd matches current cwd.
- The adapter sets OPENCODE_DISABLE_PROJECT_CONFIG=true to prevent OpenCode from \
writing an opencode.json config file into the project working directory. Model \
selection is passed via the --model CLI flag instead.
- When \`dangerouslySkipPermissions\` is enabled, Paperclip injects a temporary \
runtime config with \`permission.external_directory=allow\` so headless runs do \
not stall on approval prompts.
`;
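As a quick illustration of the fields documented above, a hypothetical opencode_local agent configuration might look like the sketch below (values are placeholders, not recommended defaults).

// Hypothetical opencode_local agent configuration; field names come from the doc above.
const exampleOpenCodeLocalConfig = {
  model: "openai/gpt-5.2-codex",    // required, provider/model format
  variant: "high",                  // optional, forwarded as --variant
  dangerouslySkipPermissions: true, // defaults to true for unattended runs
  command: "opencode",              // default command
  cwd: "/srv/workspaces/main",      // assumed path for illustration
  extraArgs: [],                    // additional CLI args
  timeoutSec: 0,                    // 0 keeps the run timeout disabled
};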
@@ -10,11 +10,12 @@ import {
parseObject,
buildPaperclipEnv,
joinPromptSections,
redactEnvForLogs,
buildInvocationEnvForLogs,
ensureAbsoluteDirectory,
ensureCommandResolvable,
ensurePaperclipSkillSymlink,
ensurePathInEnv,
resolveCommandForLogs,
renderTemplate,
runChildProcess,
readPaperclipRuntimeSkillEntries,
@@ -23,6 +24,7 @@ import {
import { isOpenCodeUnknownSessionError, parseOpenCodeJsonl } from "./parse.js";
import { ensureOpenCodeModelConfiguredAndAvailable } from "./models.js";
import { removeMaintainerOnlySkillSymlinks } from "@paperclipai/adapter-utils/server-utils";
import { prepareOpenCodeRuntimeConfig } from "./runtime-config.js";

const __moduleDir = path.dirname(fileURLToPath(import.meta.url));
@@ -169,234 +171,253 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
}
|
||||
// Prevent OpenCode from writing an opencode.json config file into the
|
||||
// project working directory (which would pollute the git repo). Model
|
||||
// selection is already handled via the --model CLI flag. Set after the
|
||||
// envConfig loop so user overrides cannot disable this guard.
|
||||
env.OPENCODE_DISABLE_PROJECT_CONFIG = "true";
|
||||
if (!hasExplicitApiKey && authToken) {
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...env })).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
const preparedRuntimeConfig = await prepareOpenCodeRuntimeConfig({ env, config });
|
||||
try {
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...preparedRuntimeConfig.env })).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
}
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
|
||||
const loggedEnv = buildInvocationEnvForLogs(preparedRuntimeConfig.env, {
|
||||
runtimeEnv,
|
||||
includeRuntimeKeys: ["HOME"],
|
||||
resolvedCommand,
|
||||
});
|
||||
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
const resolvedInstructionsFilePath = instructionsFilePath
|
||||
? path.resolve(cwd, instructionsFilePath)
|
||||
: "";
|
||||
const instructionsDir = resolvedInstructionsFilePath ? `${path.dirname(resolvedInstructionsFilePath)}/` : "";
|
||||
let instructionsPrefix = "";
|
||||
if (resolvedInstructionsFilePath) {
|
||||
try {
|
||||
const instructionsContents = await fs.readFile(resolvedInstructionsFilePath, "utf8");
|
||||
instructionsPrefix =
|
||||
`${instructionsContents}\n\n` +
|
||||
`The above agent instructions were loaded from ${resolvedInstructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Warning: could not read agent instructions file "${resolvedInstructionsFilePath}": ${reason}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const commandNotes = (() => {
|
||||
if (!resolvedInstructionsFilePath) return [] as string[];
|
||||
if (instructionsPrefix.length > 0) {
|
||||
return [
|
||||
`Loaded agent instructions from ${resolvedInstructionsFilePath}`,
|
||||
`Prepended instructions + path directive to stdin prompt (relative references from ${instructionsDir}).`,
|
||||
];
|
||||
}
|
||||
return [
|
||||
`Configured instructionsFilePath ${resolvedInstructionsFilePath}, but file could not be read; continuing without injected instructions.`,
|
||||
];
|
||||
})();
|
||||
|
||||
const bootstrapPromptTemplate = asString(config.bootstrapPromptTemplate, "");
|
||||
const templateData = {
|
||||
agentId: agent.id,
|
||||
companyId: agent.companyId,
|
||||
runId,
|
||||
company: { id: agent.companyId },
|
||||
agent,
|
||||
run: { id: runId, source: "on_demand" },
|
||||
context,
|
||||
};
|
||||
const renderedPrompt = renderTemplate(promptTemplate, templateData);
|
||||
const renderedBootstrapPrompt =
|
||||
!sessionId && bootstrapPromptTemplate.trim().length > 0
|
||||
? renderTemplate(bootstrapPromptTemplate, templateData).trim()
|
||||
: "";
|
||||
const sessionHandoffNote = asString(context.paperclipSessionHandoffMarkdown, "").trim();
|
||||
const prompt = joinPromptSections([
|
||||
instructionsPrefix,
|
||||
renderedBootstrapPrompt,
|
||||
sessionHandoffNote,
|
||||
renderedPrompt,
|
||||
]);
|
||||
const promptMetrics = {
|
||||
promptChars: prompt.length,
|
||||
instructionsChars: instructionsPrefix.length,
|
||||
bootstrapPromptChars: renderedBootstrapPrompt.length,
|
||||
sessionHandoffChars: sessionHandoffNote.length,
|
||||
heartbeatPromptChars: renderedPrompt.length,
|
||||
};
|
||||
|
||||
const buildArgs = (resumeSessionId: string | null) => {
|
||||
const args = ["run", "--format", "json"];
|
||||
if (resumeSessionId) args.push("--session", resumeSessionId);
|
||||
if (model) args.push("--model", model);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
return args;
|
||||
};
|
||||
|
||||
const runAttempt = async (resumeSessionId: string | null) => {
|
||||
const args = buildArgs(resumeSessionId);
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "opencode_local",
|
||||
command,
|
||||
cwd,
|
||||
commandNotes,
|
||||
commandArgs: [...args, `<stdin prompt ${prompt.length} chars>`],
|
||||
env: redactEnvForLogs(env),
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
stdin: prompt,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog,
|
||||
});
|
||||
return {
|
||||
proc,
|
||||
rawStderr: proc.stderr,
|
||||
parsed: parseOpenCodeJsonl(proc.stdout),
|
||||
};
|
||||
};
|
||||
|
||||
const toResult = (
|
||||
attempt: {
|
||||
proc: { exitCode: number | null; signal: string | null; timedOut: boolean; stdout: string; stderr: string };
|
||||
rawStderr: string;
|
||||
parsed: ReturnType<typeof parseOpenCodeJsonl>;
|
||||
},
|
||||
clearSessionOnMissingSession = false,
|
||||
): AdapterExecutionResult => {
|
||||
if (attempt.proc.timedOut) {
|
||||
return {
|
||||
exitCode: attempt.proc.exitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: true,
|
||||
errorMessage: `Timed out after ${timeoutSec}s`,
|
||||
clearSession: clearSessionOnMissingSession,
|
||||
};
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
);
|
||||
}
|
||||
|
||||
const resolvedSessionId =
|
||||
attempt.parsed.sessionId ??
|
||||
(clearSessionOnMissingSession ? null : runtimeSessionId ?? runtime.sessionId ?? null);
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
const resolvedInstructionsFilePath = instructionsFilePath
|
||||
? path.resolve(cwd, instructionsFilePath)
|
||||
: "";
|
||||
const instructionsDir = resolvedInstructionsFilePath ? `${path.dirname(resolvedInstructionsFilePath)}/` : "";
|
||||
let instructionsPrefix = "";
|
||||
if (resolvedInstructionsFilePath) {
|
||||
try {
|
||||
const instructionsContents = await fs.readFile(resolvedInstructionsFilePath, "utf8");
|
||||
instructionsPrefix =
|
||||
`${instructionsContents}\n\n` +
|
||||
`The above agent instructions were loaded from ${resolvedInstructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Warning: could not read agent instructions file "${resolvedInstructionsFilePath}": ${reason}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const parsedError = typeof attempt.parsed.errorMessage === "string" ? attempt.parsed.errorMessage.trim() : "";
|
||||
const stderrLine = firstNonEmptyLine(attempt.proc.stderr);
|
||||
const rawExitCode = attempt.proc.exitCode;
|
||||
const synthesizedExitCode = parsedError && (rawExitCode ?? 0) === 0 ? 1 : rawExitCode;
|
||||
const fallbackErrorMessage =
|
||||
parsedError ||
|
||||
stderrLine ||
|
||||
`OpenCode exited with code ${synthesizedExitCode ?? -1}`;
|
||||
const modelId = model || null;
|
||||
const commandNotes = (() => {
|
||||
const notes = [...preparedRuntimeConfig.notes];
|
||||
if (!resolvedInstructionsFilePath) return notes;
|
||||
if (instructionsPrefix.length > 0) {
|
||||
notes.push(`Loaded agent instructions from ${resolvedInstructionsFilePath}`);
|
||||
notes.push(
|
||||
`Prepended instructions + path directive to stdin prompt (relative references from ${instructionsDir}).`,
|
||||
);
|
||||
return notes;
|
||||
}
|
||||
notes.push(
|
||||
`Configured instructionsFilePath ${resolvedInstructionsFilePath}, but file could not be read; continuing without injected instructions.`,
|
||||
);
|
||||
return notes;
|
||||
})();
|
||||
|
||||
return {
|
||||
exitCode: synthesizedExitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: false,
|
||||
errorMessage: (synthesizedExitCode ?? 0) === 0 ? null : fallbackErrorMessage,
|
||||
usage: {
|
||||
inputTokens: attempt.parsed.usage.inputTokens,
|
||||
outputTokens: attempt.parsed.usage.outputTokens,
|
||||
cachedInputTokens: attempt.parsed.usage.cachedInputTokens,
|
||||
},
|
||||
sessionId: resolvedSessionId,
|
||||
sessionParams: resolvedSessionParams,
|
||||
sessionDisplayId: resolvedSessionId,
|
||||
provider: parseModelProvider(modelId),
|
||||
biller: resolveOpenCodeBiller(runtimeEnv, parseModelProvider(modelId)),
|
||||
model: modelId,
|
||||
billingType: "unknown",
|
||||
costUsd: attempt.parsed.costUsd,
|
||||
resultJson: {
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
},
|
||||
summary: attempt.parsed.summary,
|
||||
clearSession: Boolean(clearSessionOnMissingSession && !attempt.parsed.sessionId),
|
||||
const bootstrapPromptTemplate = asString(config.bootstrapPromptTemplate, "");
|
||||
const templateData = {
|
||||
agentId: agent.id,
|
||||
companyId: agent.companyId,
|
||||
runId,
|
||||
company: { id: agent.companyId },
|
||||
agent,
|
||||
run: { id: runId, source: "on_demand" },
|
||||
context,
|
||||
};
|
||||
const renderedPrompt = renderTemplate(promptTemplate, templateData);
|
||||
const renderedBootstrapPrompt =
|
||||
!sessionId && bootstrapPromptTemplate.trim().length > 0
|
||||
? renderTemplate(bootstrapPromptTemplate, templateData).trim()
|
||||
: "";
|
||||
const sessionHandoffNote = asString(context.paperclipSessionHandoffMarkdown, "").trim();
|
||||
const prompt = joinPromptSections([
|
||||
instructionsPrefix,
|
||||
renderedBootstrapPrompt,
|
||||
sessionHandoffNote,
|
||||
renderedPrompt,
|
||||
]);
|
||||
const promptMetrics = {
|
||||
promptChars: prompt.length,
|
||||
instructionsChars: instructionsPrefix.length,
|
||||
bootstrapPromptChars: renderedBootstrapPrompt.length,
|
||||
sessionHandoffChars: sessionHandoffNote.length,
|
||||
heartbeatPromptChars: renderedPrompt.length,
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || Boolean(initial.parsed.errorMessage));
|
||||
if (
|
||||
sessionId &&
|
||||
initialFailed &&
|
||||
isOpenCodeUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
const buildArgs = (resumeSessionId: string | null) => {
|
||||
const args = ["run", "--format", "json"];
|
||||
if (resumeSessionId) args.push("--session", resumeSessionId);
|
||||
if (model) args.push("--model", model);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
return args;
|
||||
};
|
||||
|
||||
const runAttempt = async (resumeSessionId: string | null) => {
|
||||
const args = buildArgs(resumeSessionId);
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "opencode_local",
|
||||
command: resolvedCommand,
|
||||
cwd,
|
||||
commandNotes,
|
||||
commandArgs: [...args, `<stdin prompt ${prompt.length} chars>`],
|
||||
env: loggedEnv,
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
stdin: prompt,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog,
|
||||
});
|
||||
return {
|
||||
proc,
|
||||
rawStderr: proc.stderr,
|
||||
parsed: parseOpenCodeJsonl(proc.stdout),
|
||||
};
|
||||
};
|
||||
|
||||
const toResult = (
|
||||
attempt: {
|
||||
proc: { exitCode: number | null; signal: string | null; timedOut: boolean; stdout: string; stderr: string };
|
||||
rawStderr: string;
|
||||
parsed: ReturnType<typeof parseOpenCodeJsonl>;
|
||||
},
|
||||
clearSessionOnMissingSession = false,
|
||||
): AdapterExecutionResult => {
|
||||
if (attempt.proc.timedOut) {
|
||||
return {
|
||||
exitCode: attempt.proc.exitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: true,
|
||||
errorMessage: `Timed out after ${timeoutSec}s`,
|
||||
clearSession: clearSessionOnMissingSession,
|
||||
};
|
||||
}
|
||||
|
||||
const resolvedSessionId =
|
||||
attempt.parsed.sessionId ??
|
||||
(clearSessionOnMissingSession ? null : runtimeSessionId ?? runtime.sessionId ?? null);
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
|
||||
const parsedError = typeof attempt.parsed.errorMessage === "string" ? attempt.parsed.errorMessage.trim() : "";
|
||||
const stderrLine = firstNonEmptyLine(attempt.proc.stderr);
|
||||
const rawExitCode = attempt.proc.exitCode;
|
||||
const synthesizedExitCode = parsedError && (rawExitCode ?? 0) === 0 ? 1 : rawExitCode;
|
||||
const fallbackErrorMessage =
|
||||
parsedError ||
|
||||
stderrLine ||
|
||||
`OpenCode exited with code ${synthesizedExitCode ?? -1}`;
|
||||
const modelId = model || null;
|
||||
|
||||
return {
|
||||
exitCode: synthesizedExitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: false,
|
||||
errorMessage: (synthesizedExitCode ?? 0) === 0 ? null : fallbackErrorMessage,
|
||||
usage: {
|
||||
inputTokens: attempt.parsed.usage.inputTokens,
|
||||
outputTokens: attempt.parsed.usage.outputTokens,
|
||||
cachedInputTokens: attempt.parsed.usage.cachedInputTokens,
|
||||
},
|
||||
sessionId: resolvedSessionId,
|
||||
sessionParams: resolvedSessionParams,
|
||||
sessionDisplayId: resolvedSessionId,
|
||||
provider: parseModelProvider(modelId),
|
||||
biller: resolveOpenCodeBiller(runtimeEnv, parseModelProvider(modelId)),
|
||||
model: modelId,
|
||||
billingType: "unknown",
|
||||
costUsd: attempt.parsed.costUsd,
|
||||
resultJson: {
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
},
|
||||
summary: attempt.parsed.summary,
|
||||
clearSession: Boolean(clearSessionOnMissingSession && !attempt.parsed.sessionId),
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || Boolean(initial.parsed.errorMessage));
|
||||
if (
|
||||
sessionId &&
|
||||
initialFailed &&
|
||||
isOpenCodeUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
await preparedRuntimeConfig.cleanup();
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
}
|
||||
|
||||
@@ -120,7 +120,8 @@ export async function discoverOpenCodeModels(input: {
// /etc/passwd entry (e.g. `docker run --user 1234` with a minimal
// image). Fall back to process.env.HOME.
}
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...env, ...(resolvedHome ? { HOME: resolvedHome } : {}) }));
// Prevent OpenCode from writing an opencode.json into the working directory.
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...env, ...(resolvedHome ? { HOME: resolvedHome } : {}), OPENCODE_DISABLE_PROJECT_CONFIG: "true" }));

const result = await runChildProcess(
`opencode-models-${Date.now()}-${Math.random().toString(16).slice(2)}`,
@@ -0,0 +1,79 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { prepareOpenCodeRuntimeConfig } from "./runtime-config.js";
|
||||
|
||||
const cleanupPaths = new Set<string>();
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(
|
||||
[...cleanupPaths].map(async (filepath) => {
|
||||
await fs.rm(filepath, { recursive: true, force: true });
|
||||
cleanupPaths.delete(filepath);
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
async function makeConfigHome(initialConfig?: Record<string, unknown>) {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-test-"));
|
||||
cleanupPaths.add(root);
|
||||
const configDir = path.join(root, "opencode");
|
||||
await fs.mkdir(configDir, { recursive: true });
|
||||
if (initialConfig) {
|
||||
await fs.writeFile(
|
||||
path.join(configDir, "opencode.json"),
|
||||
`${JSON.stringify(initialConfig, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
}
|
||||
return root;
|
||||
}
|
||||
|
||||
describe("prepareOpenCodeRuntimeConfig", () => {
|
||||
it("injects an external_directory allow rule by default", async () => {
|
||||
const configHome = await makeConfigHome({
|
||||
permission: {
|
||||
read: "allow",
|
||||
},
|
||||
theme: "system",
|
||||
});
|
||||
|
||||
const prepared = await prepareOpenCodeRuntimeConfig({
|
||||
env: { XDG_CONFIG_HOME: configHome },
|
||||
config: {},
|
||||
});
|
||||
cleanupPaths.add(prepared.env.XDG_CONFIG_HOME);
|
||||
|
||||
expect(prepared.env.XDG_CONFIG_HOME).not.toBe(configHome);
|
||||
const runtimeConfig = JSON.parse(
|
||||
await fs.readFile(
|
||||
path.join(prepared.env.XDG_CONFIG_HOME, "opencode", "opencode.json"),
|
||||
"utf8",
|
||||
),
|
||||
) as Record<string, unknown>;
|
||||
expect(runtimeConfig).toMatchObject({
|
||||
theme: "system",
|
||||
permission: {
|
||||
read: "allow",
|
||||
external_directory: "allow",
|
||||
},
|
||||
});
|
||||
|
||||
await prepared.cleanup();
|
||||
cleanupPaths.delete(prepared.env.XDG_CONFIG_HOME);
|
||||
await expect(fs.access(prepared.env.XDG_CONFIG_HOME)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("respects explicit opt-out", async () => {
|
||||
const configHome = await makeConfigHome();
|
||||
const prepared = await prepareOpenCodeRuntimeConfig({
|
||||
env: { XDG_CONFIG_HOME: configHome },
|
||||
config: { dangerouslySkipPermissions: false },
|
||||
});
|
||||
|
||||
expect(prepared.env).toEqual({ XDG_CONFIG_HOME: configHome });
|
||||
expect(prepared.notes).toEqual([]);
|
||||
await prepared.cleanup();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,91 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { asBoolean } from "@paperclipai/adapter-utils/server-utils";

type PreparedOpenCodeRuntimeConfig = {
env: Record<string, string>;
notes: string[];
cleanup: () => Promise<void>;
};

function resolveXdgConfigHome(env: Record<string, string>): string {
return (
(typeof env.XDG_CONFIG_HOME === "string" && env.XDG_CONFIG_HOME.trim()) ||
(typeof process.env.XDG_CONFIG_HOME === "string" && process.env.XDG_CONFIG_HOME.trim()) ||
path.join(os.homedir(), ".config")
);
}

function isPlainObject(value: unknown): value is Record<string, unknown> {
return typeof value === "object" && value !== null && !Array.isArray(value);
}

async function readJsonObject(filepath: string): Promise<Record<string, unknown>> {
try {
const raw = await fs.readFile(filepath, "utf8");
const parsed = JSON.parse(raw);
return isPlainObject(parsed) ? parsed : {};
} catch {
return {};
}
}

export async function prepareOpenCodeRuntimeConfig(input: {
env: Record<string, string>;
config: Record<string, unknown>;
}): Promise<PreparedOpenCodeRuntimeConfig> {
const skipPermissions = asBoolean(input.config.dangerouslySkipPermissions, true);
if (!skipPermissions) {
return {
env: input.env,
notes: [],
cleanup: async () => {},
};
}

const sourceConfigDir = path.join(resolveXdgConfigHome(input.env), "opencode");
const runtimeConfigHome = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-config-"));
const runtimeConfigDir = path.join(runtimeConfigHome, "opencode");
const runtimeConfigPath = path.join(runtimeConfigDir, "opencode.json");

await fs.mkdir(runtimeConfigDir, { recursive: true });
try {
await fs.cp(sourceConfigDir, runtimeConfigDir, {
recursive: true,
force: true,
errorOnExist: false,
dereference: false,
});
} catch (err) {
if ((err as NodeJS.ErrnoException | null)?.code !== "ENOENT") {
throw err;
}
}

const existingConfig = await readJsonObject(runtimeConfigPath);
const existingPermission = isPlainObject(existingConfig.permission)
? existingConfig.permission
: {};
const nextConfig = {
...existingConfig,
permission: {
...existingPermission,
external_directory: "allow",
},
};
await fs.writeFile(runtimeConfigPath, `${JSON.stringify(nextConfig, null, 2)}\n`, "utf8");

return {
env: {
...input.env,
XDG_CONFIG_HOME: runtimeConfigHome,
},
notes: [
"Injected runtime OpenCode config with permission.external_directory=allow to avoid headless approval prompts.",
],
cleanup: async () => {
await fs.rm(runtimeConfigHome, { recursive: true, force: true });
},
};
}
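A condensed usage sketch mirroring how the execute and testEnvironment paths call this helper: prepare the runtime config, run with the returned env, and always release the temporary config home in a finally block.

// Usage sketch; `env` and `config` come from the adapter context as in the surrounding diff.
const prepared = await prepareOpenCodeRuntimeConfig({ env, config });
try {
  // prepared.env points XDG_CONFIG_HOME at a temp copy whose opencode.json sets
  // permission.external_directory = "allow".
  const runtimeEnv = ensurePathInEnv({ ...process.env, ...prepared.env });
  // ...spawn `opencode run --format json` with runtimeEnv...
} finally {
  await prepared.cleanup(); // removes the temporary config home
}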
|
||||
@@ -4,6 +4,7 @@ import type {
|
||||
AdapterEnvironmentTestResult,
|
||||
} from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
asBoolean,
|
||||
asString,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
@@ -14,6 +15,7 @@ import {
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { discoverOpenCodeModels, ensureOpenCodeModelConfiguredAndAvailable } from "./models.js";
|
||||
import { parseOpenCodeJsonl } from "./parse.js";
|
||||
import { prepareOpenCodeRuntimeConfig } from "./runtime-config.js";
|
||||
|
||||
function summarizeStatus(checks: AdapterEnvironmentCheck[]): AdapterEnvironmentTestResult["status"] {
|
||||
if (checks.some((check) => check.level === "error")) return "fail";
|
||||
@@ -90,224 +92,238 @@ export async function testEnvironment(
|
||||
});
|
||||
}
|
||||
|
||||
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...env }));
|
||||
|
||||
const cwdInvalid = checks.some((check) => check.code === "opencode_cwd_invalid");
|
||||
if (cwdInvalid) {
|
||||
// Prevent OpenCode from writing an opencode.json into the working directory.
|
||||
env.OPENCODE_DISABLE_PROJECT_CONFIG = "true";
|
||||
const preparedRuntimeConfig = await prepareOpenCodeRuntimeConfig({ env, config });
|
||||
if (asBoolean(config.dangerouslySkipPermissions, true)) {
|
||||
checks.push({
|
||||
code: "opencode_command_skipped",
|
||||
level: "warn",
|
||||
message: "Skipped command check because working directory validation failed.",
|
||||
detail: command,
|
||||
code: "opencode_headless_permissions_enabled",
|
||||
level: "info",
|
||||
message: "Headless OpenCode external-directory permissions are auto-approved for unattended runs.",
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
}
|
||||
try {
|
||||
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...preparedRuntimeConfig.env }));
|
||||
|
||||
const cwdInvalid = checks.some((check) => check.code === "opencode_cwd_invalid");
|
||||
if (cwdInvalid) {
|
||||
checks.push({
|
||||
code: "opencode_command_resolvable",
|
||||
level: "info",
|
||||
message: `Command is executable: ${command}`,
|
||||
});
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_command_unresolvable",
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : "Command is not executable",
|
||||
code: "opencode_command_skipped",
|
||||
level: "warn",
|
||||
message: "Skipped command check because working directory validation failed.",
|
||||
detail: command,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const canRunProbe =
|
||||
checks.every((check) => check.code !== "opencode_cwd_invalid" && check.code !== "opencode_command_unresolvable");
|
||||
|
||||
let modelValidationPassed = false;
|
||||
const configuredModel = asString(config.model, "").trim();
|
||||
|
||||
if (canRunProbe && configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
} else {
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
code: "opencode_command_resolvable",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
message: `Command is executable: ${command}`,
|
||||
});
|
||||
} else {
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_models_empty",
|
||||
code: "opencode_command_unresolvable",
|
||||
level: "error",
|
||||
message: "OpenCode returned no models.",
|
||||
hint: "Run `opencode models` and verify provider authentication.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "error",
|
||||
message: errMsg || "OpenCode model discovery failed.",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
message: err instanceof Error ? err.message : "Command is not executable",
|
||||
detail: command,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (canRunProbe && !configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
});
|
||||
|
||||
const canRunProbe =
|
||||
checks.every((check) => check.code !== "opencode_cwd_invalid" && check.code !== "opencode_command_unresolvable");
|
||||
|
||||
let modelValidationPassed = false;
|
||||
const configuredModel = asString(config.model, "").trim();
|
||||
|
||||
if (canRunProbe && configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_empty",
|
||||
level: "error",
|
||||
message: "OpenCode returned no models.",
|
||||
hint: "Run `opencode models` and verify provider authentication.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "error",
|
||||
message: errMsg || "OpenCode model discovery failed.",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "warn",
|
||||
message: errMsg || "OpenCode model discovery failed (best-effort, no model configured).",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
});
|
||||
} else if (canRunProbe && !configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "warn",
|
||||
message: errMsg || "OpenCode model discovery failed (best-effort, no model configured).",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const modelUnavailable = checks.some((check) => check.code === "opencode_hello_probe_model_unavailable");
|
||||
if (!configuredModel && !modelUnavailable) {
|
||||
// No model configured – skip model requirement if no model-related checks exist
|
||||
} else if (configuredModel && canRunProbe) {
|
||||
try {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model: configuredModel,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
checks.push({
|
||||
code: "opencode_model_configured",
|
||||
level: "info",
|
||||
message: `Configured model: ${configuredModel}`,
|
||||
});
|
||||
modelValidationPassed = true;
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_model_invalid",
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : "Configured model is unavailable.",
|
||||
hint: "Run `opencode models` and choose a currently available provider/model ID.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (canRunProbe && modelValidationPassed) {
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const variant = asString(config.variant, "").trim();
|
||||
const probeModel = configuredModel;
|
||||
|
||||
const args = ["run", "--format", "json"];
|
||||
args.push("--model", probeModel);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
try {
|
||||
const probe = await runChildProcess(
|
||||
`opencode-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
const modelUnavailable = checks.some((check) => check.code === "opencode_hello_probe_model_unavailable");
|
||||
if (!configuredModel && !modelUnavailable) {
|
||||
// No model configured – skip model requirement if no model-related checks exist
|
||||
} else if (configuredModel && canRunProbe) {
|
||||
try {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model: configuredModel,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
timeoutSec: 60,
|
||||
graceSec: 5,
|
||||
stdin: "Respond with hello.",
|
||||
onLog: async () => {},
|
||||
},
|
||||
);
|
||||
});
|
||||
checks.push({
|
||||
code: "opencode_model_configured",
|
||||
level: "info",
|
||||
message: `Configured model: ${configuredModel}`,
|
||||
});
|
||||
modelValidationPassed = true;
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_model_invalid",
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : "Configured model is unavailable.",
|
||||
hint: "Run `opencode models` and choose a currently available provider/model ID.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const parsed = parseOpenCodeJsonl(probe.stdout);
|
||||
const detail = summarizeProbeDetail(probe.stdout, probe.stderr, parsed.errorMessage);
|
||||
const authEvidence = `${parsed.errorMessage ?? ""}\n${probe.stdout}\n${probe.stderr}`.trim();
|
||||
if (canRunProbe && modelValidationPassed) {
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const variant = asString(config.variant, "").trim();
|
||||
const probeModel = configuredModel;
|
||||
|
||||
if (probe.timedOut) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_timed_out",
|
||||
level: "warn",
|
||||
message: "OpenCode hello probe timed out.",
|
||||
hint: "Retry the probe. If this persists, run OpenCode manually in this working directory.",
|
||||
});
|
||||
} else if ((probe.exitCode ?? 1) === 0 && !parsed.errorMessage) {
|
||||
const summary = parsed.summary.trim();
|
||||
const hasHello = /\bhello\b/i.test(summary);
|
||||
checks.push({
|
||||
code: hasHello ? "opencode_hello_probe_passed" : "opencode_hello_probe_unexpected_output",
|
||||
level: hasHello ? "info" : "warn",
|
||||
message: hasHello
|
||||
? "OpenCode hello probe succeeded."
|
||||
: "OpenCode probe ran but did not return `hello` as expected.",
|
||||
...(summary ? { detail: summary.replace(/\s+/g, " ").trim().slice(0, 240) } : {}),
|
||||
...(hasHello
|
||||
? {}
|
||||
: {
|
||||
hint: "Run `opencode run --format json` manually and prompt `Respond with hello` to inspect output.",
|
||||
}),
|
||||
});
|
||||
} else if (/ProviderModelNotFoundError/i.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else if (OPENCODE_AUTH_REQUIRED_RE.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_auth_required",
|
||||
level: "warn",
|
||||
message: "OpenCode is installed, but provider authentication is not ready.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode auth login` or set provider credentials, then retry the probe.",
|
||||
});
|
||||
} else {
|
||||
const args = ["run", "--format", "json"];
|
||||
args.push("--model", probeModel);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
try {
|
||||
const probe = await runChildProcess(
|
||||
`opencode-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
timeoutSec: 60,
|
||||
graceSec: 5,
|
||||
stdin: "Respond with hello.",
|
||||
onLog: async () => {},
|
||||
},
|
||||
);
|
||||
|
||||
const parsed = parseOpenCodeJsonl(probe.stdout);
|
||||
const detail = summarizeProbeDetail(probe.stdout, probe.stderr, parsed.errorMessage);
|
||||
const authEvidence = `${parsed.errorMessage ?? ""}\n${probe.stdout}\n${probe.stderr}`.trim();
|
||||
|
||||
if (probe.timedOut) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_timed_out",
|
||||
level: "warn",
|
||||
message: "OpenCode hello probe timed out.",
|
||||
hint: "Retry the probe. If this persists, run OpenCode manually in this working directory.",
|
||||
});
|
||||
} else if ((probe.exitCode ?? 1) === 0 && !parsed.errorMessage) {
|
||||
const summary = parsed.summary.trim();
|
||||
const hasHello = /\bhello\b/i.test(summary);
|
||||
checks.push({
|
||||
code: hasHello ? "opencode_hello_probe_passed" : "opencode_hello_probe_unexpected_output",
|
||||
level: hasHello ? "info" : "warn",
|
||||
message: hasHello
|
||||
? "OpenCode hello probe succeeded."
|
||||
: "OpenCode probe ran but did not return `hello` as expected.",
|
||||
...(summary ? { detail: summary.replace(/\s+/g, " ").trim().slice(0, 240) } : {}),
|
||||
...(hasHello
|
||||
? {}
|
||||
: {
|
||||
hint: "Run `opencode run --format json` manually and prompt `Respond with hello` to inspect output.",
|
||||
}),
|
||||
});
|
||||
} else if (/ProviderModelNotFoundError/i.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else if (OPENCODE_AUTH_REQUIRED_RE.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_auth_required",
|
||||
level: "warn",
|
||||
message: "OpenCode is installed, but provider authentication is not ready.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode auth login` or set provider credentials, then retry the probe.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_failed",
|
||||
level: "error",
|
||||
message: "OpenCode hello probe failed.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode run --format json` manually in this working directory to debug.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_failed",
|
||||
level: "error",
|
||||
message: "OpenCode hello probe failed.",
|
||||
...(detail ? { detail } : {}),
|
||||
detail: err instanceof Error ? err.message : String(err),
|
||||
hint: "Run `opencode run --format json` manually in this working directory to debug.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_failed",
|
||||
level: "error",
|
||||
message: "OpenCode hello probe failed.",
|
||||
detail: err instanceof Error ? err.message : String(err),
|
||||
hint: "Run `opencode run --format json` manually in this working directory to debug.",
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await preparedRuntimeConfig.cleanup();
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -58,6 +58,7 @@ export function buildOpenCodeLocalConfig(v: CreateConfigValues): Record<string,
if (v.bootstrapPrompt) ac.bootstrapPromptTemplate = v.bootstrapPrompt;
if (v.model) ac.model = v.model;
if (v.thinkingEffort) ac.variant = v.thinkingEffort;
ac.dangerouslySkipPermissions = v.dangerouslySkipPermissions;
// OpenCode sessions can run until the CLI exits naturally; keep timeout disabled (0)
// and rely on graceSec for termination handling when a timeout is configured elsewhere.
ac.timeoutSec = 0;
@@ -10,12 +10,13 @@ import {
parseObject,
buildPaperclipEnv,
joinPromptSections,
redactEnvForLogs,
buildInvocationEnvForLogs,
ensureAbsoluteDirectory,
ensureCommandResolvable,
ensurePaperclipSkillSymlink,
ensurePathInEnv,
readPaperclipRuntimeSkillEntries,
resolveCommandForLogs,
resolvePaperclipDesiredSkillNames,
removeMaintainerOnlySkillSymlinks,
renderTemplate,
@@ -204,6 +205,12 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
),
);
await ensureCommandResolvable(command, cwd, runtimeEnv);
const resolvedCommand = await resolveCommandForLogs(command, cwd, runtimeEnv);
const loggedEnv = buildInvocationEnvForLogs(env, {
runtimeEnv,
includeRuntimeKeys: ["HOME"],
resolvedCommand,
});

// Validate model is available before execution
await ensurePiModelConfiguredAndAvailable({
@@ -326,8 +333,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
const buildArgs = (sessionFile: string): string[] => {
const args: string[] = [];

// Use RPC mode for proper lifecycle management (waits for agent completion)
args.push("--mode", "rpc");
// Use JSON mode for structured output with print mode (non-interactive)
args.push("--mode", "json");
args.push("-p"); // Non-interactive mode: process prompt and exit

// Use --append-system-prompt to extend Pi's default system prompt
args.push("--append-system-prompt", renderedSystemPromptExtension);
@@ -343,29 +351,23 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
args.push("--skill", PI_AGENT_SKILLS_DIR);

if (extraArgs.length > 0) args.push(...extraArgs);

// Add the user prompt as the last argument
args.push(userPrompt);

return args;
};
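For reference, a hedged sketch of the argument list buildArgs now produces under JSON/print mode; flags handled in the elided hunk (such as session handling) are omitted, and the bracketed values stand in for the real variables above.

// Illustrative buildArgs output; not an exhaustive flag list.
const exampleArgs = [
  "--mode", "json",                                            // structured JSON output
  "-p",                                                        // non-interactive: process prompt and exit
  "--append-system-prompt", "<renderedSystemPromptExtension>",
  "--skill", "<PI_AGENT_SKILLS_DIR>",
  // ...extraArgs...
  "<userPrompt>",                                              // prompt is the final positional argument
];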
const buildRpcStdin = (): string => {
// Send the prompt as an RPC command
const promptCommand = {
type: "prompt",
message: userPrompt,
};
return JSON.stringify(promptCommand) + "\n";
};

const runAttempt = async (sessionFile: string) => {
const args = buildArgs(sessionFile);
if (onMeta) {
await onMeta({
adapterType: "pi_local",
command,
command: resolvedCommand,
cwd,
commandNotes,
commandArgs: args,
env: redactEnvForLogs(env),
env: loggedEnv,
prompt: userPrompt,
promptMetrics,
context,
@@ -402,7 +404,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
graceSec,
onSpawn,
onLog: bufferedOnLog,
stdin: buildRpcStdin(),
});

// Flush any remaining buffer content
@@ -131,7 +131,9 @@ export async function discoverPiModels(input: {
throw new Error(detail ? `\`pi --list-models\` failed: ${detail}` : "`pi --list-models` failed.");
}

return sortModels(dedupeModels(parseModelsOutput(result.stdout)));
// Pi outputs model list to stderr, but fall back to stdout for older versions
const output = result.stderr || result.stdout;
return sortModels(dedupeModels(parseModelsOutput(output)));
}
function normalizeEnv(input: unknown): Record<string, string> {

@@ -17,19 +17,39 @@ function asString(value: unknown, fallback = ""): string {
return typeof value === "string" ? value : fallback;
}

function extractTextContent(content: string | Array<{ type: string; text?: string }>): string {
if (typeof content === "string") return content;
if (!Array.isArray(content)) return "";
return content
.filter((c) => c.type === "text" && c.text)
.map((c) => c.text!)
.join("");
function extractTextContent(content: string | Array<{ type: string; text?: string; thinking?: string }>): { text: string; thinking: string } {
if (typeof content === "string") return { text: content, thinking: "" };
if (!Array.isArray(content)) return { text: "", thinking: "" };

let text = "";
let thinking = "";

for (const c of content) {
if (c.type === "text" && c.text) {
text += c.text;
}
if (c.type === "thinking" && c.thinking) {
thinking += c.thinking;
}
}

return { text, thinking };
}
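A small worked example of the new return shape, assuming a mixed content array of text and thinking blocks:

// Example input/output for the reworked extractTextContent.
const content = [
  { type: "thinking", thinking: "Consider the repo layout first. " },
  { type: "text", text: "Here is the plan: " },
  { type: "text", text: "update the adapter." },
];
const { text, thinking } = extractTextContent(content);
// text     === "Here is the plan: update the adapter."
// thinking === "Consider the repo layout first. "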
// Track pending tool calls for proper toolUseId matching
let pendingToolCalls = new Map<string, { toolName: string; args: unknown }>();

export function resetParserState(): void {
pendingToolCalls.clear();
}

export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
const parsed = asRecord(safeJsonParse(line));
if (!parsed) {
return [{ kind: "stdout", ts, text: line }];
// Non-JSON line, treat as raw stdout
const trimmed = line.trim();
if (!trimmed) return [];
return [{ kind: "stdout", ts, text: trimmed }];
}

const type = asString(parsed.type);
@@ -41,16 +61,64 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
|
||||
// Agent lifecycle
|
||||
if (type === "agent_start") {
|
||||
return [{ kind: "system", ts, text: "Pi agent started" }];
|
||||
return [{ kind: "system", ts, text: "🚀 Pi agent started" }];
|
||||
}
|
||||
|
||||
if (type === "agent_end") {
|
||||
return [{ kind: "system", ts, text: "Pi agent finished" }];
|
||||
const entries: TranscriptEntry[] = [];
|
||||
|
||||
// Extract final message from messages array if available
|
||||
const messages = parsed.messages as Array<Record<string, unknown>> | undefined;
|
||||
if (messages && messages.length > 0) {
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
if (lastMessage?.role === "assistant") {
|
||||
const content = lastMessage.content as string | Array<{ type: string; text?: string; thinking?: string }>;
|
||||
const { text, thinking } = extractTextContent(content);
|
||||
|
||||
if (thinking) {
|
||||
entries.push({ kind: "thinking", ts, text: thinking });
|
||||
}
|
||||
if (text) {
|
||||
entries.push({ kind: "assistant", ts, text });
|
||||
}
|
||||
|
||||
// Extract usage
|
||||
const usage = asRecord(lastMessage.usage);
|
||||
if (usage) {
|
||||
const inputTokens = (usage.inputTokens ?? usage.input ?? 0) as number;
|
||||
const outputTokens = (usage.outputTokens ?? usage.output ?? 0) as number;
|
||||
const cachedTokens = (usage.cacheRead ?? usage.cachedInputTokens ?? 0) as number;
|
||||
const costRecord = asRecord(usage.cost);
|
||||
const costUsd = (costRecord?.total ?? usage.costUsd ?? 0) as number;
|
||||
|
||||
if (inputTokens > 0 || outputTokens > 0) {
|
||||
entries.push({
|
||||
kind: "result",
|
||||
ts,
|
||||
text: "Run completed",
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
cachedTokens,
|
||||
costUsd,
|
||||
subtype: "end",
|
||||
isError: false,
|
||||
errors: [],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (entries.length === 0) {
|
||||
entries.push({ kind: "system", ts, text: "✅ Pi agent finished" });
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Turn lifecycle
|
||||
if (type === "turn_start") {
|
||||
return [{ kind: "system", ts, text: "Turn started" }];
|
||||
return []; // Skip noisy lifecycle events
|
||||
}
|
||||
|
||||
if (type === "turn_end") {
|
||||
@@ -60,16 +128,21 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
const entries: TranscriptEntry[] = [];
|
||||
|
||||
if (message) {
|
||||
const content = message.content as string | Array<{ type: string; text?: string }>;
|
||||
const text = extractTextContent(content);
|
||||
const content = message.content as string | Array<{ type: string; text?: string; thinking?: string }>;
|
||||
const { text, thinking } = extractTextContent(content);
|
||||
|
||||
if (thinking) {
|
||||
entries.push({ kind: "thinking", ts, text: thinking });
|
||||
}
|
||||
if (text) {
|
||||
entries.push({ kind: "assistant", ts, text });
|
||||
}
|
||||
}
|
||||
|
||||
// Process tool results
|
||||
// Process tool results - match with pending tool calls
|
||||
if (toolResults) {
|
||||
for (const tr of toolResults) {
|
||||
const toolCallId = asString(tr.toolCallId, `tool-${Date.now()}`);
|
||||
const content = tr.content;
|
||||
const isError = tr.isError === true;
|
||||
|
||||
@@ -78,23 +151,31 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
if (typeof content === "string") {
|
||||
contentStr = content;
|
||||
} else if (Array.isArray(content)) {
|
||||
contentStr = extractTextContent(content as Array<{ type: string; text?: string }>);
|
||||
const extracted = extractTextContent(content as Array<{ type: string; text?: string }>);
|
||||
contentStr = extracted.text || JSON.stringify(content);
|
||||
} else {
|
||||
contentStr = JSON.stringify(content);
|
||||
}
|
||||
|
||||
// Get tool name from pending calls if available
|
||||
const pendingCall = pendingToolCalls.get(toolCallId);
|
||||
const toolName = asString(tr.toolName, pendingCall?.toolName || "tool");
|
||||
|
||||
entries.push({
|
||||
kind: "tool_result",
|
||||
ts,
|
||||
toolUseId: asString(tr.toolCallId, "unknown"),
|
||||
toolName: asString(tr.toolName),
|
||||
toolUseId: toolCallId,
|
||||
toolName,
|
||||
content: contentStr,
|
||||
isError,
|
||||
});
|
||||
|
||||
// Clean up pending call
|
||||
pendingToolCalls.delete(toolCallId);
|
||||
}
|
||||
}
|
||||
|
||||
return entries.length > 0 ? entries : [{ kind: "system", ts, text: "Turn ended" }];
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Message streaming
|
||||
@@ -106,33 +187,81 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
const assistantEvent = asRecord(parsed.assistantMessageEvent);
|
||||
if (assistantEvent) {
|
||||
const msgType = asString(assistantEvent.type);
|
||||
|
||||
// Handle thinking deltas
|
||||
if (msgType === "thinking_delta") {
|
||||
const delta = asString(assistantEvent.delta);
|
||||
if (delta) {
|
||||
return [{ kind: "thinking", ts, text: delta, delta: true }];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle text deltas
|
||||
if (msgType === "text_delta") {
|
||||
const delta = asString(assistantEvent.delta);
|
||||
if (delta) {
|
||||
return [{ kind: "assistant", ts, text: delta, delta: true }];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle thinking end - emit full thinking block
|
||||
if (msgType === "thinking_end") {
|
||||
const content = asString(assistantEvent.content);
|
||||
if (content) {
|
||||
return [{ kind: "thinking", ts, text: content }];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle text end - emit full text block
|
||||
if (msgType === "text_end") {
|
||||
const content = asString(assistantEvent.content);
|
||||
if (content) {
|
||||
return [{ kind: "assistant", ts, text: content }];
|
||||
}
|
||||
}
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
if (type === "message_end") {
|
||||
const message = asRecord(parsed.message);
|
||||
if (message) {
|
||||
const content = message.content as string | Array<{ type: string; text?: string; thinking?: string }>;
|
||||
const { text, thinking } = extractTextContent(content);
|
||||
|
||||
const entries: TranscriptEntry[] = [];
|
||||
|
||||
// Emit final thinking block if present
|
||||
if (thinking) {
|
||||
entries.push({ kind: "thinking", ts, text: thinking });
|
||||
}
|
||||
|
||||
// Emit final text block if present
|
||||
if (text) {
|
||||
entries.push({ kind: "assistant", ts, text });
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
// Tool execution
|
||||
if (type === "tool_execution_start") {
|
||||
const toolName = asString(parsed.toolName);
|
||||
const toolCallId = asString(parsed.toolCallId, `tool-${Date.now()}`);
|
||||
const toolName = asString(parsed.toolName, "tool");
|
||||
const args = parsed.args;
|
||||
if (toolName) {
|
||||
return [{
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: toolName,
|
||||
input: args,
|
||||
}];
|
||||
}
|
||||
return [{ kind: "system", ts, text: `Tool started` }];
|
||||
|
||||
// Track this tool call for later matching
|
||||
pendingToolCalls.set(toolCallId, { toolName, args });
|
||||
|
||||
return [{
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: toolName,
|
||||
input: args,
|
||||
toolUseId: toolCallId,
|
||||
}];
|
||||
}
|
||||
|
||||
if (type === "tool_execution_update") {
|
||||
@@ -140,40 +269,43 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
}
|
||||
|
||||
if (type === "tool_execution_end") {
|
||||
const toolCallId = asString(parsed.toolCallId);
|
||||
const toolName = asString(parsed.toolName);
|
||||
const toolCallId = asString(parsed.toolCallId, `tool-${Date.now()}`);
|
||||
const toolName = asString(parsed.toolName, "tool");
|
||||
const result = parsed.result;
|
||||
const isError = parsed.isError === true;
|
||||
|
||||
// Extract text from Pi's content array format
|
||||
// Can be: {"content": [{"type": "text", "text": "..."}]} or [{"type": "text", "text": "..."}]
|
||||
let contentStr: string;
|
||||
if (typeof result === "string") {
|
||||
contentStr = result;
|
||||
} else if (Array.isArray(result)) {
|
||||
// Direct array format: result is [{"type": "text", "text": "..."}]
|
||||
contentStr = extractTextContent(result as Array<{ type: string; text?: string }>);
|
||||
const extracted = extractTextContent(result as Array<{ type: string; text?: string }>);
|
||||
contentStr = extracted.text || JSON.stringify(result);
|
||||
} else if (result && typeof result === "object") {
|
||||
const resultObj = result as Record<string, unknown>;
|
||||
if (Array.isArray(resultObj.content)) {
|
||||
// Wrapped format: result is {"content": [{"type": "text", "text": "..."}]}
|
||||
contentStr = extractTextContent(resultObj.content as Array<{ type: string; text?: string }>);
|
||||
const extracted = extractTextContent(resultObj.content as Array<{ type: string; text?: string }>);
|
||||
contentStr = extracted.text || JSON.stringify(result);
|
||||
} else {
|
||||
contentStr = JSON.stringify(result);
|
||||
}
|
||||
} else {
|
||||
contentStr = JSON.stringify(result);
|
||||
contentStr = String(result);
|
||||
}
|
||||
|
||||
// Clean up pending call
|
||||
pendingToolCalls.delete(toolCallId);
|
||||
|
||||
return [{
|
||||
kind: "tool_result",
|
||||
ts,
|
||||
toolUseId: toolCallId || "unknown",
|
||||
toolUseId: toolCallId,
|
||||
toolName,
|
||||
content: contentStr,
|
||||
isError,
|
||||
}];
|
||||
}
|
||||
|
||||
// Fallback for unknown event types
|
||||
return [{ kind: "stdout", ts, text: line }];
|
||||
}
|
||||
|
||||
@@ -35,11 +35,12 @@
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && cp -r src/migrations dist/migrations",
|
||||
"check:migrations": "tsx src/check-migration-numbering.ts",
|
||||
"build": "pnpm run check:migrations && tsc && cp -r src/migrations dist/migrations",
|
||||
"clean": "rm -rf dist",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"generate": "tsc -p tsconfig.json && drizzle-kit generate",
|
||||
"migrate": "tsx src/migrate.ts",
|
||||
"typecheck": "pnpm run check:migrations && tsc --noEmit",
|
||||
"generate": "pnpm run check:migrations && tsc -p tsconfig.json && drizzle-kit generate",
|
||||
"migrate": "pnpm run check:migrations && tsx src/migrate.ts",
|
||||
"seed": "tsx src/seed.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
179
packages/db/src/backup-lib.test.ts
Normal file
179
packages/db/src/backup-lib.test.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import postgres from "postgres";
|
||||
import { createBufferedTextFileWriter, runDatabaseBackup, runDatabaseRestore } from "./backup-lib.js";
|
||||
import { ensurePostgresDatabase } from "./client.js";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./test-embedded-postgres.js";
|
||||
|
||||
const cleanups: Array<() => Promise<void> | void> = [];
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
function createTempDir(prefix: string): string {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), prefix));
|
||||
cleanups.push(() => {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
return dir;
|
||||
}
|
||||
|
||||
async function createTempDatabase(): Promise<string> {
|
||||
const db = await startEmbeddedPostgresTestDatabase("paperclip-db-backup-");
|
||||
cleanups.push(db.cleanup);
|
||||
return db.connectionString;
|
||||
}
|
||||
|
||||
async function createSiblingDatabase(connectionString: string, databaseName: string): Promise<string> {
|
||||
const adminUrl = new URL(connectionString);
|
||||
adminUrl.pathname = "/postgres";
|
||||
await ensurePostgresDatabase(adminUrl.toString(), databaseName);
|
||||
const targetUrl = new URL(connectionString);
|
||||
targetUrl.pathname = `/${databaseName}`;
|
||||
return targetUrl.toString();
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
while (cleanups.length > 0) {
|
||||
const cleanup = cleanups.pop();
|
||||
await cleanup?.();
|
||||
}
|
||||
});
|
||||
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres backup tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
describe("createBufferedTextFileWriter", () => {
|
||||
it("preserves line boundaries across buffered flushes", async () => {
|
||||
const tempDir = createTempDir("paperclip-buffered-writer-");
|
||||
const outputPath = path.join(tempDir, "backup.sql");
|
||||
const writer = createBufferedTextFileWriter(outputPath, 16);
|
||||
const lines = [
|
||||
"-- header",
|
||||
"BEGIN;",
|
||||
"",
|
||||
"INSERT INTO test VALUES (1);",
|
||||
"-- footer",
|
||||
];
|
||||
|
||||
for (const line of lines) {
|
||||
writer.emit(line);
|
||||
}
|
||||
|
||||
await writer.close();
|
||||
|
||||
expect(fs.readFileSync(outputPath, "utf8")).toBe(lines.join("\n"));
|
||||
});
|
||||
});
|
||||
|
||||
describeEmbeddedPostgres("runDatabaseBackup", () => {
|
||||
it(
|
||||
"backs up and restores large table payloads without materializing one giant string",
|
||||
async () => {
|
||||
const sourceConnectionString = await createTempDatabase();
|
||||
const restoreConnectionString = await createSiblingDatabase(
|
||||
sourceConnectionString,
|
||||
"paperclip_restore_target",
|
||||
);
|
||||
const backupDir = createTempDir("paperclip-db-backup-output-");
|
||||
const sourceSql = postgres(sourceConnectionString, { max: 1, onnotice: () => {} });
|
||||
const restoreSql = postgres(restoreConnectionString, { max: 1, onnotice: () => {} });
|
||||
|
||||
try {
|
||||
await sourceSql.unsafe(`
|
||||
CREATE TYPE "public"."backup_test_state" AS ENUM ('pending', 'done');
|
||||
`);
|
||||
await sourceSql.unsafe(`
|
||||
CREATE TABLE "public"."backup_test_records" (
|
||||
"id" serial PRIMARY KEY,
|
||||
"title" text NOT NULL,
|
||||
"payload" text NOT NULL,
|
||||
"state" "public"."backup_test_state" NOT NULL,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
`);
|
||||
|
||||
const payload = "x".repeat(8192);
|
||||
for (let index = 0; index < 160; index += 1) {
|
||||
const createdAt = new Date(Date.UTC(2026, 0, 1, 0, 0, index));
|
||||
await sourceSql`
|
||||
INSERT INTO "public"."backup_test_records" (
|
||||
"title",
|
||||
"payload",
|
||||
"state",
|
||||
"metadata",
|
||||
"created_at"
|
||||
)
|
||||
VALUES (
|
||||
${`row-${index}`},
|
||||
${payload},
|
||||
${index % 2 === 0 ? "pending" : "done"}::"public"."backup_test_state",
|
||||
${JSON.stringify({ index, even: index % 2 === 0 })}::jsonb,
|
||||
${createdAt}
|
||||
)
|
||||
`;
|
||||
}
|
||||
|
||||
const result = await runDatabaseBackup({
|
||||
connectionString: sourceConnectionString,
|
||||
backupDir,
|
||||
retentionDays: 7,
|
||||
filenamePrefix: "paperclip-test",
|
||||
});
|
||||
|
||||
expect(result.backupFile).toMatch(/paperclip-test-.*\.sql$/);
|
||||
expect(result.sizeBytes).toBeGreaterThan(1024 * 1024);
|
||||
expect(fs.existsSync(result.backupFile)).toBe(true);
|
||||
|
||||
await runDatabaseRestore({
|
||||
connectionString: restoreConnectionString,
|
||||
backupFile: result.backupFile,
|
||||
});
|
||||
|
||||
const counts = await restoreSql.unsafe<{ count: number }[]>(`
|
||||
SELECT count(*)::int AS count
|
||||
FROM "public"."backup_test_records"
|
||||
`);
|
||||
expect(counts[0]?.count).toBe(160);
|
||||
|
||||
const sampleRows = await restoreSql.unsafe<{
|
||||
title: string;
|
||||
payload: string;
|
||||
state: string;
|
||||
metadata: { index: number; even: boolean };
|
||||
}[]>(`
|
||||
SELECT "title", "payload", "state"::text AS "state", "metadata"
|
||||
FROM "public"."backup_test_records"
|
||||
WHERE "title" IN ('row-0', 'row-159')
|
||||
ORDER BY "title"
|
||||
`);
|
||||
expect(sampleRows).toEqual([
|
||||
{
|
||||
title: "row-0",
|
||||
payload,
|
||||
state: "pending",
|
||||
metadata: { index: 0, even: true },
|
||||
},
|
||||
{
|
||||
title: "row-159",
|
||||
payload,
|
||||
state: "done",
|
||||
metadata: { index: 159, even: false },
|
||||
},
|
||||
]);
|
||||
} finally {
|
||||
await sourceSql.end();
|
||||
await restoreSql.end();
|
||||
}
|
||||
},
|
||||
60_000,
|
||||
);
|
||||
});
|
||||
@@ -1,5 +1,5 @@
|
||||
import { existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from "node:fs";
|
||||
import { readFile, writeFile } from "node:fs/promises";
|
||||
import { createWriteStream, existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from "node:fs";
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { basename, resolve } from "node:path";
|
||||
import postgres from "postgres";
|
||||
|
||||
@@ -47,6 +47,7 @@ type TableDefinition = {
|
||||
|
||||
const DRIZZLE_SCHEMA = "drizzle";
|
||||
const DRIZZLE_MIGRATIONS_TABLE = "__drizzle_migrations";
|
||||
const DEFAULT_BACKUP_WRITE_BUFFER_BYTES = 1024 * 1024;
|
||||
|
||||
const STATEMENT_BREAKPOINT = "-- paperclip statement breakpoint 69f6f3f1-42fd-46a6-bf17-d1d85f8f3900";
|
||||
|
||||
@@ -141,6 +142,102 @@ function tableKey(schemaName: string, tableName: string): string {
|
||||
return `${schemaName}.${tableName}`;
|
||||
}
|
||||
|
||||
export function createBufferedTextFileWriter(filePath: string, maxBufferedBytes = DEFAULT_BACKUP_WRITE_BUFFER_BYTES) {
|
||||
const stream = createWriteStream(filePath, { encoding: "utf8" });
|
||||
const flushThreshold = Math.max(1, Math.trunc(maxBufferedBytes));
|
||||
let bufferedLines: string[] = [];
|
||||
let bufferedBytes = 0;
|
||||
let firstChunk = true;
|
||||
let closed = false;
|
||||
let streamError: Error | null = null;
|
||||
let pendingWrite = Promise.resolve();
|
||||
|
||||
stream.on("error", (error) => {
|
||||
streamError = error;
|
||||
});
|
||||
|
||||
const writeChunk = async (chunk: string): Promise<void> => {
|
||||
if (streamError) throw streamError;
|
||||
const canContinue = stream.write(chunk);
|
||||
if (!canContinue) {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const handleDrain = () => {
|
||||
cleanup();
|
||||
resolve();
|
||||
};
|
||||
const handleError = (error: Error) => {
|
||||
cleanup();
|
||||
reject(error);
|
||||
};
|
||||
const cleanup = () => {
|
||||
stream.off("drain", handleDrain);
|
||||
stream.off("error", handleError);
|
||||
};
|
||||
stream.once("drain", handleDrain);
|
||||
stream.once("error", handleError);
|
||||
});
|
||||
}
|
||||
if (streamError) throw streamError;
|
||||
};
|
||||
|
||||
const flushBufferedLines = () => {
|
||||
if (bufferedLines.length === 0) return;
|
||||
const linesToWrite = bufferedLines;
|
||||
bufferedLines = [];
|
||||
bufferedBytes = 0;
|
||||
const chunkBody = linesToWrite.join("\n");
|
||||
const chunk = firstChunk ? chunkBody : `\n${chunkBody}`;
|
||||
firstChunk = false;
|
||||
pendingWrite = pendingWrite.then(() => writeChunk(chunk));
|
||||
};
|
||||
|
||||
return {
|
||||
emit(line: string) {
|
||||
if (closed) {
|
||||
throw new Error(`Cannot write to closed backup file: ${filePath}`);
|
||||
}
|
||||
if (streamError) throw streamError;
|
||||
bufferedLines.push(line);
|
||||
bufferedBytes += Buffer.byteLength(line, "utf8") + 1;
|
||||
if (bufferedBytes >= flushThreshold) {
|
||||
flushBufferedLines();
|
||||
}
|
||||
},
|
||||
async close() {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
flushBufferedLines();
|
||||
await pendingWrite;
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
if (streamError) {
|
||||
reject(streamError);
|
||||
return;
|
||||
}
|
||||
stream.end((error?: Error | null) => {
|
||||
if (error) reject(error);
|
||||
else resolve();
|
||||
});
|
||||
});
|
||||
if (streamError) throw streamError;
|
||||
},
|
||||
async abort() {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
bufferedLines = [];
|
||||
bufferedBytes = 0;
|
||||
stream.destroy();
|
||||
await pendingWrite.catch(() => {});
|
||||
if (existsSync(filePath)) {
|
||||
try {
|
||||
unlinkSync(filePath);
|
||||
} catch {
|
||||
// Preserve the original backup failure if temporary file cleanup also fails.
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise<RunDatabaseBackupResult> {
|
||||
const filenamePrefix = opts.filenamePrefix ?? "paperclip";
|
||||
const retentionDays = Math.max(1, Math.trunc(opts.retentionDays));
|
||||
@@ -149,12 +246,14 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
|
||||
const excludedTableNames = normalizeTableNameSet(opts.excludeTables);
|
||||
const nullifiedColumnsByTable = normalizeNullifyColumnMap(opts.nullifyColumns);
|
||||
const sql = postgres(opts.connectionString, { max: 1, connect_timeout: connectTimeout });
|
||||
mkdirSync(opts.backupDir, { recursive: true });
|
||||
const backupFile = resolve(opts.backupDir, `${filenamePrefix}-${timestamp()}.sql`);
|
||||
const writer = createBufferedTextFileWriter(backupFile);
|
||||
|
||||
try {
|
||||
await sql`SELECT 1`;
|
||||
|
||||
const lines: string[] = [];
|
||||
const emit = (line: string) => lines.push(line);
|
||||
const emit = (line: string) => writer.emit(line);
|
||||
const emitStatement = (statement: string) => {
|
||||
emit(statement);
|
||||
emit(STATEMENT_BREAKPOINT);
|
||||
@@ -503,10 +602,7 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
|
||||
emitStatement("COMMIT;");
|
||||
emit("");
|
||||
|
||||
// Write the backup file
|
||||
mkdirSync(opts.backupDir, { recursive: true });
|
||||
const backupFile = resolve(opts.backupDir, `${filenamePrefix}-${timestamp()}.sql`);
|
||||
await writeFile(backupFile, lines.join("\n"), "utf8");
|
||||
await writer.close();
|
||||
|
||||
const sizeBytes = statSync(backupFile).size;
|
||||
const prunedCount = pruneOldBackups(opts.backupDir, retentionDays, filenamePrefix);
|
||||
@@ -516,6 +612,9 @@ export async function runDatabaseBackup(opts: RunDatabaseBackupOptions): Promise
|
||||
sizeBytes,
|
||||
prunedCount,
|
||||
};
|
||||
} catch (error) {
|
||||
await writer.abort();
|
||||
throw error;
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
89
packages/db/src/check-migration-numbering.ts
Normal file
89
packages/db/src/check-migration-numbering.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { readdir, readFile } from "node:fs/promises";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
const migrationsDir = fileURLToPath(new URL("./migrations", import.meta.url));
|
||||
const journalPath = fileURLToPath(new URL("./migrations/meta/_journal.json", import.meta.url));
|
||||
|
||||
type JournalFile = {
|
||||
entries?: Array<{
|
||||
idx?: number;
|
||||
tag?: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
function migrationNumber(value: string): string | null {
|
||||
const match = value.match(/^(\d{4})_/);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
function ensureNoDuplicates(values: string[], label: string) {
|
||||
const seen = new Map<string, string>();
|
||||
|
||||
for (const value of values) {
|
||||
const number = migrationNumber(value);
|
||||
if (!number) {
|
||||
throw new Error(`${label} entry does not start with a 4-digit migration number: ${value}`);
|
||||
}
|
||||
const existing = seen.get(number);
|
||||
if (existing) {
|
||||
throw new Error(`Duplicate migration number ${number} in ${label}: ${existing}, ${value}`);
|
||||
}
|
||||
seen.set(number, value);
|
||||
}
|
||||
}
|
||||
|
||||
function ensureStrictlyOrdered(values: string[], label: string) {
|
||||
const sorted = [...values].sort();
|
||||
for (let index = 0; index < values.length; index += 1) {
|
||||
if (values[index] !== sorted[index]) {
|
||||
throw new Error(
|
||||
`${label} are out of order at position ${index}: expected ${sorted[index]}, found ${values[index]}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function ensureJournalMatchesFiles(migrationFiles: string[], journalTags: string[]) {
|
||||
const journalFiles = journalTags.map((tag) => `${tag}.sql`);
|
||||
|
||||
if (journalFiles.length !== migrationFiles.length) {
|
||||
throw new Error(
|
||||
`Migration journal/file count mismatch: journal has ${journalFiles.length}, files have ${migrationFiles.length}`,
|
||||
);
|
||||
}
|
||||
|
||||
for (let index = 0; index < migrationFiles.length; index += 1) {
|
||||
const migrationFile = migrationFiles[index];
|
||||
const journalFile = journalFiles[index];
|
||||
if (migrationFile !== journalFile) {
|
||||
throw new Error(
|
||||
`Migration journal/file order mismatch at position ${index}: journal has ${journalFile}, files have ${migrationFile}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const migrationFiles = (await readdir(migrationsDir))
|
||||
.filter((entry) => entry.endsWith(".sql"))
|
||||
.sort();
|
||||
|
||||
ensureNoDuplicates(migrationFiles, "migration files");
|
||||
ensureStrictlyOrdered(migrationFiles, "migration files");
|
||||
|
||||
const rawJournal = await readFile(journalPath, "utf8");
|
||||
const journal = JSON.parse(rawJournal) as JournalFile;
|
||||
const journalTags = (journal.entries ?? [])
|
||||
.map((entry, index) => {
|
||||
if (typeof entry.tag !== "string" || entry.tag.length === 0) {
|
||||
throw new Error(`Migration journal entry ${index} is missing a tag`);
|
||||
}
|
||||
return entry.tag;
|
||||
});
|
||||
|
||||
ensureNoDuplicates(journalTags, "migration journal");
|
||||
ensureStrictlyOrdered(journalTags, "migration journal");
|
||||
ensureJournalMatchesFiles(migrationFiles, journalTags);
|
||||
}
|
||||
|
||||
await main();
|
||||
@@ -1,83 +1,24 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import postgres from "postgres";
|
||||
import {
|
||||
applyPendingMigrations,
|
||||
ensurePostgresDatabase,
|
||||
inspectMigrations,
|
||||
} from "./client.js";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./test-embedded-postgres.js";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
const tempPaths: string[] = [];
|
||||
const runningInstances: EmbeddedPostgresInstance[] = [];
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
const cleanups: Array<() => Promise<void>> = [];
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
async function createTempDatabase(): Promise<string> {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-db-client-"));
|
||||
tempPaths.push(dataDir);
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
runningInstances.push(instance);
|
||||
|
||||
const adminUrl = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminUrl, "paperclip");
|
||||
return `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
const db = await startEmbeddedPostgresTestDatabase("paperclip-db-client-");
|
||||
cleanups.push(db.cleanup);
|
||||
return db.connectionString;
|
||||
}
|
||||
|
||||
async function migrationHash(migrationFile: string): Promise<string> {
|
||||
@@ -89,19 +30,19 @@ async function migrationHash(migrationFile: string): Promise<string> {
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
while (runningInstances.length > 0) {
|
||||
const instance = runningInstances.pop();
|
||||
if (!instance) continue;
|
||||
await instance.stop();
|
||||
}
|
||||
while (tempPaths.length > 0) {
|
||||
const tempPath = tempPaths.pop();
|
||||
if (!tempPath) continue;
|
||||
fs.rmSync(tempPath, { recursive: true, force: true });
|
||||
while (cleanups.length > 0) {
|
||||
const cleanup = cleanups.pop();
|
||||
await cleanup?.();
|
||||
}
|
||||
});
|
||||
|
||||
describe("applyPendingMigrations", () => {
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres migration tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
describeEmbeddedPostgres("applyPendingMigrations", () => {
|
||||
it(
|
||||
"applies an inserted earlier migration without replaying later legacy migrations",
|
||||
async () => {
|
||||
@@ -228,4 +169,236 @@ describe("applyPendingMigrations", () => {
|
||||
},
|
||||
20_000,
|
||||
);
|
||||
|
||||
it(
|
||||
"replays migration 0046 safely when document revision columns already exist",
|
||||
async () => {
|
||||
const connectionString = await createTempDatabase();
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const sql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const smoothSentinelsHash = await migrationHash("0046_smooth_sentinels.sql");
|
||||
|
||||
await sql.unsafe(
|
||||
`DELETE FROM "drizzle"."__drizzle_migrations" WHERE hash = '${smoothSentinelsHash}'`,
|
||||
);
|
||||
|
||||
const columns = await sql.unsafe<{ column_name: string; is_nullable: string; column_default: string | null }[]>(
|
||||
`
|
||||
SELECT column_name, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'document_revisions'
|
||||
AND column_name IN ('title', 'format')
|
||||
ORDER BY column_name
|
||||
`,
|
||||
);
|
||||
expect(columns).toHaveLength(2);
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
const pendingState = await inspectMigrations(connectionString);
|
||||
expect(pendingState).toMatchObject({
|
||||
status: "needsMigrations",
|
||||
pendingMigrations: ["0046_smooth_sentinels.sql"],
|
||||
reason: "pending-migrations",
|
||||
});
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const finalState = await inspectMigrations(connectionString);
|
||||
expect(finalState.status).toBe("upToDate");
|
||||
|
||||
const verifySql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const columns = await verifySql.unsafe<{ column_name: string; is_nullable: string; column_default: string | null }[]>(
|
||||
`
|
||||
SELECT column_name, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'document_revisions'
|
||||
AND column_name IN ('title', 'format')
|
||||
ORDER BY column_name
|
||||
`,
|
||||
);
|
||||
expect(columns).toEqual([
|
||||
expect.objectContaining({
|
||||
column_name: "format",
|
||||
is_nullable: "NO",
|
||||
}),
|
||||
expect.objectContaining({
|
||||
column_name: "title",
|
||||
is_nullable: "YES",
|
||||
}),
|
||||
]);
|
||||
expect(columns[0]?.column_default).toContain("'markdown'");
|
||||
} finally {
|
||||
await verifySql.end();
|
||||
}
|
||||
},
|
||||
20_000,
|
||||
);
|
||||
|
||||
it(
|
||||
"replays migration 0047 safely when feedback tables and run columns already exist",
|
||||
async () => {
|
||||
const connectionString = await createTempDatabase();
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const sql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const overjoyedGrootHash = await migrationHash("0047_overjoyed_groot.sql");
|
||||
|
||||
await sql.unsafe(
|
||||
`DELETE FROM "drizzle"."__drizzle_migrations" WHERE hash = '${overjoyedGrootHash}'`,
|
||||
);
|
||||
|
||||
const tables = await sql.unsafe<{ table_name: string }[]>(
|
||||
`
|
||||
SELECT table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name IN ('feedback_exports', 'feedback_votes')
|
||||
ORDER BY table_name
|
||||
`,
|
||||
);
|
||||
expect(tables.map((row) => row.table_name)).toEqual([
|
||||
"feedback_exports",
|
||||
"feedback_votes",
|
||||
]);
|
||||
|
||||
const columns = await sql.unsafe<{ table_name: string; column_name: string }[]>(
|
||||
`
|
||||
SELECT table_name, column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND (
|
||||
(table_name = 'companies' AND column_name IN (
|
||||
'feedback_data_sharing_enabled',
|
||||
'feedback_data_sharing_consent_at',
|
||||
'feedback_data_sharing_consent_by_user_id',
|
||||
'feedback_data_sharing_terms_version'
|
||||
))
|
||||
OR (table_name = 'document_revisions' AND column_name = 'created_by_run_id')
|
||||
OR (table_name = 'issue_comments' AND column_name = 'created_by_run_id')
|
||||
)
|
||||
ORDER BY table_name, column_name
|
||||
`,
|
||||
);
|
||||
expect(columns).toHaveLength(6);
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
const pendingState = await inspectMigrations(connectionString);
|
||||
expect(pendingState).toMatchObject({
|
||||
status: "needsMigrations",
|
||||
pendingMigrations: ["0047_overjoyed_groot.sql"],
|
||||
reason: "pending-migrations",
|
||||
});
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const finalState = await inspectMigrations(connectionString);
|
||||
expect(finalState.status).toBe("upToDate");
|
||||
|
||||
const verifySql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const constraints = await verifySql.unsafe<{ conname: string }[]>(
|
||||
`
|
||||
SELECT conname
|
||||
FROM pg_constraint
|
||||
WHERE conname IN (
|
||||
'feedback_exports_company_id_companies_id_fk',
|
||||
'feedback_exports_feedback_vote_id_feedback_votes_id_fk',
|
||||
'feedback_exports_issue_id_issues_id_fk',
|
||||
'feedback_votes_company_id_companies_id_fk',
|
||||
'feedback_votes_issue_id_issues_id_fk'
|
||||
)
|
||||
ORDER BY conname
|
||||
`,
|
||||
);
|
||||
expect(constraints.map((row) => row.conname)).toEqual([
|
||||
"feedback_exports_company_id_companies_id_fk",
|
||||
"feedback_exports_feedback_vote_id_feedback_votes_id_fk",
|
||||
"feedback_exports_issue_id_issues_id_fk",
|
||||
"feedback_votes_company_id_companies_id_fk",
|
||||
"feedback_votes_issue_id_issues_id_fk",
|
||||
]);
|
||||
} finally {
|
||||
await verifySql.end();
|
||||
}
|
||||
},
|
||||
20_000,
|
||||
);
|
||||
|
||||
it(
|
||||
"replays migration 0048 safely when routines.variables already exists",
|
||||
async () => {
|
||||
const connectionString = await createTempDatabase();
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const sql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const flashyMarrowHash = await migrationHash("0048_flashy_marrow.sql");
|
||||
|
||||
await sql.unsafe(
|
||||
`DELETE FROM "drizzle"."__drizzle_migrations" WHERE hash = '${flashyMarrowHash}'`,
|
||||
);
|
||||
|
||||
const columns = await sql.unsafe<{ column_name: string }[]>(
|
||||
`
|
||||
SELECT column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'routines'
|
||||
AND column_name = 'variables'
|
||||
`,
|
||||
);
|
||||
expect(columns).toHaveLength(1);
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
const pendingState = await inspectMigrations(connectionString);
|
||||
expect(pendingState).toMatchObject({
|
||||
status: "needsMigrations",
|
||||
pendingMigrations: ["0048_flashy_marrow.sql"],
|
||||
reason: "pending-migrations",
|
||||
});
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
const finalState = await inspectMigrations(connectionString);
|
||||
expect(finalState.status).toBe("upToDate");
|
||||
|
||||
const verifySql = postgres(connectionString, { max: 1, onnotice: () => {} });
|
||||
try {
|
||||
const columns = await verifySql.unsafe<{ column_name: string; is_nullable: string; data_type: string }[]>(
|
||||
`
|
||||
SELECT column_name, is_nullable, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'routines'
|
||||
AND column_name = 'variables'
|
||||
`,
|
||||
);
|
||||
expect(columns).toEqual([
|
||||
expect.objectContaining({
|
||||
column_name: "variables",
|
||||
is_nullable: "NO",
|
||||
data_type: "jsonb",
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
await verifySql.end();
|
||||
}
|
||||
},
|
||||
20_000,
|
||||
);
|
||||
});
|
||||
|
||||
28
packages/db/src/embedded-postgres-error.test.ts
Normal file
28
packages/db/src/embedded-postgres-error.test.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { createEmbeddedPostgresLogBuffer, formatEmbeddedPostgresError } from "./embedded-postgres-error.js";
|
||||
|
||||
describe("formatEmbeddedPostgresError", () => {
|
||||
it("adds a shared-memory hint when initdb logs expose the real cause", () => {
|
||||
const error = formatEmbeddedPostgresError("Postgres init script exited with code 1.", {
|
||||
fallbackMessage: "Failed to initialize embedded PostgreSQL cluster",
|
||||
recentLogs: [
|
||||
"running bootstrap script ...",
|
||||
"FATAL: could not create shared memory segment: Cannot allocate memory",
|
||||
"DETAIL: Failed system call was shmget(key=123, size=56, 03600).",
|
||||
],
|
||||
});
|
||||
|
||||
expect(error.message).toContain("could not allocate shared memory");
|
||||
expect(error.message).toContain("kern.sysv.shm");
|
||||
expect(error.message).toContain("could not create shared memory segment");
|
||||
});
|
||||
|
||||
it("keeps only recent non-empty log lines in the collector", () => {
|
||||
const buffer = createEmbeddedPostgresLogBuffer(2);
|
||||
buffer.append("line one\n\n");
|
||||
buffer.append("line two");
|
||||
buffer.append("line three");
|
||||
|
||||
expect(buffer.getRecentLogs()).toEqual(["line two", "line three"]);
|
||||
});
|
||||
});
|
||||
89
packages/db/src/embedded-postgres-error.ts
Normal file
89
packages/db/src/embedded-postgres-error.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
const DEFAULT_RECENT_LOG_LIMIT = 40;
|
||||
const RECENT_LOG_SUMMARY_LINES = 8;
|
||||
|
||||
function toError(error: unknown, fallbackMessage: string): Error {
|
||||
if (error instanceof Error) return error;
|
||||
if (error === undefined) return new Error(fallbackMessage);
|
||||
if (typeof error === "string") return new Error(`${fallbackMessage}: ${error}`);
|
||||
|
||||
try {
|
||||
return new Error(`${fallbackMessage}: ${JSON.stringify(error)}`);
|
||||
} catch {
|
||||
return new Error(`${fallbackMessage}: ${String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeRecentLogs(recentLogs: string[]): string | null {
|
||||
if (recentLogs.length === 0) return null;
|
||||
return recentLogs
|
||||
.slice(-RECENT_LOG_SUMMARY_LINES)
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0)
|
||||
.join(" | ");
|
||||
}
|
||||
|
||||
function detectEmbeddedPostgresHint(recentLogs: string[]): string | null {
|
||||
const haystack = recentLogs.join("\n").toLowerCase();
|
||||
if (!haystack.includes("could not create shared memory segment")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
"Embedded PostgreSQL bootstrap could not allocate shared memory. " +
|
||||
"On macOS, this usually means the host's kern.sysv.shm* limits are too low for another local PostgreSQL cluster. " +
|
||||
"Stop other local PostgreSQL servers or raise the shared-memory sysctls, then retry."
|
||||
);
|
||||
}
|
||||
|
||||
export function createEmbeddedPostgresLogBuffer(limit = DEFAULT_RECENT_LOG_LIMIT): {
|
||||
append(message: unknown): void;
|
||||
getRecentLogs(): string[];
|
||||
} {
|
||||
const recentLogs: string[] = [];
|
||||
|
||||
return {
|
||||
append(message: unknown) {
|
||||
const text =
|
||||
typeof message === "string"
|
||||
? message
|
||||
: message instanceof Error
|
||||
? message.message
|
||||
: String(message ?? "");
|
||||
|
||||
for (const rawLine of text.split(/\r?\n/)) {
|
||||
const line = rawLine.trim();
|
||||
if (!line) continue;
|
||||
recentLogs.push(line);
|
||||
if (recentLogs.length > limit) {
|
||||
recentLogs.splice(0, recentLogs.length - limit);
|
||||
}
|
||||
}
|
||||
},
|
||||
getRecentLogs() {
|
||||
return [...recentLogs];
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function formatEmbeddedPostgresError(
|
||||
error: unknown,
|
||||
input: {
|
||||
fallbackMessage: string;
|
||||
recentLogs?: string[];
|
||||
},
|
||||
): Error {
|
||||
const baseError = toError(error, input.fallbackMessage);
|
||||
const recentLogs = input.recentLogs ?? [];
|
||||
const parts = [baseError.message];
|
||||
const hint = detectEmbeddedPostgresHint(recentLogs);
|
||||
const recentSummary = summarizeRecentLogs(recentLogs);
|
||||
|
||||
if (hint) {
|
||||
parts.push(hint);
|
||||
}
|
||||
if (recentSummary) {
|
||||
parts.push(`Recent embedded Postgres logs: ${recentSummary}`);
|
||||
}
|
||||
|
||||
return new Error(parts.join(" "));
|
||||
}
|
||||
@@ -11,6 +11,12 @@ export {
|
||||
type MigrationBootstrapResult,
|
||||
type Db,
|
||||
} from "./client.js";
|
||||
export {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestSupport,
|
||||
} from "./test-embedded-postgres.js";
|
||||
export {
|
||||
runDatabaseBackup,
|
||||
runDatabaseRestore,
|
||||
@@ -19,4 +25,8 @@ export {
|
||||
type RunDatabaseBackupResult,
|
||||
type RunDatabaseRestoreOptions,
|
||||
} from "./backup-lib.js";
|
||||
export {
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
formatEmbeddedPostgresError,
|
||||
} from "./embedded-postgres-error.js";
|
||||
export * from "./schema/index.js";
|
||||
|
||||
@@ -2,6 +2,7 @@ import { existsSync, readFileSync, rmSync } from "node:fs";
|
||||
import { createServer } from "node:net";
|
||||
import path from "node:path";
|
||||
import { ensurePostgresDatabase, getPostgresDataDirectory } from "./client.js";
|
||||
import { createEmbeddedPostgresLogBuffer, formatEmbeddedPostgresError } from "./embedded-postgres-error.js";
|
||||
import { resolveDatabaseTarget } from "./runtime-config.js";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
@@ -27,18 +28,6 @@ export type MigrationConnection = {
|
||||
stop: () => Promise<void>;
|
||||
};
|
||||
|
||||
function toError(error: unknown, fallbackMessage: string): Error {
|
||||
if (error instanceof Error) return error;
|
||||
if (error === undefined) return new Error(fallbackMessage);
|
||||
if (typeof error === "string") return new Error(`${fallbackMessage}: ${error}`);
|
||||
|
||||
try {
|
||||
return new Error(`${fallbackMessage}: ${JSON.stringify(error)}`);
|
||||
} catch {
|
||||
return new Error(`${fallbackMessage}: ${String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function readRunningPostmasterPid(postmasterPidFile: string): number | null {
|
||||
if (!existsSync(postmasterPidFile)) return null;
|
||||
try {
|
||||
@@ -109,6 +98,7 @@ async function ensureEmbeddedPostgresConnection(
|
||||
const runningPid = readRunningPostmasterPid(postmasterPidFile);
|
||||
const runningPort = readPidFilePort(postmasterPidFile);
|
||||
const preferredAdminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${preferredPort}/postgres`;
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer();
|
||||
|
||||
if (!runningPid && existsSync(pgVersionFile)) {
|
||||
try {
|
||||
@@ -150,19 +140,20 @@ async function ensureEmbeddedPostgresConnection(
|
||||
password: "paperclip",
|
||||
port: selectedPort,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: logBuffer.append,
|
||||
onError: logBuffer.append,
|
||||
});
|
||||
|
||||
if (!existsSync(path.resolve(dataDir, "PG_VERSION"))) {
|
||||
try {
|
||||
await instance.initialise();
|
||||
} catch (error) {
|
||||
throw toError(
|
||||
error,
|
||||
`Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${selectedPort}`,
|
||||
);
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage:
|
||||
`Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${selectedPort}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
}
|
||||
if (existsSync(postmasterPidFile)) {
|
||||
@@ -171,7 +162,10 @@ async function ensureEmbeddedPostgresConnection(
|
||||
try {
|
||||
await instance.start();
|
||||
} catch (error) {
|
||||
throw toError(error, `Failed to start embedded PostgreSQL on port ${selectedPort}`);
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${selectedPort}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${selectedPort}/postgres`;
|
||||
|
||||
17
packages/db/src/migrations/0045_workable_shockwave.sql
Normal file
17
packages/db/src/migrations/0045_workable_shockwave.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
CREATE TABLE "issue_inbox_archives" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"issue_id" uuid NOT NULL,
|
||||
"user_id" text NOT NULL,
|
||||
"archived_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
DROP INDEX "board_api_keys_key_hash_idx";--> statement-breakpoint
|
||||
ALTER TABLE "issue_inbox_archives" ADD CONSTRAINT "issue_inbox_archives_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "issue_inbox_archives" ADD CONSTRAINT "issue_inbox_archives_issue_id_issues_id_fk" FOREIGN KEY ("issue_id") REFERENCES "public"."issues"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "issue_inbox_archives_company_issue_idx" ON "issue_inbox_archives" USING btree ("company_id","issue_id");--> statement-breakpoint
|
||||
CREATE INDEX "issue_inbox_archives_company_user_idx" ON "issue_inbox_archives" USING btree ("company_id","user_id");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "issue_inbox_archives_company_issue_user_idx" ON "issue_inbox_archives" USING btree ("company_id","issue_id","user_id");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "board_api_keys_key_hash_idx" ON "board_api_keys" USING btree ("key_hash");
|
||||
11
packages/db/src/migrations/0046_smooth_sentinels.sql
Normal file
11
packages/db/src/migrations/0046_smooth_sentinels.sql
Normal file
@@ -0,0 +1,11 @@
|
||||
ALTER TABLE "document_revisions" ADD COLUMN IF NOT EXISTS "title" text;--> statement-breakpoint
|
||||
ALTER TABLE "document_revisions" ADD COLUMN IF NOT EXISTS "format" text;--> statement-breakpoint
|
||||
ALTER TABLE "document_revisions" ALTER COLUMN "format" SET DEFAULT 'markdown';
|
||||
--> statement-breakpoint
|
||||
UPDATE "document_revisions" AS "dr"
|
||||
SET
|
||||
"title" = COALESCE("dr"."title", "d"."title"),
|
||||
"format" = COALESCE("dr"."format", "d"."format", 'markdown')
|
||||
FROM "documents" AS "d"
|
||||
WHERE "d"."id" = "dr"."document_id";--> statement-breakpoint
|
||||
ALTER TABLE "document_revisions" ALTER COLUMN "format" SET NOT NULL;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user